djl
djl copied to clipboard
Can't load a Llama 3 model
<dependencyManagement>
<dependencies>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>bom</artifactId>
<version>${djl.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
<version>1.6.0</version>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>api</artifactId>
</dependency>
<dependency>
<groupId>ai.djl</groupId>
<artifactId>model-zoo</artifactId>
</dependency>
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-model-zoo</artifactId>
</dependency>
<dependency>
<groupId>ai.djl.huggingface</groupId>
<artifactId>tokenizers</artifactId>
</dependency>
<dependency>
<groupId>ai.djl.pytorch</groupId>
<artifactId>pytorch-engine</artifactId>
<scope>runtime</scope>
</dependency>
</dependencies>
Criteria<String, String> criteria = Criteria.builder() .setTypes(String.class, String.class) .optModelPath(modelDir) .optEngine("PyTorch") // Modern safetensors options (DJL 0.33.0+) .optOption("modelType", "safetensors") .optOption("shardModel", "true") .optOption("modelName", "model") // Must match index prefix .optOption("useSafetensors", "true") // Required for modern DJL .optOption("hasParameter", "true") .optOption("mapLocation", "true") // Performance options .optOption("dtype", "float16") // 👇 Specify the Hugging Face task directly .optArgument("task", "text-generation") // Key change! .build();
ai.djl.repository.zoo.ModelNotFoundException: No model with the specified URI or the matching Input/Output type is found. at ai.djl.repository.zoo.Criteria.loadModel(Criteria.java:162) at ai.djl.repository.zoo.ModelZoo.loadModel(ModelZoo.java:179)
I can't load a Llama 3 model with DJL 0.33.0. What is the correct configuration?