Mirror of https://github.com/amithkoujalgi/ollama4j.git (synced 2025-11-04 02:20:50 +01:00)
	Refactor exception handling by replacing OllamaBaseException with OllamaException across the codebase. Update relevant method signatures and import statements accordingly.
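For downstream users the change is a class rename only: call sites keep the same shape and simply catch or declare the new type. A minimal caller-side sketch (the host URL is illustrative; the claim that both constructors are unchanged follows from the OllamaException class shown further down in this diff):

import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaException; // previously OllamaBaseException

public class PingExample {
    public static void main(String[] args) {
        OllamaAPI api = new OllamaAPI("http://localhost:11434"); // illustrative host
        try {
            // ping() now declares `throws OllamaException`; the call itself is unchanged
            System.out.println("Server reachable: " + api.ping());
        } catch (OllamaException e) {
            System.err.println("Ping failed: " + e.getMessage());
        }
    }
}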
@@ -9,7 +9,7 @@
 package io.github.ollama4j;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import io.github.ollama4j.exceptions.OllamaBaseException;
+import io.github.ollama4j.exceptions.OllamaException;
 import io.github.ollama4j.exceptions.RoleNotFoundException;
 import io.github.ollama4j.exceptions.ToolInvocationException;
 import io.github.ollama4j.metrics.MetricsRecorder;
@@ -150,9 +150,9 @@ public class OllamaAPI {
      * Checks the reachability of the Ollama server.
      *
      * @return true if the server is reachable, false otherwise
-     * @throws OllamaBaseException if the ping fails
+     * @throws OllamaException if the ping fails
      */
-    public boolean ping() throws OllamaBaseException {
+    public boolean ping() throws OllamaException {
         long startTime = System.currentTimeMillis();
         String url = "/api/tags";
         int statusCode = -1;
@@ -175,7 +175,7 @@ public class OllamaAPI {
             statusCode = response.statusCode();
             return statusCode == 200;
         } catch (Exception e) {
-            throw new OllamaBaseException("Ping failed", e);
+            throw new OllamaException("Ping failed", e);
         } finally {
             MetricsRecorder.record(
                     url, "", false, false, false, null, null, startTime, statusCode, out);
@@ -186,9 +186,9 @@ public class OllamaAPI {
      * Provides a list of running models and details about each model currently loaded into memory.
      *
      * @return ModelsProcessResponse containing details about the running models
-     * @throws OllamaBaseException if the response indicates an error status
+     * @throws OllamaException if the response indicates an error status
      */
-    public ModelsProcessResponse ps() throws OllamaBaseException {
+    public ModelsProcessResponse ps() throws OllamaException {
         long startTime = System.currentTimeMillis();
         String url = "/api/ps";
         int statusCode = -1;
@@ -208,7 +208,7 @@ public class OllamaAPI {
                                 .GET()
                                 .build();
             } catch (URISyntaxException e) {
-                throw new OllamaBaseException(e.getMessage(), e);
+                throw new OllamaException(e.getMessage(), e);
             }
             HttpResponse<String> response = null;
             response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
@@ -218,10 +218,10 @@ public class OllamaAPI {
                 return Utils.getObjectMapper()
                         .readValue(responseString, ModelsProcessResponse.class);
             } else {
-                throw new OllamaBaseException(statusCode + " - " + responseString);
+                throw new OllamaException(statusCode + " - " + responseString);
             }
         } catch (Exception e) {
-            throw new OllamaBaseException("ps failed", e);
+            throw new OllamaException("ps failed", e);
         } finally {
             MetricsRecorder.record(
                     url, "", false, false, false, null, null, startTime, statusCode, out);
@@ -232,9 +232,9 @@ public class OllamaAPI {
      * Lists available models from the Ollama server.
      *
      * @return a list of models available on the server
-     * @throws OllamaBaseException if the response indicates an error status
+     * @throws OllamaException if the response indicates an error status
      */
-    public List<Model> listModels() throws OllamaBaseException {
+    public List<Model> listModels() throws OllamaException {
         long startTime = System.currentTimeMillis();
         String url = "/api/tags";
         int statusCode = -1;
@@ -260,10 +260,10 @@ public class OllamaAPI {
                         .readValue(responseString, ListModelsResponse.class)
                         .getModels();
             } else {
-                throw new OllamaBaseException(statusCode + " - " + responseString);
+                throw new OllamaException(statusCode + " - " + responseString);
             }
         } catch (Exception e) {
-            throw new OllamaBaseException(e.getMessage(), e);
+            throw new OllamaException(e.getMessage(), e);
         } finally {
             MetricsRecorder.record(
                     url, "", false, false, false, null, null, startTime, statusCode, out);
@@ -309,9 +309,9 @@ public class OllamaAPI {
      * Internal method to pull a model from the Ollama server.
      *
      * @param modelName the name of the model to pull
-     * @throws OllamaBaseException if the pull fails
+     * @throws OllamaException if the pull fails
      */
-    private void doPullModel(String modelName) throws OllamaBaseException {
+    private void doPullModel(String modelName) throws OllamaException {
         long startTime = System.currentTimeMillis();
         String url = "/api/pull";
         int statusCode = -1;
@@ -348,13 +348,13 @@ public class OllamaAPI {
             }
             if (!success) {
                 LOG.error("Model pull failed or returned invalid status.");
-                throw new OllamaBaseException("Model pull failed or returned invalid status.");
+                throw new OllamaException("Model pull failed or returned invalid status.");
             }
             if (statusCode != 200) {
-                throw new OllamaBaseException(statusCode + " - " + responseString);
+                throw new OllamaException(statusCode + " - " + responseString);
             }
         } catch (Exception e) {
-            throw new OllamaBaseException(e.getMessage(), e);
+            throw new OllamaException(e.getMessage(), e);
         } finally {
             MetricsRecorder.record(
                     url, "", false, false, false, null, null, startTime, statusCode, out);
@@ -368,18 +368,18 @@ public class OllamaAPI {
      * @param modelPullResponse the response from the model pull
      * @param modelName the name of the model
      * @return true if the pull was successful, false otherwise
-     * @throws OllamaBaseException if the response contains an error
+     * @throws OllamaException if the response contains an error
      */
     @SuppressWarnings("RedundantIfStatement")
     private boolean processModelPullResponse(ModelPullResponse modelPullResponse, String modelName)
-            throws OllamaBaseException {
+            throws OllamaException {
         if (modelPullResponse == null) {
             LOG.error("Received null response for model pull.");
             return false;
         }
         String error = modelPullResponse.getError();
         if (error != null && !error.trim().isEmpty()) {
-            throw new OllamaBaseException("Model pull failed: " + error);
+            throw new OllamaException("Model pull failed: " + error);
         }
         String status = modelPullResponse.getStatus();
         if (status != null) {
@@ -395,9 +395,9 @@ public class OllamaAPI {
      * Gets the Ollama server version.
      *
      * @return the version string
-     * @throws OllamaBaseException if the request fails
+     * @throws OllamaException if the request fails
      */
-    public String getVersion() throws OllamaBaseException {
+    public String getVersion() throws OllamaException {
         String url = "/api/version";
         long startTime = System.currentTimeMillis();
         int statusCode = -1;
@@ -423,10 +423,10 @@ public class OllamaAPI {
                         .readValue(responseString, OllamaVersion.class)
                         .getVersion();
             } else {
-                throw new OllamaBaseException(statusCode + " - " + responseString);
+                throw new OllamaException(statusCode + " - " + responseString);
             }
         } catch (Exception e) {
-            throw new OllamaBaseException(e.getMessage(), e);
+            throw new OllamaException(e.getMessage(), e);
         } finally {
             MetricsRecorder.record(
                     url, "", false, false, false, null, null, startTime, statusCode, out);
@@ -439,9 +439,9 @@ public class OllamaAPI {
      * in the format "name:tag" to pull the corresponding model.
      *
      * @param modelName the name/tag of the model to be pulled. Ex: llama3:latest
-     * @throws OllamaBaseException if the response indicates an error status
+     * @throws OllamaException if the response indicates an error status
      */
-    public void pullModel(String modelName) throws OllamaBaseException {
+    public void pullModel(String modelName) throws OllamaException {
         try {
             if (numberOfRetriesForModelPull == 0) {
                 this.doPullModel(modelName);
@@ -453,7 +453,7 @@ public class OllamaAPI {
                 try {
                     this.doPullModel(modelName);
                     return;
-                } catch (OllamaBaseException e) {
+                } catch (OllamaException e) {
                     handlePullRetry(
                             modelName,
                             numberOfRetries,
@@ -462,14 +462,14 @@ public class OllamaAPI {
                     numberOfRetries++;
                 }
             }
-            throw new OllamaBaseException(
+            throw new OllamaException(
                     "Failed to pull model "
                             + modelName
                             + " after "
                             + numberOfRetriesForModelPull
                             + " retries");
         } catch (Exception e) {
-            throw new OllamaBaseException(e.getMessage(), e);
+            throw new OllamaException(e.getMessage(), e);
         }
     }
 
@@ -478,9 +478,9 @@ public class OllamaAPI {
      *
      * @param modelName the model name
      * @return the model details
-     * @throws OllamaBaseException if the response indicates an error status
+     * @throws OllamaException if the response indicates an error status
      */
-    public ModelDetail getModelDetails(String modelName) throws OllamaBaseException {
+    public ModelDetail getModelDetails(String modelName) throws OllamaException {
         long startTime = System.currentTimeMillis();
         String url = "/api/show";
         int statusCode = -1;
@@ -505,10 +505,10 @@ public class OllamaAPI {
             if (statusCode == 200) {
                 return Utils.getObjectMapper().readValue(responseBody, ModelDetail.class);
             } else {
-                throw new OllamaBaseException(statusCode + " - " + responseBody);
+                throw new OllamaException(statusCode + " - " + responseBody);
             }
         } catch (Exception e) {
-            throw new OllamaBaseException(e.getMessage(), e);
+            throw new OllamaException(e.getMessage(), e);
         } finally {
             MetricsRecorder.record(
                     url, "", false, false, false, null, null, startTime, statusCode, out);
@@ -520,9 +520,9 @@ public class OllamaAPI {
      * <a href="https://github.com/ollama/ollama/blob/main/docs/api.md#create-a-model">here</a>.
      *
      * @param customModelRequest custom model spec
-     * @throws OllamaBaseException if the response indicates an error status
+     * @throws OllamaException if the response indicates an error status
      */
-    public void createModel(CustomModelRequest customModelRequest) throws OllamaBaseException {
+    public void createModel(CustomModelRequest customModelRequest) throws OllamaException {
         long startTime = System.currentTimeMillis();
         String url = "/api/create";
         int statusCode = -1;
@@ -549,7 +549,7 @@ public class OllamaAPI {
                 String errorBody =
                         new String(response.body().readAllBytes(), StandardCharsets.UTF_8);
                 out = errorBody;
-                throw new OllamaBaseException(statusCode + " - " + errorBody);
+                throw new OllamaException(statusCode + " - " + errorBody);
             }
             try (BufferedReader reader =
                     new BufferedReader(
@@ -563,13 +563,13 @@ public class OllamaAPI {
                     LOG.debug(res.getStatus());
                     if (res.getError() != null) {
                         out = res.getError();
-                        throw new OllamaBaseException(res.getError());
+                        throw new OllamaException(res.getError());
                     }
                 }
                 out = lines;
             }
         } catch (Exception e) {
-            throw new OllamaBaseException(e.getMessage(), e);
+            throw new OllamaException(e.getMessage(), e);
         } finally {
             MetricsRecorder.record(
                     url, "", false, false, false, null, null, startTime, statusCode, out);
@@ -581,10 +581,9 @@ public class OllamaAPI {
      *
      * @param modelName the name of the model to be deleted
      * @param ignoreIfNotPresent ignore errors if the specified model is not present on the Ollama server
-     * @throws OllamaBaseException if the response indicates an error status
+     * @throws OllamaException if the response indicates an error status
      */
-    public void deleteModel(String modelName, boolean ignoreIfNotPresent)
-            throws OllamaBaseException {
+    public void deleteModel(String modelName, boolean ignoreIfNotPresent) throws OllamaException {
         long startTime = System.currentTimeMillis();
         String url = "/api/delete";
         int statusCode = -1;
@@ -616,10 +615,10 @@ public class OllamaAPI {
                 return;
             }
             if (statusCode != 200) {
-                throw new OllamaBaseException(statusCode + " - " + responseBody);
+                throw new OllamaException(statusCode + " - " + responseBody);
             }
         } catch (Exception e) {
-            throw new OllamaBaseException(statusCode + " - " + out, e);
+            throw new OllamaException(statusCode + " - " + out, e);
         } finally {
             MetricsRecorder.record(
                     url, "", false, false, false, null, null, startTime, statusCode, out);
@@ -633,9 +632,9 @@ public class OllamaAPI {
      * unloaded from memory.
      *
      * @param modelName the name of the model to unload
-     * @throws OllamaBaseException if the response indicates an error status
+     * @throws OllamaException if the response indicates an error status
      */
-    public void unloadModel(String modelName) throws OllamaBaseException {
+    public void unloadModel(String modelName) throws OllamaException {
         long startTime = System.currentTimeMillis();
         String url = "/api/generate";
         int statusCode = -1;
@@ -673,11 +672,11 @@ public class OllamaAPI {
             }
             if (statusCode != 200) {
                 LOG.debug("Unload response: {} - {}", statusCode, responseBody);
-                throw new OllamaBaseException(statusCode + " - " + responseBody);
+                throw new OllamaException(statusCode + " - " + responseBody);
             }
         } catch (Exception e) {
             LOG.debug("Unload failed: {} - {}", statusCode, out);
-            throw new OllamaBaseException(statusCode + " - " + out, e);
+            throw new OllamaException(statusCode + " - " + out, e);
         } finally {
             MetricsRecorder.record(
                     url, "", false, false, false, null, null, startTime, statusCode, out);
@@ -689,10 +688,10 @@ public class OllamaAPI {
      *
      * @param modelRequest request for '/api/embed' endpoint
      * @return embeddings
-     * @throws OllamaBaseException if the response indicates an error status
+     * @throws OllamaException if the response indicates an error status
      */
     public OllamaEmbedResponseModel embed(OllamaEmbedRequestModel modelRequest)
-            throws OllamaBaseException {
+            throws OllamaException {
         long startTime = System.currentTimeMillis();
         String url = "/api/embed";
         int statusCode = -1;
@@ -715,10 +714,10 @@ public class OllamaAPI {
                 return Utils.getObjectMapper()
                         .readValue(responseBody, OllamaEmbedResponseModel.class);
             } else {
-                throw new OllamaBaseException(statusCode + " - " + responseBody);
+                throw new OllamaException(statusCode + " - " + responseBody);
             }
         } catch (Exception e) {
-            throw new OllamaBaseException(e.getMessage(), e);
+            throw new OllamaException(e.getMessage(), e);
         } finally {
             MetricsRecorder.record(
                     url, "", false, false, false, null, null, startTime, statusCode, out);
@@ -732,11 +731,11 @@ public class OllamaAPI {
      * @param request the generation request
      * @param streamObserver the stream observer for streaming responses, or null for synchronous
      * @return the result of the generation
-     * @throws OllamaBaseException if the request fails
+     * @throws OllamaException if the request fails
      */
     public OllamaResult generate(
             OllamaGenerateRequest request, OllamaGenerateStreamObserver streamObserver)
-            throws OllamaBaseException {
+            throws OllamaException {
         try {
             if (request.isUseTools()) {
                 return generateWithToolsInternal(request, streamObserver);
@@ -755,14 +754,14 @@ public class OllamaAPI {
             }
             return generateSyncForOllamaRequestModel(request, null, null);
         } catch (Exception e) {
-            throw new OllamaBaseException(e.getMessage(), e);
+            throw new OllamaException(e.getMessage(), e);
         }
     }
 
     // (No javadoc for private helper, as is standard)
     private OllamaResult generateWithToolsInternal(
             OllamaGenerateRequest request, OllamaGenerateStreamObserver streamObserver)
-            throws OllamaBaseException {
+            throws OllamaException {
         ArrayList<OllamaChatMessage> msgs = new ArrayList<>();
         OllamaChatRequest chatRequest = new OllamaChatRequest();
         chatRequest.setModel(request.getModel());
@@ -799,10 +798,10 @@ public class OllamaAPI {
      * @param raw whether to use raw mode
     * @param think whether to use "think" mode
      * @return an OllamaAsyncResultStreamer for streaming results
-     * @throws OllamaBaseException if the request fails
+     * @throws OllamaException if the request fails
      */
     public OllamaAsyncResultStreamer generateAsync(
-            String model, String prompt, boolean raw, boolean think) throws OllamaBaseException {
+            String model, String prompt, boolean raw, boolean think) throws OllamaException {
         long startTime = System.currentTimeMillis();
         String url = "/api/generate";
         int statusCode = -1;
@@ -819,7 +818,7 @@ public class OllamaAPI {
             statusCode = ollamaAsyncResultStreamer.getHttpStatusCode();
             return ollamaAsyncResultStreamer;
         } catch (Exception e) {
-            throw new OllamaBaseException(e.getMessage(), e);
+            throw new OllamaException(e.getMessage(), e);
         } finally {
             MetricsRecorder.record(
                     url, model, raw, think, true, null, null, startTime, statusCode, null);
@@ -836,10 +835,10 @@ public class OllamaAPI {
      * @param tokenHandler callback handler to handle the last token from stream (caution: the
      *     previous tokens from stream will not be concatenated)
      * @return {@link OllamaChatResult}
-     * @throws OllamaBaseException if the response indicates an error status
+     * @throws OllamaException if the response indicates an error status
      */
     public OllamaChatResult chat(OllamaChatRequest request, OllamaChatTokenHandler tokenHandler)
-            throws OllamaBaseException {
+            throws OllamaException {
         try {
             OllamaChatEndpointCaller requestCaller =
                     new OllamaChatEndpointCaller(host, auth, requestTimeoutSeconds);
@@ -909,7 +908,7 @@ public class OllamaAPI {
             }
             return result;
         } catch (Exception e) {
-            throw new OllamaBaseException(e.getMessage(), e);
+            throw new OllamaException(e.getMessage(), e);
         }
     }
 
@@ -947,17 +946,17 @@ public class OllamaAPI {
      * providers. This method scans the caller's class for the {@link OllamaToolService} annotation
      * and recursively registers annotated tools from all the providers specified in the annotation.
      *
-     * @throws OllamaBaseException if the caller's class is not annotated with {@link
+     * @throws OllamaException if the caller's class is not annotated with {@link
      *     OllamaToolService} or if reflection-based instantiation or invocation fails
      */
-    public void registerAnnotatedTools() throws OllamaBaseException {
+    public void registerAnnotatedTools() throws OllamaException {
         try {
             Class<?> callerClass = null;
             try {
                 callerClass =
                         Class.forName(Thread.currentThread().getStackTrace()[2].getClassName());
             } catch (ClassNotFoundException e) {
-                throw new OllamaBaseException(e.getMessage(), e);
+                throw new OllamaException(e.getMessage(), e);
             }
 
             OllamaToolService ollamaToolServiceAnnotation =
@@ -975,7 +974,7 @@ public class OllamaAPI {
                 | NoSuchMethodException
                 | IllegalAccessException
                 | InvocationTargetException e) {
-            throw new OllamaBaseException(e.getMessage());
+            throw new OllamaException(e.getMessage());
         }
     }
 
@@ -1100,13 +1099,13 @@ public class OllamaAPI {
      * @param thinkingStreamHandler the stream handler for "thinking" tokens, or null if not used
      * @param responseStreamHandler the stream handler to process streaming responses, or null for non-streaming requests
      * @return the result of the Ollama API request
-     * @throws OllamaBaseException if the request fails due to an issue with the Ollama API
+     * @throws OllamaException if the request fails due to an issue with the Ollama API
      */
     private OllamaResult generateSyncForOllamaRequestModel(
             OllamaGenerateRequest ollamaRequestModel,
             OllamaGenerateTokenHandler thinkingStreamHandler,
             OllamaGenerateTokenHandler responseStreamHandler)
-            throws OllamaBaseException {
+            throws OllamaException {
         long startTime = System.currentTimeMillis();
         int statusCode = -1;
         Object out = null;
@@ -1126,7 +1125,7 @@ public class OllamaAPI {
             out = result;
             return result;
         } catch (Exception e) {
-            throw new OllamaBaseException(e.getMessage(), e);
+            throw new OllamaException(e.getMessage(), e);
         } finally {
             MetricsRecorder.record(
                     OllamaGenerateEndpointCaller.endpoint,

@@ -8,13 +8,13 @@
 */
 package io.github.ollama4j.exceptions;
 
-public class OllamaBaseException extends Exception {
+public class OllamaException extends Exception {
 
-    public OllamaBaseException(String message) {
+    public OllamaException(String message) {
         super(message);
     }
 
-    public OllamaBaseException(String message, Exception exception) {
+    public OllamaException(String message, Exception exception) {
         super(message, exception);
     }
 }
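The renamed class keeps both constructors, so the two failure shapes used throughout this commit carry over unchanged: server-side failures are thrown with a "statusCode - responseBody" message and no cause, while transport or parsing failures wrap the underlying exception as the cause. A hedged sketch of how calling code might tell them apart (the handling strategy and host are illustrative, not part of the library):

import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaException;

public class PullModelExample {
    public static void main(String[] args) {
        OllamaAPI api = new OllamaAPI("http://localhost:11434"); // illustrative host
        try {
            api.pullModel("llama3:latest"); // declared as `throws OllamaException` after this change
        } catch (OllamaException e) {
            if (e.getCause() != null) {
                // wrapped transport/parsing failure, e.g. new OllamaException(e.getMessage(), e)
                System.err.println("Client-side or I/O failure: " + e.getCause());
            } else {
                // server-side failure; message is formatted as "<status code> - <response body>"
                System.err.println("Ollama returned an error: " + e.getMessage());
            }
        }
    }
}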
@@ -10,7 +10,7 @@ package io.github.ollama4j.models.request;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.type.TypeReference;
-import io.github.ollama4j.exceptions.OllamaBaseException;
+import io.github.ollama4j.exceptions.OllamaException;
 import io.github.ollama4j.metrics.MetricsRecorder;
 import io.github.ollama4j.models.chat.*;
 import io.github.ollama4j.models.chat.OllamaChatTokenHandler;
@@ -82,13 +82,13 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
     }
 
     public OllamaChatResult call(OllamaChatRequest body, OllamaChatTokenHandler tokenHandler)
-            throws OllamaBaseException, IOException, InterruptedException {
+            throws OllamaException, IOException, InterruptedException {
         this.tokenHandler = tokenHandler;
         return callSync(body);
     }
 
     public OllamaChatResult callSync(OllamaChatRequest body)
-            throws OllamaBaseException, IOException, InterruptedException {
+            throws OllamaException, IOException, InterruptedException {
         long startTime = System.currentTimeMillis();
         HttpClient httpClient = HttpClient.newHttpClient();
         URI uri = URI.create(getHost() + endpoint);
@@ -143,7 +143,7 @@ public class OllamaChatEndpointCaller extends OllamaEndpointCaller {
         if (statusCode != 200) {
             LOG.error("Status code: {}", statusCode);
             System.out.println(responseBuffer);
-            throw new OllamaBaseException(responseBuffer.toString());
+            throw new OllamaException(responseBuffer.toString());
         }
         if (wantedToolsForStream != null && ollamaChatResponseModel != null) {
             ollamaChatResponseModel.getMessage().setToolCalls(wantedToolsForStream);

@@ -9,7 +9,7 @@
 package io.github.ollama4j.models.request;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
-import io.github.ollama4j.exceptions.OllamaBaseException;
+import io.github.ollama4j.exceptions.OllamaException;
 import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
 import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
 import io.github.ollama4j.models.generate.OllamaGenerateTokenHandler;
@@ -67,7 +67,7 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {
             OllamaRequestBody body,
             OllamaGenerateTokenHandler thinkingStreamHandler,
             OllamaGenerateTokenHandler responseStreamHandler)
-            throws OllamaBaseException, IOException, InterruptedException {
+            throws OllamaException, IOException, InterruptedException {
         responseStreamObserver =
                 new OllamaGenerateStreamObserver(thinkingStreamHandler, responseStreamHandler);
         return callSync(body);
@@ -79,13 +79,13 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {
      *
      * @param body POST body payload
      * @return result answer given by the assistant
-     * @throws OllamaBaseException any response code than 200 has been returned
+     * @throws OllamaException any response code than 200 has been returned
      * @throws IOException in case the responseStream can not be read
     * @throws InterruptedException in case the server is not reachable or network issues happen
      */
     @SuppressWarnings("DuplicatedCode")
     public OllamaResult callSync(OllamaRequestBody body)
-            throws OllamaBaseException, IOException, InterruptedException {
+            throws OllamaException, IOException, InterruptedException {
         long startTime = System.currentTimeMillis();
         HttpClient httpClient = HttpClient.newHttpClient();
         URI uri = URI.create(getHost() + endpoint);
@@ -127,7 +127,7 @@ public class OllamaGenerateEndpointCaller extends OllamaEndpointCaller {
         if (statusCode != 200) {
             LOG.error("Status code: {}", statusCode);
             LOG.error("Response: {}", responseBuffer);
-            throw new OllamaBaseException(responseBuffer.toString());
+            throw new OllamaException(responseBuffer.toString());
         } else {
             long endTime = System.currentTimeMillis();
             OllamaResult ollamaResult =

@@ -8,7 +8,7 @@
 */
 package io.github.ollama4j.models.response;
 
-import io.github.ollama4j.exceptions.OllamaBaseException;
+import io.github.ollama4j.exceptions.OllamaException;
 import io.github.ollama4j.models.generate.OllamaGenerateRequest;
 import io.github.ollama4j.models.generate.OllamaGenerateResponseModel;
 import io.github.ollama4j.utils.Constants;
@@ -146,9 +146,9 @@ public class OllamaAsyncResultStreamer extends Thread {
                 }
             }
             if (statusCode != 200) {
-                throw new OllamaBaseException(this.completeResponse);
+                throw new OllamaException(this.completeResponse);
             }
-        } catch (IOException | InterruptedException | OllamaBaseException e) {
+        } catch (IOException | InterruptedException | OllamaException e) {
             this.succeeded = false;
             this.completeResponse = "[FAILED] " + e.getMessage();
         }

@@ -31,7 +31,7 @@ public class Tools {
         @JsonProperty("function")
         private ToolSpec toolSpec;
 
-        private String type = "function";
+        @Builder.Default private String type = "function";
         @JsonIgnore private ToolFunction toolFunction;
     }
 

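The Tools change above is unrelated to the exception rename: with Lombok's @Builder, a plain field initializer is ignored when the object is built through the generated builder, so `type` would come out null unless callers set it explicitly; @Builder.Default keeps the "function" default. A standalone sketch of that behavior (the class and field names below are illustrative, not the library's inner class):

import lombok.Builder;
import lombok.Getter;

public class BuilderDefaultDemo {
    @Getter
    @Builder
    static class FunctionSpecSketch {
        // Without @Builder.Default, builder().build() would leave this field null.
        @Builder.Default private String type = "function";
    }

    public static void main(String[] args) {
        // Prints "function"; with a plain initializer (no @Builder.Default) it would print "null".
        System.out.println(FunctionSpecSketch.builder().build().getType());
    }
}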
@@ -11,7 +11,7 @@ package io.github.ollama4j.integrationtests;
 | 
			
		||||
import static org.junit.jupiter.api.Assertions.*;
 | 
			
		||||
 | 
			
		||||
import io.github.ollama4j.OllamaAPI;
 | 
			
		||||
import io.github.ollama4j.exceptions.OllamaBaseException;
 | 
			
		||||
import io.github.ollama4j.exceptions.OllamaException;
 | 
			
		||||
import io.github.ollama4j.impl.ConsoleOutputChatTokenHandler;
 | 
			
		||||
import io.github.ollama4j.impl.ConsoleOutputGenerateTokenHandler;
 | 
			
		||||
import io.github.ollama4j.models.chat.*;
 | 
			
		||||
@@ -144,7 +144,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
    @Order(1)
 | 
			
		||||
    void shouldThrowConnectExceptionForWrongEndpoint() {
 | 
			
		||||
        OllamaAPI ollamaAPI = new OllamaAPI("http://wrong-host:11434");
 | 
			
		||||
        assertThrows(OllamaBaseException.class, ollamaAPI::listModels);
 | 
			
		||||
        assertThrows(OllamaException.class, ollamaAPI::listModels);
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    /**
 | 
			
		||||
@@ -155,7 +155,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(1)
 | 
			
		||||
    void shouldReturnVersionFromVersionAPI() throws OllamaBaseException {
 | 
			
		||||
    void shouldReturnVersionFromVersionAPI() throws OllamaException {
 | 
			
		||||
        String version = api.getVersion();
 | 
			
		||||
        assertNotNull(version);
 | 
			
		||||
    }
 | 
			
		||||
@@ -167,7 +167,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(1)
 | 
			
		||||
    void shouldPingSuccessfully() throws OllamaBaseException {
 | 
			
		||||
    void shouldPingSuccessfully() throws OllamaException {
 | 
			
		||||
        boolean pingResponse = api.ping();
 | 
			
		||||
        assertTrue(pingResponse, "Ping should return true");
 | 
			
		||||
    }
 | 
			
		||||
@@ -179,7 +179,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(2)
 | 
			
		||||
    void shouldListModels() throws OllamaBaseException {
 | 
			
		||||
    void shouldListModels() throws OllamaException {
 | 
			
		||||
        List<Model> models = api.listModels();
 | 
			
		||||
        assertNotNull(models, "Models should not be null");
 | 
			
		||||
        assertTrue(models.size() >= 0, "Models list can be empty or contain elements");
 | 
			
		||||
@@ -200,7 +200,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(3)
 | 
			
		||||
    void shouldPullModelAndListModels() throws OllamaBaseException {
 | 
			
		||||
    void shouldPullModelAndListModels() throws OllamaException {
 | 
			
		||||
        api.pullModel(EMBEDDING_MODEL);
 | 
			
		||||
        List<Model> models = api.listModels();
 | 
			
		||||
        assertNotNull(models, "Models should not be null");
 | 
			
		||||
@@ -215,7 +215,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(4)
 | 
			
		||||
    void shouldGetModelDetails() throws OllamaBaseException {
 | 
			
		||||
    void shouldGetModelDetails() throws OllamaException {
 | 
			
		||||
        api.pullModel(EMBEDDING_MODEL);
 | 
			
		||||
        ModelDetail modelDetails = api.getModelDetails(EMBEDDING_MODEL);
 | 
			
		||||
        assertNotNull(modelDetails);
 | 
			
		||||
@@ -247,7 +247,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(6)
 | 
			
		||||
    void shouldGenerateWithStructuredOutput() throws OllamaBaseException {
 | 
			
		||||
    void shouldGenerateWithStructuredOutput() throws OllamaException {
 | 
			
		||||
        api.pullModel(TOOLS_MODEL);
 | 
			
		||||
 | 
			
		||||
        String prompt =
 | 
			
		||||
@@ -294,7 +294,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(6)
 | 
			
		||||
    void shouldGenerateWithDefaultOptions() throws OllamaBaseException {
 | 
			
		||||
    void shouldGenerateWithDefaultOptions() throws OllamaException {
 | 
			
		||||
        api.pullModel(GENERAL_PURPOSE_MODEL);
 | 
			
		||||
        boolean raw = false;
 | 
			
		||||
        boolean thinking = false;
 | 
			
		||||
@@ -323,7 +323,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(7)
 | 
			
		||||
    void shouldGenerateWithDefaultOptionsStreamed() throws OllamaBaseException {
 | 
			
		||||
    void shouldGenerateWithDefaultOptionsStreamed() throws OllamaException {
 | 
			
		||||
        api.pullModel(GENERAL_PURPOSE_MODEL);
 | 
			
		||||
        boolean raw = false;
 | 
			
		||||
        OllamaGenerateRequest request =
 | 
			
		||||
@@ -355,7 +355,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(8)
 | 
			
		||||
    void shouldGenerateWithCustomOptions() throws OllamaBaseException {
 | 
			
		||||
    void shouldGenerateWithCustomOptions() throws OllamaException {
 | 
			
		||||
        api.pullModel(GENERAL_PURPOSE_MODEL);
 | 
			
		||||
 | 
			
		||||
        OllamaChatRequestBuilder builder =
 | 
			
		||||
@@ -386,7 +386,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(9)
 | 
			
		||||
    void shouldChatWithSystemPrompt() throws OllamaBaseException {
 | 
			
		||||
    void shouldChatWithSystemPrompt() throws OllamaException {
 | 
			
		||||
        api.pullModel(GENERAL_PURPOSE_MODEL);
 | 
			
		||||
 | 
			
		||||
        String expectedResponse = "Bhai";
 | 
			
		||||
@@ -479,7 +479,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(11)
 | 
			
		||||
    void shouldChatWithExplicitTool() throws OllamaBaseException {
 | 
			
		||||
    void shouldChatWithExplicitTool() throws OllamaException {
 | 
			
		||||
        String theToolModel = TOOLS_MODEL;
 | 
			
		||||
        api.pullModel(theToolModel);
 | 
			
		||||
        OllamaChatRequestBuilder builder =
 | 
			
		||||
@@ -532,7 +532,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(13)
 | 
			
		||||
    void shouldChatWithExplicitToolAndUseTools() throws OllamaBaseException {
 | 
			
		||||
    void shouldChatWithExplicitToolAndUseTools() throws OllamaException {
 | 
			
		||||
        String theToolModel = TOOLS_MODEL;
 | 
			
		||||
        api.pullModel(theToolModel);
 | 
			
		||||
        OllamaChatRequestBuilder builder =
 | 
			
		||||
@@ -576,7 +576,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(14)
 | 
			
		||||
    void shouldChatWithToolsAndStream() throws OllamaBaseException {
 | 
			
		||||
    void shouldChatWithToolsAndStream() throws OllamaException {
 | 
			
		||||
        String theToolModel = TOOLS_MODEL;
 | 
			
		||||
        api.pullModel(theToolModel);
 | 
			
		||||
 | 
			
		||||
@@ -631,7 +631,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(12)
 | 
			
		||||
    void shouldChatWithAnnotatedToolSingleParam() throws OllamaBaseException {
 | 
			
		||||
    void shouldChatWithAnnotatedToolSingleParam() throws OllamaException {
 | 
			
		||||
        String theToolModel = TOOLS_MODEL;
 | 
			
		||||
        api.pullModel(theToolModel);
 | 
			
		||||
        OllamaChatRequestBuilder builder =
 | 
			
		||||
@@ -678,7 +678,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(13)
 | 
			
		||||
    void shouldChatWithAnnotatedToolMultipleParams() throws OllamaBaseException {
 | 
			
		||||
    void shouldChatWithAnnotatedToolMultipleParams() throws OllamaException {
 | 
			
		||||
        String theToolModel = TOOLS_MODEL;
 | 
			
		||||
        api.pullModel(theToolModel);
 | 
			
		||||
        OllamaChatRequestBuilder builder =
 | 
			
		||||
@@ -710,7 +710,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(15)
 | 
			
		||||
    void shouldChatWithStream() throws OllamaBaseException {
 | 
			
		||||
    void shouldChatWithStream() throws OllamaException {
 | 
			
		||||
        api.deregisterTools();
 | 
			
		||||
        api.pullModel(GENERAL_PURPOSE_MODEL);
 | 
			
		||||
        OllamaChatRequestBuilder builder =
 | 
			
		||||
@@ -738,7 +738,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(15)
 | 
			
		||||
    void shouldChatWithThinkingAndStream() throws OllamaBaseException {
 | 
			
		||||
    void shouldChatWithThinkingAndStream() throws OllamaException {
 | 
			
		||||
        api.pullModel(THINKING_TOOL_MODEL_2);
 | 
			
		||||
        OllamaChatRequestBuilder builder =
 | 
			
		||||
                OllamaChatRequestBuilder.builder().withModel(THINKING_TOOL_MODEL_2);
 | 
			
		||||
@@ -767,8 +767,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(10)
 | 
			
		||||
    void shouldChatWithImageFromURL()
 | 
			
		||||
            throws OllamaBaseException, IOException, InterruptedException {
 | 
			
		||||
    void shouldChatWithImageFromURL() throws OllamaException, IOException, InterruptedException {
 | 
			
		||||
        api.pullModel(VISION_MODEL);
 | 
			
		||||
 | 
			
		||||
        OllamaChatRequestBuilder builder =
 | 
			
		||||
@@ -795,7 +794,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(10)
 | 
			
		||||
    void shouldChatWithImageFromFileAndHistory() throws OllamaBaseException {
 | 
			
		||||
    void shouldChatWithImageFromFileAndHistory() throws OllamaException {
 | 
			
		||||
        api.pullModel(VISION_MODEL);
 | 
			
		||||
        OllamaChatRequestBuilder builder =
 | 
			
		||||
                OllamaChatRequestBuilder.builder().withModel(VISION_MODEL);
 | 
			
		||||
@@ -857,7 +856,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(18)
 | 
			
		||||
    void shouldGenerateWithImageFiles() throws OllamaBaseException {
 | 
			
		||||
    void shouldGenerateWithImageFiles() throws OllamaException {
 | 
			
		||||
        api.pullModel(VISION_MODEL);
 | 
			
		||||
        try {
 | 
			
		||||
            OllamaGenerateRequest request =
 | 
			
		||||
@@ -876,7 +875,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
            assertNotNull(result);
 | 
			
		||||
            assertNotNull(result.getResponse());
 | 
			
		||||
            assertFalse(result.getResponse().isEmpty());
 | 
			
		||||
        } catch (OllamaBaseException e) {
 | 
			
		||||
        } catch (OllamaException e) {
 | 
			
		||||
            fail(e);
 | 
			
		||||
        } catch (IOException e) {
 | 
			
		||||
            throw new RuntimeException(e);
 | 
			
		||||
@@ -891,7 +890,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(20)
 | 
			
		||||
    void shouldGenerateWithImageFilesAndResponseStreamed() throws OllamaBaseException, IOException {
 | 
			
		||||
    void shouldGenerateWithImageFilesAndResponseStreamed() throws OllamaException, IOException {
 | 
			
		||||
        api.pullModel(VISION_MODEL);
 | 
			
		||||
        OllamaGenerateRequest request =
 | 
			
		||||
                OllamaGenerateRequestBuilder.builder()
 | 
			
		||||
@@ -922,7 +921,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(20)
 | 
			
		||||
    void shouldGenerateWithThinking() throws OllamaBaseException {
 | 
			
		||||
    void shouldGenerateWithThinking() throws OllamaException {
 | 
			
		||||
        api.pullModel(THINKING_TOOL_MODEL);
 | 
			
		||||
 | 
			
		||||
        boolean raw = false;
 | 
			
		||||
@@ -954,7 +953,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(20)
 | 
			
		||||
    void shouldGenerateWithThinkingAndStreamHandler() throws OllamaBaseException {
 | 
			
		||||
    void shouldGenerateWithThinkingAndStreamHandler() throws OllamaException {
 | 
			
		||||
        api.pullModel(THINKING_TOOL_MODEL);
 | 
			
		||||
        boolean raw = false;
 | 
			
		||||
        OllamaGenerateRequest request =
 | 
			
		||||
@@ -990,7 +989,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(21)
 | 
			
		||||
    void shouldGenerateWithRawMode() throws OllamaBaseException {
 | 
			
		||||
    void shouldGenerateWithRawMode() throws OllamaException {
 | 
			
		||||
        api.pullModel(GENERAL_PURPOSE_MODEL);
 | 
			
		||||
        api.unloadModel(GENERAL_PURPOSE_MODEL);
 | 
			
		||||
        boolean raw = true;
 | 
			
		||||
@@ -1020,7 +1019,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(22)
 | 
			
		||||
    void shouldGenerateWithRawModeAndStreaming() throws OllamaBaseException {
 | 
			
		||||
    void shouldGenerateWithRawModeAndStreaming() throws OllamaException {
 | 
			
		||||
        api.pullModel(GENERAL_PURPOSE_MODEL);
 | 
			
		||||
        boolean raw = true;
 | 
			
		||||
        OllamaGenerateRequest request =
 | 
			
		||||
@@ -1082,7 +1081,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(24)
 | 
			
		||||
    void shouldGenerateWithAllParametersEnabled() throws OllamaBaseException {
 | 
			
		||||
    void shouldGenerateWithAllParametersEnabled() throws OllamaException {
 | 
			
		||||
        api.pullModel(THINKING_TOOL_MODEL);
 | 
			
		||||
        // Settinng raw here instructs to keep the response raw. Even if the model generates
 | 
			
		||||
        // 'thinking' tokens, they will not be received as separate tokens and will be mised with
 | 
			
		||||
@@ -1102,8 +1101,8 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
                        .build();
 | 
			
		||||
        OllamaGenerateStreamObserver handler =
 | 
			
		||||
                new OllamaGenerateStreamObserver(
 | 
			
		||||
                        thinkingToken -> LOG.info("THINKING: {}", thinkingToken),
 | 
			
		||||
                        responseToken -> LOG.info("RESPONSE: {}", responseToken));
 | 
			
		||||
                        thinkingToken -> LOG.info("Thinking token: {}", thinkingToken),
 | 
			
		||||
                        responseToken -> LOG.info("Response token: {}", responseToken));
 | 
			
		||||
        OllamaResult result = api.generate(request, handler);
 | 
			
		||||
        assertNotNull(result);
 | 
			
		||||
        assertNotNull(result.getResponse());
 | 
			
		||||
@@ -1118,7 +1117,7 @@ class OllamaAPIIntegrationTest {
 | 
			
		||||
     */
 | 
			
		||||
    @Test
 | 
			
		||||
    @Order(25)
 | 
			
		||||
    void shouldGenerateWithComplexStructuredOutput() throws OllamaBaseException {
 | 
			
		||||
    void shouldGenerateWithComplexStructuredOutput() throws OllamaException {
 | 
			
		||||
        api.pullModel(TOOLS_MODEL);
 | 
			
		||||
 | 
			
		||||
         String prompt =
@@ -1178,7 +1177,7 @@ class OllamaAPIIntegrationTest {
      */
     @Test
     @Order(26)
-    void shouldChatWithThinkingNoStream() throws OllamaBaseException {
+    void shouldChatWithThinkingNoStream() throws OllamaException {
         api.pullModel(THINKING_TOOL_MODEL);
         OllamaChatRequestBuilder builder =
                 OllamaChatRequestBuilder.builder().withModel(THINKING_TOOL_MODEL);
@@ -1207,7 +1206,7 @@ class OllamaAPIIntegrationTest {
      */
     @Test
     @Order(27)
-    void shouldChatWithCustomOptionsAndStreaming() throws OllamaBaseException {
+    void shouldChatWithCustomOptionsAndStreaming() throws OllamaException {
         api.pullModel(GENERAL_PURPOSE_MODEL);

         OllamaChatRequestBuilder builder =
@@ -1240,7 +1239,7 @@ class OllamaAPIIntegrationTest {
      */
     @Test
     @Order(28)
-    void shouldChatWithToolsThinkingAndStreaming() throws OllamaBaseException {
+    void shouldChatWithToolsThinkingAndStreaming() throws OllamaException {
         api.pullModel(THINKING_TOOL_MODEL_2);

         api.registerTool(EmployeeFinderToolSpec.getSpecification());
@@ -1272,7 +1271,7 @@ class OllamaAPIIntegrationTest {
      */
     @Test
     @Order(31)
-    void shouldChatWithMultipleImages() throws OllamaBaseException {
+    void shouldChatWithMultipleImages() throws OllamaException {
         api.pullModel(VISION_MODEL);

         List<OllamaChatToolCalls> tools = Collections.emptyList();
@@ -1318,7 +1317,7 @@ class OllamaAPIIntegrationTest {
                         .build();
         OllamaGenerateStreamObserver handler = new OllamaGenerateStreamObserver(null, null);
         assertThrows(
-                OllamaBaseException.class,
+                OllamaException.class,
                 () -> {
                     api.generate(request, handler);
                 });
@@ -1331,7 +1330,7 @@ class OllamaAPIIntegrationTest {
      */
     @Test
     @Order(33)
-    void shouldHandleEmptyMessage() throws OllamaBaseException {
+    void shouldHandleEmptyMessage() throws OllamaException {
         api.pullModel(GENERAL_PURPOSE_MODEL);

         List<OllamaChatToolCalls> tools = Collections.emptyList();
@@ -1356,7 +1355,7 @@ class OllamaAPIIntegrationTest {
      */
     @Test
     @Order(34)
-    void shouldGenerateWithExtremeParameters() throws OllamaBaseException {
+    void shouldGenerateWithExtremeParameters() throws OllamaException {
         api.pullModel(GENERAL_PURPOSE_MODEL);
         OllamaGenerateRequest request =
                 OllamaGenerateRequestBuilder.builder()
@@ -1409,7 +1408,7 @@ class OllamaAPIIntegrationTest {
      */
     @Test
     @Order(36)
-    void shouldChatWithKeepAlive() throws OllamaBaseException {
+    void shouldChatWithKeepAlive() throws OllamaException {
         api.pullModel(GENERAL_PURPOSE_MODEL);

         OllamaChatRequestBuilder builder =
@@ -1434,7 +1433,7 @@ class OllamaAPIIntegrationTest {
      */
     @Test
     @Order(37)
-    void shouldGenerateWithAdvancedOptions() throws OllamaBaseException {
+    void shouldGenerateWithAdvancedOptions() throws OllamaException {
         api.pullModel(GENERAL_PURPOSE_MODEL);
         OllamaGenerateRequest request =
                 OllamaGenerateRequestBuilder.builder()
@@ -1468,7 +1467,7 @@ class OllamaAPIIntegrationTest {
      */
     @Test
     @Order(38)
-    void shouldHandleConcurrentChatRequests() throws OllamaBaseException, InterruptedException {
+    void shouldHandleConcurrentChatRequests() throws OllamaException, InterruptedException {
         api.pullModel(GENERAL_PURPOSE_MODEL);

         int numThreads = 3;
@@ -1570,8 +1569,13 @@ class EmployeeFinderToolSpec {
                                 .build())
                 .toolFunction(
                         arguments -> {
-                            String employeeName = arguments.get("employee-name").toString();
+                            String address = null;
+                            String employeeName = null;
+                            try {
+                                employeeName = arguments.get("employee-name").toString();
+                            } catch (Exception e) {
+                                employeeName = "Mr. LLoyd Llama";
+                            }
+                            try {
+                                address = arguments.get("employee-address").toString();
+                            } catch (Exception e) {
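The EmployeeFinderToolSpec hunk above stops calling arguments.get("employee-name").toString() directly and instead falls back to a default when an argument is missing. A minimal plain-Java sketch of that fallback pattern; the class name, helper method, map contents, and fallback strings are illustrative assumptions, not part of the commit:

    import java.util.Map;

    // Illustrative sketch only: a standalone version of the try/catch fallback
    // the tool function in the hunk above now uses for its arguments.
    public class DefensiveArgLookup {

        // Returns arguments.get(key).toString(), or the fallback when the key is
        // missing or null (the get() result would be null, so toString() throws).
        static String argOrDefault(Map<String, Object> arguments, String key, String fallback) {
            try {
                return arguments.get(key).toString();
            } catch (Exception e) {
                return fallback;
            }
        }

        public static void main(String[] args) {
            Map<String, Object> arguments = Map.of("employee-name", "Mr. LLoyd Llama");
            System.out.println(argOrDefault(arguments, "employee-name", "unknown"));
            System.out.println(argOrDefault(arguments, "employee-address", "unknown"));
        }
    }
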
@@ -11,7 +11,7 @@ package io.github.ollama4j.integrationtests;
 import static org.junit.jupiter.api.Assertions.*;

 import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.exceptions.OllamaBaseException;
+import io.github.ollama4j.exceptions.OllamaException;
 import io.github.ollama4j.models.generate.OllamaGenerateRequest;
 import io.github.ollama4j.models.generate.OllamaGenerateRequestBuilder;
 import io.github.ollama4j.models.generate.OllamaGenerateStreamObserver;
@@ -181,7 +181,7 @@ public class WithAuth {
     @Test
     @Order(2)
     void testAskModelWithStructuredOutput()
-            throws OllamaBaseException, IOException, InterruptedException, URISyntaxException {
+            throws OllamaException, IOException, InterruptedException, URISyntaxException {
         api.setBearerAuth(BEARER_AUTH_TOKEN);
         String model = GENERAL_PURPOSE_MODEL;
         api.pullModel(model);
@@ -13,7 +13,7 @@ import static org.junit.jupiter.api.Assertions.fail;
 import static org.mockito.Mockito.*;

 import io.github.ollama4j.OllamaAPI;
-import io.github.ollama4j.exceptions.OllamaBaseException;
+import io.github.ollama4j.exceptions.OllamaException;
 import io.github.ollama4j.exceptions.RoleNotFoundException;
 import io.github.ollama4j.models.chat.OllamaChatMessageRole;
 import io.github.ollama4j.models.embed.OllamaEmbedRequestModel;
@@ -42,7 +42,7 @@ class TestMockedAPIs {
             doNothing().when(ollamaAPI).pullModel(model);
             ollamaAPI.pullModel(model);
             verify(ollamaAPI, times(1)).pullModel(model);
-        } catch (OllamaBaseException e) {
+        } catch (OllamaException e) {
             throw new RuntimeException(e);
         }
     }
@@ -54,7 +54,7 @@ class TestMockedAPIs {
             when(ollamaAPI.listModels()).thenReturn(new ArrayList<>());
             ollamaAPI.listModels();
             verify(ollamaAPI, times(1)).listModels();
-        } catch (OllamaBaseException e) {
+        } catch (OllamaException e) {
             throw new RuntimeException(e);
         }
     }
@@ -72,7 +72,7 @@ class TestMockedAPIs {
             doNothing().when(ollamaAPI).createModel(customModelRequest);
             ollamaAPI.createModel(customModelRequest);
             verify(ollamaAPI, times(1)).createModel(customModelRequest);
-        } catch (OllamaBaseException e) {
+        } catch (OllamaException e) {
             throw new RuntimeException(e);
         }
     }
@@ -85,7 +85,7 @@ class TestMockedAPIs {
             doNothing().when(ollamaAPI).deleteModel(model, true);
             ollamaAPI.deleteModel(model, true);
             verify(ollamaAPI, times(1)).deleteModel(model, true);
-        } catch (OllamaBaseException e) {
+        } catch (OllamaException e) {
             throw new RuntimeException(e);
         }
     }
@@ -112,7 +112,7 @@ class TestMockedAPIs {
             when(ollamaAPI.getModelDetails(model)).thenReturn(new ModelDetail());
             ollamaAPI.getModelDetails(model);
             verify(ollamaAPI, times(1)).getModelDetails(model);
-        } catch (OllamaBaseException e) {
+        } catch (OllamaException e) {
             throw new RuntimeException(e);
         }
     }
@@ -129,7 +129,7 @@ class TestMockedAPIs {
             when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResponseModel());
             ollamaAPI.embed(m);
             verify(ollamaAPI, times(1)).embed(m);
-        } catch (OllamaBaseException e) {
+        } catch (OllamaException e) {
             throw new RuntimeException(e);
         }
     }
@@ -144,7 +144,7 @@ class TestMockedAPIs {
             when(ollamaAPI.embed(m)).thenReturn(new OllamaEmbedResponseModel());
             ollamaAPI.embed(m);
             verify(ollamaAPI, times(1)).embed(m);
-        } catch (OllamaBaseException e) {
+        } catch (OllamaException e) {
             throw new RuntimeException(e);
         }
     }
@@ -159,7 +159,7 @@ class TestMockedAPIs {
                     .thenReturn(new OllamaEmbedResponseModel());
             ollamaAPI.embed(new OllamaEmbedRequestModel(model, inputs));
             verify(ollamaAPI, times(1)).embed(new OllamaEmbedRequestModel(model, inputs));
-        } catch (OllamaBaseException e) {
+        } catch (OllamaException e) {
             throw new RuntimeException(e);
         }
     }
@@ -184,7 +184,7 @@ class TestMockedAPIs {
                     .thenReturn(new OllamaResult("", "", 0, 200));
             ollamaAPI.generate(request, observer);
             verify(ollamaAPI, times(1)).generate(request, observer);
-        } catch (OllamaBaseException e) {
+        } catch (OllamaException e) {
             throw new RuntimeException(e);
         }
     }
@@ -236,7 +236,7 @@ class TestMockedAPIs {
             when(ollamaAPI.generate(request, handler)).thenReturn(new OllamaResult("", "", 0, 200));
             ollamaAPI.generate(request, handler);
             verify(ollamaAPI, times(1)).generate(request, handler);
-        } catch (OllamaBaseException e) {
+        } catch (OllamaException e) {
             throw new RuntimeException(e);
         } catch (IOException e) {
             throw new RuntimeException(e);
@@ -244,7 +244,7 @@ class TestMockedAPIs {
     }

     @Test
-    void testAskAsync() throws OllamaBaseException {
+    void testAskAsync() throws OllamaException {
         OllamaAPI ollamaAPI = Mockito.mock(OllamaAPI.class);
         String model = "llama2";
         String prompt = "some prompt text";
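For code outside the test suite, the rename means catching OllamaException wherever OllamaBaseException was caught before, as the TestMockedAPIs hunks above show. A minimal caller sketch under that assumption; the class name, endpoint URL, and model name are illustrative, and whether pullModel declares additional checked exceptions is not visible in this diff:

    import io.github.ollama4j.OllamaAPI;
    import io.github.ollama4j.exceptions.OllamaException;

    public class PullModelExample {
        public static void main(String[] args) {
            // Assumed local Ollama endpoint; adjust for your environment.
            OllamaAPI api = new OllamaAPI("http://localhost:11434");
            try {
                api.pullModel("llama2");
            } catch (OllamaException e) {
                // Same wrapping strategy the mocked tests use after the refactor.
                throw new RuntimeException(e);
            }
        }
    }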