mirror of https://github.com/amithkoujalgi/ollama4j.git
synced 2025-05-15 20:07:10 +02:00

Updated default request timeout to 10 seconds

Besides raising the default request timeout from 3 to 10 seconds, this commit consolidates imports, normalizes whitespace and javadoc formatting in OllamaAPI and OllamaEndpointCaller, and tags a docs code snippet as Java.

This commit is contained in:
parent e4e717b747
commit 04124cf978
@@ -112,7 +112,7 @@ You will get a response similar to:
 
 ## Use a simple Console Output Stream Handler
 
-```
+```java
 import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler;
 
 public class Main {
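For reference, the docs snippet that gains the `java` language tag above is used along these lines. A minimal sketch, assuming a local Ollama server at `http://localhost:11434`, an already-pulled `llama2` model (both placeholders), and that `OllamaStreamHandler` and `OptionsBuilder` live in the packages suggested by this diff's import style:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.OllamaStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.impl.ConsoleOutputStreamHandler;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class Main {
    public static void main(String[] args) throws Exception {
        // Host and model are placeholders for this sketch
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
        // Prints each streamed chunk of the response to stdout as it arrives
        OllamaStreamHandler streamHandler = new ConsoleOutputStreamHandler();
        ollamaAPI.generate("llama2", "Why is the sky blue?",
                new OptionsBuilder().build(), streamHandler);
    }
}
```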
@@ -9,18 +9,13 @@ import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;
 import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingResponseModel;
 import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;
 import io.github.amithkoujalgi.ollama4j.core.models.generate.OllamaGenerateRequestModel;
-import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFileContentsRequest;
-import io.github.amithkoujalgi.ollama4j.core.models.request.CustomModelFilePathRequest;
-import io.github.amithkoujalgi.ollama4j.core.models.request.ModelRequest;
-import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaChatEndpointCaller;
-import io.github.amithkoujalgi.ollama4j.core.models.request.OllamaGenerateEndpointCaller;
+import io.github.amithkoujalgi.ollama4j.core.models.request.*;
 import io.github.amithkoujalgi.ollama4j.core.utils.Options;
 import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.*;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.http.HttpClient;
@@ -33,16 +28,16 @@ import java.time.Duration;
 import java.util.ArrayList;
 import java.util.Base64;
 import java.util.List;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-/** The base Ollama API class. */
+/**
+ * The base Ollama API class.
+ */
 @SuppressWarnings("DuplicatedCode")
 public class OllamaAPI {
 
     private static final Logger logger = LoggerFactory.getLogger(OllamaAPI.class);
     private final String host;
-    private long requestTimeoutSeconds = 3;
+    private long requestTimeoutSeconds = 10;
     private boolean verbose = true;
     private BasicAuth basicAuth;
 
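The substantive change in this commit is the line above: freshly constructed `OllamaAPI` clients now wait up to 10 seconds per request instead of 3. Callers who need more headroom can still override the default; a minimal sketch, assuming the field is exposed through the library's `setRequestTimeoutSeconds` setter:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;

public class TimeoutExample {
    public static void main(String[] args) {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
        // Slow models may need more than the new 10-second default;
        // the value is in seconds, matching requestTimeoutSeconds above
        ollamaAPI.setRequestTimeoutSeconds(120);
    }
}
```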
@@ -323,7 +318,7 @@ public class OllamaAPI {
      * @param modelRequest request for '/api/embeddings' endpoint
      * @return embeddings
      */
-    public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException{
+    public List<Double> generateEmbeddings(OllamaEmbeddingsRequestModel modelRequest) throws IOException, InterruptedException, OllamaBaseException {
         URI uri = URI.create(this.host + "/api/embeddings");
         String jsonData = modelRequest.toString();
         HttpClient httpClient = HttpClient.newHttpClient();
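For context, a usage sketch for the `/api/embeddings` call whose signature is reformatted above. The `OllamaEmbeddingsRequestBuilder` name and its `getInstance(model, prompt)` signature are assumptions based on the library's builder conventions, and the model name is a placeholder:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.embeddings.OllamaEmbeddingsRequestModel;

import java.util.List;

public class EmbeddingsExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
        // Hypothetical builder usage; "nomic-embed-text" is a placeholder model
        OllamaEmbeddingsRequestModel request =
                OllamaEmbeddingsRequestBuilder
                        .getInstance("nomic-embed-text", "Why is the sky blue?")
                        .build();
        List<Double> embedding = ollamaAPI.generateEmbeddings(request);
        System.out.println("dimensions: " + embedding.size());
    }
}
```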
@@ -360,17 +355,17 @@
             throws OllamaBaseException, IOException, InterruptedException {
         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt);
         ollamaRequestModel.setOptions(options.getOptionsMap());
-        return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler);
+        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
     }
 
     /**
      * Convenience method to call Ollama API without streaming responses.
-     *
+     * <p>
      * Uses {@link #generate(String, String, Options, OllamaStreamHandler)}
      */
     public OllamaResult generate(String model, String prompt, Options options)
             throws OllamaBaseException, IOException, InterruptedException {
-        return generate(model, prompt, options,null);
+        return generate(model, prompt, options, null);
     }
 
     /**
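The convenience overload documented above simply forwards a `null` stream handler, so the call blocks until the full response is ready. A minimal sketch (host, model, and prompt are placeholders; `OptionsBuilder` is assumed from the library's utils package):

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
import io.github.amithkoujalgi.ollama4j.core.utils.OptionsBuilder;

public class GenerateExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
        // No stream handler: the overload forwards null and blocks until done
        OllamaResult result = ollamaAPI.generate("llama2", "Why is the sky blue?",
                new OptionsBuilder().build());
        System.out.println(result.getResponse());
    }
}
```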
@@ -415,19 +410,19 @@
         }
         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
         ollamaRequestModel.setOptions(options.getOptionsMap());
-        return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler);
+        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
     }
 
     /**
      * Convenience method to call Ollama API without streaming responses.
-     *
+     * <p>
      * Uses {@link #generateWithImageFiles(String, String, List, Options, OllamaStreamHandler)}
      */
     public OllamaResult generateWithImageFiles(
             String model, String prompt, List<File> imageFiles, Options options)
-            throws OllamaBaseException, IOException, InterruptedException{
+            throws OllamaBaseException, IOException, InterruptedException {
         return generateWithImageFiles(model, prompt, imageFiles, options, null);
     }
 
     /**
      * With one or more image URLs, ask a question to a model running on Ollama server. This is a
@@ -451,12 +446,12 @@
         }
         OllamaGenerateRequestModel ollamaRequestModel = new OllamaGenerateRequestModel(model, prompt, images);
         ollamaRequestModel.setOptions(options.getOptionsMap());
-        return generateSyncForOllamaRequestModel(ollamaRequestModel,streamHandler);
+        return generateSyncForOllamaRequestModel(ollamaRequestModel, streamHandler);
     }
 
     /**
      * Convenience method to call Ollama API without streaming responses.
-     *
+     * <p>
      * Uses {@link #generateWithImageURLs(String, String, List, Options, OllamaStreamHandler)}
      */
     public OllamaResult generateWithImageURLs(String model, String prompt, List<String> imageURLs,
@@ -466,7 +461,6 @@
     }
 
 
-
     /**
      * Ask a question to a model based on a given message stack (i.e. a chat history). Creates a synchronous call to the api
      * 'api/chat'.
@@ -478,14 +472,14 @@
      * @throws IOException in case the responseStream can not be read
      * @throws InterruptedException in case the server is not reachable or network issues happen
      */
-    public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException{
+    public OllamaChatResult chat(String model, List<OllamaChatMessage> messages) throws OllamaBaseException, IOException, InterruptedException {
         OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(model);
         return chat(builder.withMessages(messages).build());
     }
 
     /**
      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
-     *
+     * <p>
      * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
      *
      * @param request request object to be sent to the server
@@ -494,13 +488,13 @@
      * @throws IOException in case the responseStream can not be read
      * @throws InterruptedException in case the server is not reachable or network issues happen
      */
-    public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException{
-        return chat(request,null);
+    public OllamaChatResult chat(OllamaChatRequestModel request) throws OllamaBaseException, IOException, InterruptedException {
+        return chat(request, null);
     }
 
     /**
      * Ask a question to a model using an {@link OllamaChatRequestModel}. This can be constructed using an {@link OllamaChatRequestBuilder}.
-     *
+     * <p>
      * Hint: the OllamaChatRequestModel#getStream() property is not implemented.
      *
      * @param request request object to be sent to the server
@@ -510,14 +504,13 @@
      * @throws IOException in case the responseStream can not be read
      * @throws InterruptedException in case the server is not reachable or network issues happen
      */
-    public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException{
+    public OllamaChatResult chat(OllamaChatRequestModel request, OllamaStreamHandler streamHandler) throws OllamaBaseException, IOException, InterruptedException {
         OllamaChatEndpointCaller requestCaller = new OllamaChatEndpointCaller(host, basicAuth, requestTimeoutSeconds, verbose);
         OllamaResult result;
-        if(streamHandler != null){
+        if (streamHandler != null) {
             request.setStream(true);
             result = requestCaller.call(request, streamHandler);
-        }
-        else {
+        } else {
             result = requestCaller.callSync(request);
         }
         return new OllamaChatResult(result.getResponse(), result.getResponseTime(), result.getHttpStatusCode(), request.getMessages());
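The `if (streamHandler != null)` branch above is the entire streaming switch for `/api/chat`: a non-null handler turns on `stream` and routes through `call`, otherwise `callSync` runs. A sketch of both paths, assuming `OllamaStreamHandler` is a single-method interface usable as a lambda and that the builder exposes a `withMessage(OllamaChatMessageRole, String)` method:

```java
import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatMessageRole;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestBuilder;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatRequestModel;
import io.github.amithkoujalgi.ollama4j.core.models.chat.OllamaChatResult;

public class ChatExample {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");
        OllamaChatRequestModel request = OllamaChatRequestBuilder.getInstance("llama2")
                .withMessage(OllamaChatMessageRole.USER, "Why is the sky blue?")
                .build();

        // Non-streaming path: callSync handles the request in one shot
        OllamaChatResult chatResult = ollamaAPI.chat(request);
        System.out.println(chatResult.getResponse());

        // Streaming path: setStream(true) is applied internally and the
        // handler sees the response text as it grows
        ollamaAPI.chat(request, partialResponse -> System.out.println(partialResponse));
    }
}
```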
@@ -1,5 +1,15 @@
 package io.github.amithkoujalgi.ollama4j.core.models.request;
 
+import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
+import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
+import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
+import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
+import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
+import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
@@ -12,17 +22,6 @@ import java.nio.charset.StandardCharsets;
 import java.time.Duration;
 import java.util.Base64;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import io.github.amithkoujalgi.ollama4j.core.OllamaAPI;
-import io.github.amithkoujalgi.ollama4j.core.exceptions.OllamaBaseException;
-import io.github.amithkoujalgi.ollama4j.core.models.BasicAuth;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaErrorResponseModel;
-import io.github.amithkoujalgi.ollama4j.core.models.OllamaResult;
-import io.github.amithkoujalgi.ollama4j.core.utils.OllamaRequestBody;
-import io.github.amithkoujalgi.ollama4j.core.utils.Utils;
-
 /**
  * Abstract helperclass to call the ollama api server.
  */
@@ -56,8 +55,7 @@ public abstract class OllamaEndpointCaller {
      * @throws IOException in case the responseStream can not be read
      * @throws InterruptedException in case the server is not reachable or network issues happen
      */
-    public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException{
-
+    public OllamaResult callSync(OllamaRequestBody body) throws OllamaBaseException, IOException, InterruptedException {
         // Create Request
         long startTime = System.currentTimeMillis();
         HttpClient httpClient = HttpClient.newHttpClient();
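`callSync` is where the configurable timeout ultimately applies. The request construction elided by this hunk plausibly follows the standard `java.net.http` pattern; a sketch under that assumption (URL and request body are placeholders, not the library's verbatim code):

```java
import java.io.InputStream;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;

public class CallSyncSketch {
    public static void main(String[] args) throws Exception {
        long requestTimeoutSeconds = 10; // the new default from this commit
        HttpClient httpClient = HttpClient.newHttpClient();
        // HttpRequest.Builder#timeout is what requestTimeoutSeconds feeds:
        // the exchange aborts with an HttpTimeoutException once it elapses
        HttpRequest request = HttpRequest
                .newBuilder(URI.create("http://localhost:11434/api/generate"))
                .timeout(Duration.ofSeconds(requestTimeoutSeconds))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(
                        "{\"model\": \"llama2\", \"prompt\": \"Why is the sky blue?\"}"))
                .build();
        HttpResponse<InputStream> response =
                httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
        System.out.println("HTTP " + response.statusCode());
    }
}
```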
@@ -71,7 +69,6 @@
         HttpResponse<InputStream> response =
                 httpClient.send(request, HttpResponse.BodyHandlers.ofInputStream());
 
-
         int statusCode = response.statusCode();
         InputStream responseBodyStream = response.body();
         StringBuilder responseBuffer = new StringBuilder();
@@ -96,7 +93,7 @@
                             OllamaErrorResponseModel.class);
                     responseBuffer.append(ollamaResponseModel.getError());
                 } else {
-                    boolean finished = parseResponseAndAddToBuffer(line,responseBuffer);
+                    boolean finished = parseResponseAndAddToBuffer(line, responseBuffer);
                     if (finished) {
                         break;
                     }