Added ps() API

Signed-off-by: Amith Koujalgi <koujalgi.amith@gmail.com>
Amith Koujalgi 2024-07-27 20:54:04 +05:30
parent 0af80865c3
commit f9cf11ecdf
3 changed files with 98 additions and 0 deletions

View File

@ -0,0 +1,30 @@
---
sidebar_position: 4
---
# PS
This API provides a list of running models and details about each model currently loaded into memory.
It corresponds to Ollama's [PS](https://github.com/ollama/ollama/blob/main/docs/api.md#list-running-models) endpoint.
```java
package io.github.ollama4j.localtests;

import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.ps.ModelsProcessResponse;

import java.io.IOException;

public class Main {
    public static void main(String[] args) throws OllamaBaseException, IOException, InterruptedException {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");

        ModelsProcessResponse response = ollamaAPI.ps();

        System.out.println(response);
    }
}
```
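
Since `ModelsProcessResponse` is a Lombok `@Data` class, the generated getters can be used to inspect each loaded model. A minimal sketch building on the example above (the class name `ListLoadedModels` and the printed fields are illustrative):

```java
package io.github.ollama4j.localtests;

import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.ps.ModelsProcessResponse;

public class ListLoadedModels {
    public static void main(String[] args) throws Exception {
        OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");

        ModelsProcessResponse response = ollamaAPI.ps();

        // Lombok generates getters for the fields declared on ModelsProcessResponse,
        // e.g. getModels(), getName(), getSizeVram().
        for (ModelsProcessResponse.ModelProcess model : response.getModels()) {
            System.out.println(model.getName() + " -> " + model.getSizeVram() + " bytes in VRAM");
        }
    }
}
```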

View File

@ -1,5 +1,6 @@
package io.github.ollama4j;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.exceptions.ToolInvocationException;
import io.github.ollama4j.exceptions.ToolNotFoundException;
@ -12,6 +13,7 @@ import io.github.ollama4j.models.embeddings.OllamaEmbeddingResponseModel;
import io.github.ollama4j.models.embeddings.OllamaEmbeddingsRequestModel;
import io.github.ollama4j.models.generate.OllamaGenerateRequestModel;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.models.ps.ModelsProcessResponse;
import io.github.ollama4j.models.request.*;
import io.github.ollama4j.tools.*;
import io.github.ollama4j.utils.Options;
@ -110,6 +112,37 @@ public class OllamaAPI {
        return statusCode == 200;
    }
    /**
     * List models that are currently loaded into memory.
     *
     * @return a {@link ModelsProcessResponse} describing the models loaded into memory
     * @throws IOException          if the request to the Ollama server fails
     * @throws InterruptedException if the request is interrupted
     * @throws OllamaBaseException  if the server responds with a non-200 status code
     */
    public ModelsProcessResponse ps() throws IOException, InterruptedException, OllamaBaseException {
        String url = this.host + "/api/ps";
        HttpClient httpClient = HttpClient.newHttpClient();
        HttpRequest httpRequest;
        try {
            // Build a plain GET request against the /api/ps endpoint
            httpRequest =
                    getRequestBuilderDefault(new URI(url))
                            .header("Accept", "application/json")
                            .header("Content-type", "application/json")
                            .GET()
                            .build();
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
        HttpResponse<String> response =
                httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
        int statusCode = response.statusCode();
        String responseString = response.body();
        if (statusCode == 200) {
            // Deserialize the JSON body into the response model
            return Utils.getObjectMapper()
                    .readValue(responseString, ModelsProcessResponse.class);
        } else {
            throw new OllamaBaseException(statusCode + " - " + responseString);
        }
    }
    /**
     * List available models from Ollama server.
     *
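
For reference, the new method boils down to a plain GET against the `/api/ps` endpoint. A standalone sketch of the equivalent request with `java.net.http` (the host value is illustrative; `getRequestBuilderDefault` in `OllamaAPI` may apply additional defaults on top of the headers shown):

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class RawPsRequest {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();

        // Same endpoint and headers that ps() sends.
        HttpRequest request = HttpRequest.newBuilder(URI.create("http://localhost:11434/api/ps"))
                .header("Accept", "application/json")
                .header("Content-type", "application/json")
                .GET()
                .build();

        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode()); // 200 on success
        System.out.println(response.body());       // raw JSON describing the loaded models
    }
}
```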

View File

@ -0,0 +1,35 @@
package io.github.ollama4j.models.ps;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;

@Data
@NoArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class ModelsProcessResponse {

    private List<ModelProcess> models;

    @Data
    @NoArgsConstructor
    public static class ModelProcess {
        private String name;
        private String model;
        private long size;
        private String digest;
        private ModelDetails details;

        // The Ollama /api/ps response uses snake_case keys, so map them explicitly
        @JsonProperty("expires_at")
        private String expiresAt;

        @JsonProperty("size_vram")
        private long sizeVram;
    }

    @Data
    @NoArgsConstructor
    public static class ModelDetails {
        @JsonProperty("parent_model")
        private String parentModel;

        private String format;
        private String family;
        private List<String> families;

        @JsonProperty("parameter_size")
        private String parameterSize;

        @JsonProperty("quantization_level")
        private String quantizationLevel;
    }
}
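
To sanity-check the mapping between the snake_case keys documented for `/api/ps` and the camelCase fields above, the model can be exercised directly against a small JSON fragment. A minimal sketch (the test class and sample values are illustrative, and the mapper is configured for snake_case here rather than relying on any project-wide Jackson configuration):

```java
package io.github.ollama4j.models.ps;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategies;

public class ModelsProcessResponseSketch {
    public static void main(String[] args) throws Exception {
        // Keys follow the snake_case layout documented for the Ollama /api/ps endpoint;
        // values here are placeholders.
        String json = "{\"models\":[{\"name\":\"mistral:latest\",\"model\":\"mistral:latest\","
                + "\"size\":5137025024,\"digest\":\"d364aa8d131e\","
                + "\"details\":{\"parent_model\":\"\",\"format\":\"gguf\",\"family\":\"llama\","
                + "\"families\":[\"llama\"],\"parameter_size\":\"7.2B\",\"quantization_level\":\"Q4_0\"},"
                + "\"expires_at\":\"2024-06-04T14:38:31.83753-07:00\",\"size_vram\":5137025024}]}";

        ObjectMapper mapper = new ObjectMapper()
                .setPropertyNamingStrategy(PropertyNamingStrategies.SNAKE_CASE);

        ModelsProcessResponse response = mapper.readValue(json, ModelsProcessResponse.class);
        ModelsProcessResponse.ModelProcess model = response.getModels().get(0);
        System.out.println(model.getName());                           // mistral:latest
        System.out.println(model.getDetails().getQuantizationLevel()); // Q4_0
    }
}
```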