Commit 4a12fb4
docs: generate javadoc for classes missing documentation
NickChecan committed Feb 6, 2025
1 parent 3817cf1 commit 4a12fb4
Showing 28 changed files with 1,690 additions and 546 deletions.
@@ -20,10 +20,20 @@
public abstract class SessionManager {


/**
* Checks if the user is logged in by verifying both SAP and Ollama sessions.
*
* @return {@code true} if the user is logged into either SAP or Ollama, {@code false} otherwise.
*/
public static boolean isUserLoggedIn() {
return isSapSession() || isOllamaSession();
}

/**
* Checks if an SAP session is active by verifying required memory components.
*
* @return {@code true} if all required SAP session components are present, {@code false} otherwise.
*/
public static boolean isSapSession() {
MemoryAccessToken memoryAccessToken = MemoryAccessToken.getInstance();
MemoryServiceKey memoryServiceKey = MemoryServiceKey.getInstance();
@@ -34,6 +44,11 @@ public static boolean isSapSession() {
|| memoryDeployment.isEmpty()) ? false : true;
}

/**
* Checks if an Ollama session is active by verifying required memory components.
*
* @return {@code true} if all required Ollama session components are present, {@code false} otherwise.
*/
public static boolean isOllamaSession() {
MemoryOllamaEndpoint memoryOllamaEndpoint = MemoryOllamaEndpoint.getInstance();
MemoryOllamaModel memoryOllamaModel = MemoryOllamaModel.getInstance();
@@ -69,18 +84,34 @@ public static void logout(final Browser browser, final EclipseMemory eclipseMemo
}
}

/**
* Clears all active sessions, including both SAP and Ollama sessions.
*/
public static void clearAllSessions() {
clearSapSession();
clearOllamaSession();
}

/**
* Clears all SAP session-related memory components.
* <p>
* This method resets stored access tokens, service keys, resource groups,
* and deployment information associated with the SAP session.
* </p>
*/
private static void clearSapSession() {
MemoryAccessToken.getInstance().clear();
MemoryServiceKey.getInstance().clear();
MemoryResourceGroup.getInstance().clear();
MemoryDeployment.getInstance().clear();
}

/**
* Clears all Ollama session-related memory components.
* <p>
* This method resets stored Ollama endpoint and model information.
* </p>
*/
private static void clearOllamaSession() {
MemoryOllamaEndpoint.getInstance().clear();
MemoryOllamaModel.getInstance().clear();
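The methods documented above form a small session-state API. As a rough illustration of how a caller might use it (a hypothetical snippet, not part of this commit; promptForLogin() is an assumed UI hook):

void refreshChatView() {
    if (!SessionManager.isUserLoggedIn()) {
        promptForLogin();                 // neither an SAP nor an Ollama session is configured
        return;
    }
    if (!SessionManager.isSapSession()) {
        // Must be an Ollama session, since isUserLoggedIn() returned true.
    }
}

void onLogoutButtonPressed() {
    SessionManager.clearAllSessions();    // wipes both SAP and Ollama memory components
}
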
@@ -47,6 +47,16 @@ public static IAIClient getClient() {
}
}

/**
* Creates and returns an {@link IAIClient} for SAP AI Core.
* <p>
* This method initializes the required memory components, authentication client,
* and deployment details. If the deployment's model name matches an OpenAI model,
* an {@link OpenAIClient} is returned. Otherwise, it returns {@code null}.
* </p>
*
* @return an instance of {@link IAIClient} for SAP AI Core, or {@code null} if unsupported.
*/
private static IAIClient getClientForSapAiCore() {
// Load memory components for access token, service key, resource group, deployment, and message history.
MemoryMessageHistory memoryMessageHistory = MemoryMessageHistory.getInstance();
@@ -71,6 +81,16 @@ private static IAIClient getClientForSapAiCore() {
}
}

/**
* Creates and returns an {@link IAIClient} for Ollama.
* <p>
* This method initializes the required memory components and
* returns an {@link OllamaClient} instance configured with
* an {@link OllamaClientHelper}.
* </p>
*
* @return an instance of {@link OllamaClient}.
*/
private static IAIClient getClientForOllama() {
MemoryMessageHistory memoryMessageHistory = MemoryMessageHistory.getInstance();
IMemoryObject<String> memoryOllamaEndpoint = MemoryOllamaEndpoint.getInstance();
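Taken together, the two private factories implement a simple dispatch behind the public getClient() shown in the hunk header above. The real getClient() body lies outside the visible hunk; the following is only a hedged sketch of the assumed selection logic, using the SessionManager checks documented earlier:

// Sketch only — the session checks and the null fallback are assumptions, not code from this commit.
public static IAIClient getClient() {
    if (SessionManager.isSapSession()) {
        return getClientForSapAiCore();   // may still return null if the deployed model is unsupported
    }
    if (SessionManager.isOllamaSession()) {
        return getClientForOllama();
    }
    return null;                          // assumed fallback when no session is active
}
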
@@ -18,50 +18,94 @@
import com.developer.nefarious.zjoule.plugin.memory.IMemoryObject;
import com.developer.nefarious.zjoule.plugin.models.Role;

/**
* A client implementation for interacting with the Ollama AI chat system.
* <p>
* The {@code OllamaClient} facilitates chat interactions by sending requests
* to an Ollama chat API endpoint and processing responses. It also manages
* message history using an in-memory storage system.
* </p>
*/
public class OllamaClient implements IAIClient {

private HttpClient httpClient;

private IMemoryMessageHistory memoryMessageHistory;

private IMemoryObject<String> memoryOllamaEndpoint;

private IOllamaClientHelper helper;

public OllamaClient(
final IMemoryMessageHistory memoryMessageHistory,
final IMemoryObject<String> memoryOllamaEndpoint,
final IOllamaClientHelper ollamaClientHelper) {
this.httpClient = HttpClient.newHttpClient();
this.memoryMessageHistory = memoryMessageHistory;
this.memoryOllamaEndpoint = memoryOllamaEndpoint;
helper = ollamaClientHelper;
}

@Override
public IChatMessage chatCompletion(final List<IChatMessage> messages) throws IOException, InterruptedException {
URI endpoint = createChatEndpoint();

BodyPublisher requestBody = helper.createRequestBody(messages);


/** The HTTP client used for sending chat requests. */
private HttpClient httpClient;

/** Memory storage for maintaining chat message history. */
private IMemoryMessageHistory memoryMessageHistory;

/** Memory storage for retrieving the Ollama chat API endpoint. */
private IMemoryObject<String> memoryOllamaEndpoint;

/** Helper class for constructing request payloads and processing responses. */
private IOllamaClientHelper helper;

/**
* Constructs an {@code OllamaClient} with the specified dependencies.
*
* @param memoryMessageHistory the in-memory storage for chat message history.
* @param memoryOllamaEndpoint the in-memory storage containing the Ollama chat API endpoint.
* @param ollamaClientHelper the helper class for handling request and response transformations.
*/
public OllamaClient(
final IMemoryMessageHistory memoryMessageHistory,
final IMemoryObject<String> memoryOllamaEndpoint,
final IOllamaClientHelper ollamaClientHelper) {
this.httpClient = HttpClient.newHttpClient();
this.memoryMessageHistory = memoryMessageHistory;
this.memoryOllamaEndpoint = memoryOllamaEndpoint;
helper = ollamaClientHelper;
}

/**
* Sends a chat completion request to the Ollama API.
* <p>
* This method constructs a request using the provided chat messages and sends
* it to the configured API endpoint. It then processes the response and returns
* the generated AI message.
* </p>
*
* @param messages the list of chat messages forming the conversation history.
* @return the AI-generated response as an {@link IChatMessage}.
* @throws IOException if an I/O error occurs when sending the request.
* @throws InterruptedException if the request is interrupted while waiting for a response.
*/
@Override
public IChatMessage chatCompletion(final List<IChatMessage> messages) throws IOException, InterruptedException {
URI endpoint = createChatEndpoint();
BodyPublisher requestBody = helper.createRequestBody(messages);

HttpRequest request = HttpRequest.newBuilder()
.uri(endpoint)
.POST(requestBody)
.build();
.uri(endpoint)
.POST(requestBody)
.build();

HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());

return helper.convertResponseToObject(response.body());
}
}

@Override
public IChatMessage createMessage(final Role role, final String userPrompt) {
return new OllamaChatMessage(role, userPrompt);
}
/**
* Creates a new chat message with the specified role and user input.
*
* @param role the role of the message (e.g., user or assistant).
* @param userPrompt the message content.
* @return an instance of {@link IChatMessage} representing the user input.
*/
@Override
public IChatMessage createMessage(final Role role, final String userPrompt) {
return new OllamaChatMessage(role, userPrompt);
}

@Override
public List<IChatMessage> getMessageHistory() {
MessageHistory messageHistory = memoryMessageHistory.load();
/**
* Retrieves the chat message history stored in memory.
*
* @return a list of {@link IChatMessage} representing the chat history,
* or an empty list if no history is available.
*/
@Override
public List<IChatMessage> getMessageHistory() {
MessageHistory messageHistory = memoryMessageHistory.load();
if (messageHistory == null) {
return Collections.emptyList();
}
@@ -72,23 +116,32 @@ public List<IChatMessage> getMessageHistory() {
}

return messages.stream().map(message ->
new OllamaChatMessage(message.getRole(), message.getContent()))
new OllamaChatMessage(message.getRole(), message.getContent()))
.collect(Collectors.toList());
}
}

@Override
public void setMessageHistory(final List<IChatMessage> chatMessages) {
MessageHistory newMessageHistory = new MessageHistory();
/**
* Stores the given chat message history in memory.
*
* @param chatMessages the list of chat messages to save.
*/
@Override
public void setMessageHistory(final List<IChatMessage> chatMessages) {
MessageHistory newMessageHistory = new MessageHistory();
newMessageHistory.setMessages(chatMessages.stream().map(
chatMessage -> new Message(chatMessage.getRole(), chatMessage.getMessage()))
.collect(Collectors.toList()));
chatMessage -> new Message(chatMessage.getRole(), chatMessage.getMessage()))
.collect(Collectors.toList()));
memoryMessageHistory.save(newMessageHistory);
}

private URI createChatEndpoint() {
}

/**
* Creates a URI representing the chat endpoint for sending requests.
*
* @return the {@link URI} of the chat API endpoint.
*/
private URI createChatEndpoint() {
String endpoint = memoryOllamaEndpoint.load();
String endpointInStringFormat = endpoint + "/api/chat";
return URI.create(endpointInStringFormat);
}

}
}
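For context, the documented chatCompletion flow boils down to: build the chat URI from the stored endpoint plus "/api/chat", serialize the conversation through the helper, POST it, and let the helper deserialize the reply. A minimal hypothetical caller follows; it assumes the memory singletons already hold a valid endpoint and model, that Role exposes a USER constant (not confirmed by this diff), and that imports mirror those of OllamaClient above. Exception handling is omitted for brevity.

// Hypothetical usage sketch — not part of this commit.
IChatMessage ask(String prompt) throws IOException, InterruptedException {
    IAIClient client = new OllamaClient(
            MemoryMessageHistory.getInstance(),
            MemoryOllamaEndpoint.getInstance(),
            new OllamaClientHelper(MemoryOllamaModel.getInstance()));

    List<IChatMessage> conversation = new ArrayList<>(client.getMessageHistory());
    conversation.add(client.createMessage(Role.USER, prompt));

    IChatMessage reply = client.chatCompletion(conversation); // POST <endpoint>/api/chat
    conversation.add(reply);
    client.setMessageHistory(conversation);                   // persist the updated history
    return reply;
}
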
@@ -9,36 +9,59 @@
import com.developer.nefarious.zjoule.plugin.models.OllamaModel;
import com.google.gson.Gson;

/**
* A helper class for constructing requests and processing responses for the Ollama AI chat system.
* <p>
* The {@code OllamaClientHelper} is responsible for serializing chat request bodies,
* deserializing chat responses, and retrieving the currently selected Ollama model.
* </p>
*/
public class OllamaClientHelper implements IOllamaClientHelper {

private IMemoryObject<OllamaModel> memoryOllamaModel;

public OllamaClientHelper(final IMemoryObject<OllamaModel> memoryOllamaModel) {
this.memoryOllamaModel = memoryOllamaModel;
}

@Override
public IChatMessage convertResponseToObject(final String serializedResponseBody) {

/** In-memory storage for the selected Ollama model. */
private IMemoryObject<OllamaModel> memoryOllamaModel;

/**
* Constructs an {@code OllamaClientHelper} with the specified memory storage.
*
* @param memoryOllamaModel the in-memory storage containing the selected Ollama model.
*/
public OllamaClientHelper(final IMemoryObject<OllamaModel> memoryOllamaModel) {
this.memoryOllamaModel = memoryOllamaModel;
}

/**
* {@inheritDoc}
*/
@Override
public IChatMessage convertResponseToObject(final String serializedResponseBody) {
Gson gson = new Gson();
OllamaRequestResponse deserializedResponseBody = gson.fromJson(serializedResponseBody, OllamaRequestResponse.class);
return deserializedResponseBody.getMessage();
}
}

@Override
public BodyPublisher createRequestBody(final List<IChatMessage> messages) {
/**
* {@inheritDoc}
*/
@Override
public BodyPublisher createRequestBody(final List<IChatMessage> messages) {
OllamaRequestBody requestBody = new OllamaRequestBody();

requestBody.setModel(getSelectedModel());
requestBody.setMessages(messages);
requestBody.setStream(false);

BodyPublisher bodyPublisher = HttpRequest.BodyPublishers.ofString(requestBody.toString());
return HttpRequest.BodyPublishers.fromPublisher(bodyPublisher);
}

private String getSelectedModel() {
OllamaModel ollamaModel = memoryOllamaModel.load();
return ollamaModel.getName();
}
}

/**
* Retrieves the name of the currently selected Ollama model from memory.
*
* @return the name of the selected Ollama model.
*/
private String getSelectedModel() {
OllamaModel ollamaModel = memoryOllamaModel.load();
return ollamaModel.getName();
}
}
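For reference, the body produced by createRequestBody is the Gson serialization of OllamaRequestBody, so the bytes sent to /api/chat carry the model name, the message list, and the stream flag set above. A hedged illustration (Role.USER, the message content, and the exact JSON field names are assumptions; only model, messages, and stream are implied by the setters used in the diff):

// Illustration only — wiring mirrors getClientForOllama(); the payload shape is an assumption.
OllamaClientHelper helper = new OllamaClientHelper(MemoryOllamaModel.getInstance());
List<IChatMessage> messages = List.of(new OllamaChatMessage(Role.USER, "Hello"));
BodyPublisher body = helper.createRequestBody(messages);
// Conceptually, the published payload looks like:
// {"model":"<selected model>","messages":[{"role":"user","content":"Hello"}],"stream":false}
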