Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use of local AI from ad4m embedding, llms & live transcriptions #473

Merged
merged 7 commits into from
Sep 18, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions app/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@
"@capacitor/ios": "^6.0.0",
"@capacitor/push-notifications": "^6.0.1",
"@capacitor/status-bar": "^6.0.0",
"@coasys/ad4m-connect": "0.10.0-rc7.fix.3",
"@coasys/ad4m-vue-hooks": "0.10.0-rc7.fix.3",
"@coasys/ad4m-connect": "0.10.0-rc7.local-ai.1",
"@coasys/ad4m-vue-hooks": "0.10.0-rc7.local-ai.1",
"@coasys/flux-api": "*",
"@coasys/flux-chat-view": "*",
"@coasys/flux-graph-view": "*",
Expand Down
53 changes: 53 additions & 0 deletions app/public/audio-processor.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
class AudioProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    // Accumulates downsampled samples until a full chunk is ready to post.
    this.buffer = [];
    // Number of samples per message posted to the main thread.
    this.chunkSize = 512;
    // `sampleRate` is a global of the AudioWorklet scope: the context's rate.
    this.originalSampleRate = sampleRate;
    // Rate expected by the transcription backend (16 kHz).
    this.targetSampleRate = 16000;
  }

  /**
   * Resample `buffer` from `inputSampleRate` to `outputSampleRate` by
   * averaging the input samples that fall into each output sample's window.
   * Returns the input unchanged when the rates already match.
   */
  downsampleBuffer(buffer, inputSampleRate, outputSampleRate) {
    // Fast path: no conversion needed. Also avoids the degenerate
    // ratio-below-1 behavior handled below.
    if (inputSampleRate === outputSampleRate) return buffer;

    const sampleRateRatio = inputSampleRate / outputSampleRate;
    const newLength = Math.round(buffer.length / sampleRateRatio);
    const result = new Float32Array(newLength);
    let offsetResult = 0;
    let offsetBuffer = 0;

    while (offsetResult < result.length) {
      const nextOffsetBuffer = Math.round((offsetResult + 1) * sampleRateRatio);
      let accum = 0, count = 0;
      for (let i = offsetBuffer; i < nextOffsetBuffer && i < buffer.length; i++) {
        accum += buffer[i];
        count++;
      }
      // Guard against division by zero: when the ratio is < 1 (context rate
      // below 16 kHz) rounding can produce an empty window (count === 0) and
      // the original `accum / count` wrote NaN into the output.
      result[offsetResult] = count > 0 ? accum / count : 0;
      offsetResult++;
      offsetBuffer = nextOffsetBuffer;
    }
    return result;
  }

  process(inputs, outputs, parameters) {
    const input = inputs[0];
    // The first channel can be absent when the node is disconnected or the
    // source stream has ended — guard before dereferencing it.
    if (input.length > 0 && input[0]) {
      const channelData = input[0];
      const downsampledData = this.downsampleBuffer(
        channelData,
        this.originalSampleRate,
        this.targetSampleRate
      );

      // Append the downsampled data to the accumulation buffer.
      // (Indexed push rather than spread: spread passes every sample as a
      // call argument and can overflow the call stack on large arrays.)
      for (let i = 0; i < downsampledData.length; i++) {
        this.buffer.push(downsampledData[i]);
      }

      // Flush every complete chunk (while, not if, so the buffer can never
      // lag more than one chunk behind even if a render quantum delivers
      // more than chunkSize samples).
      while (this.buffer.length >= this.chunkSize) {
        const float32ArrayToSend = new Float32Array(this.buffer.slice(0, this.chunkSize));
        this.port.postMessage(float32ArrayToSend);

        // Remove the sent data from the buffer.
        this.buffer = this.buffer.slice(this.chunkSize);
      }
    }
    // Returning true keeps the processor alive.
    return true;
  }
}

registerProcessor('audio-processor', AudioProcessor);
2 changes: 1 addition & 1 deletion packages/api/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
"uuid": "^9.0.0"
},
"peerDependencies": {
"@coasys/ad4m": "0.10.0-rc7.fix.3"
"@coasys/ad4m": "0.10.0-rc7.local-ai.1"
},
"keywords": [],
"author": "",
Expand Down
4 changes: 2 additions & 2 deletions packages/create/templates/preact/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -32,10 +32,10 @@
"license": "ISC",
"description": "",
"dependencies": {
"@coasys/ad4m": "0.10.0-rc7.fix.3",
"@coasys/ad4m": "0.10.0-rc7.local-ai.1",
"@coasys/flux-api": "0.9.0",
"@coasys/flux-container": "0.9.0",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.fix.3",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.local-ai.1",
"preact": "^10.13.1"
},
"devDependencies": {
Expand Down
4 changes: 2 additions & 2 deletions packages/flux-container/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@
"publish-package": "npm publish"
},
"dependencies": {
"@coasys/ad4m": "0.10.0-rc7.fix.3",
"@coasys/ad4m-connect": "0.10.0-rc7.fix.3",
"@coasys/ad4m": "0.10.0-rc7.local-ai.1",
"@coasys/ad4m-connect": "0.10.0-rc7.local-ai.1",
"@coasys/flux-api": "*",
"@coasys/flux-ui": "*",
"lit-element": "^2.4.0"
Expand Down
4 changes: 2 additions & 2 deletions packages/flux-editor/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@
"publish-package": "npm publish"
},
"dependencies": {
"@coasys/ad4m": "0.10.0-rc7.fix.3",
"@coasys/ad4m-connect": "0.10.0-rc7.fix.3",
"@coasys/ad4m": "0.10.0-rc7.local-ai.1",
"@coasys/ad4m-connect": "0.10.0-rc7.local-ai.1",
"@coasys/flux-api": "*",
"@coasys/flux-ui": "*",
"@tiptap/core": "^2.0.3",
Expand Down
2 changes: 1 addition & 1 deletion packages/react-web/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
"@coasys/flux-webrtc": "*"
},
"peerDependencies": {
"@coasys/ad4m": "0.10.0-rc7.fix.3",
"@coasys/ad4m": "0.10.0-rc7.local-ai.1",
"react": "*"
}
}
4 changes: 2 additions & 2 deletions packages/utils/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@
"openai": "^4.55.5"
},
"peerDependencies": {
"@coasys/ad4m": "0.10.0-rc7.fix.3",
"@coasys/ad4m-connect": "0.10.0-rc7.fix.3"
"@coasys/ad4m": "0.10.0-rc7.local-ai.1",
"@coasys/ad4m-connect": "0.10.0-rc7.local-ai.1"
},
"keywords": [],
"author": "",
Expand Down
113 changes: 78 additions & 35 deletions packages/utils/src/synergy.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { LinkQuery } from "@coasys/ad4m";
import { Ad4mClient, Ad4mSignalCB, AITask, LinkQuery, PerspectiveProxy } from "@coasys/ad4m";
import { getAd4mClient } from "@coasys/ad4m-connect/utils";
import {
Conversation,
ConversationSubgroup,
Expand Down Expand Up @@ -43,24 +44,18 @@ async function removeProcessedData(perspective, itemId) {
]);
}

async function generateEmbedding(text: string) {
const embeddingWorker = new Worker(new URL("./embeddingWorker.ts", import.meta.url));
return new Promise((resolve) => {
embeddingWorker.postMessage({
type: "embed",
text,
messageId: new Date().getTime().toString(),
});
embeddingWorker.onmessage = (e) => {
if (e.data.type === "embed") resolve(e.data.embedding);
};
});
export async function generateEmbedding(text: string) {
const client = await getAd4mClient();

const embedding = await client.ai.embed("berd", text);

return embedding;
}

async function saveEmbedding(perspective, itemId, embedding) {
const { EMBEDDING_VECTOR_LANGUAGE } = languages;
const embeddingExpression = await perspective.createExpression(
{ model: "TaylorAI/gte-tiny", data: embedding },
{ model: "berd", data: embedding },
EMBEDDING_VECTOR_LANGUAGE
);
return await perspective.add({
Expand All @@ -84,16 +79,18 @@ async function getConversationData(perspective, conversationRepo) {
source: latestConversation.id,
});
latestSubgroups = (await subgroupRepo.getAllData()) as any;
const latestSubgroup = latestSubgroups[latestSubgroups.length - 1] as any;
const subgroupItems = await getSubgroupItems(perspective, latestSubgroup.id);
// calculate time since last item was created
const lastItemTimestamp = subgroupItems[subgroupItems.length - 1].timestamp;
const minsSinceLastItemCreated =
(new Date().getTime() - new Date(lastItemTimestamp).getTime()) / (1000 * 60);
if (minsSinceLastItemCreated < 30) {
// if less than 30 mins, consider the new item part of the latest conversation
conversation = latestConversation;
latestSubgroupItems = subgroupItems;
if (latestSubgroups.length) {
const latestSubgroup = latestSubgroups[latestSubgroups.length - 1] as any;
const subgroupItems = await getSubgroupItems(perspective, latestSubgroup.id);
// calculate time since last item was created
const lastItemTimestamp = subgroupItems[subgroupItems.length - 1].timestamp;
const minsSinceLastItemCreated =
(new Date().getTime() - new Date(lastItemTimestamp).getTime()) / (1000 * 60);
if (minsSinceLastItemCreated < 30) {
// if less than 30 mins, consider the new item part of the latest conversation
conversation = latestConversation;
latestSubgroupItems = subgroupItems;
}
}
}
if (!conversation) {
Expand Down Expand Up @@ -141,7 +138,7 @@ async function LLMProcessing(newItem, latestSubgroups, latestSubgroupItems, allT
const prompt = `
I'm passing you a JSON object with the following properties: 'lastGroupings' (string block broken up into sections by line breaks <br/>), 'lastMessages' (string array), 'newMessage' (string), and 'existingTopics' (string array).

{ lastGroupings: [${latestSubgroups.map((s) => s.summary).join(" <br/> ")}], lastMessages: [${latestSubgroupItems.map((si) => si.text).join(", ")}], newMessage: '${newItem.text}', existingTopics: [${allTopics.map((t) => t.name).join(", ")}] }
{ lastGroupings: [], lastMessages: [], newMessage: 'Some text', existingTopics: [] }

Firstly, analyze the 'newMessage' string and identify between 1 and 5 topics (each a single word string in lowercase) that are relevant to the content of the 'newMessage' string. If any of the topics you choose are similar to topics listed in the 'existingTopics' array, use the existing topic instead of creating a new one (e.g., if one of the new topics you picked was 'foods' and you find an existing topic 'food', use 'food' instead of creating a new topic that is just a plural version of the existing topic). For each topic, provide a relevance score between 0 and 100 (0 being irrelevant and 100 being highly relevant) that indicates how relevant the topic is to the content of the 'newMessage' string.

Expand Down Expand Up @@ -172,16 +169,62 @@ async function LLMProcessing(newItem, latestSubgroups, latestSubgroupItems, allT
Make sure the response is in a format that can be parsed using JSON.parse(). Don't wrap it in code syntax.
`;

const openai = new OpenAI({
apiKey: localStorage?.getItem("openAIKey") || "",
dangerouslyAllowBrowser: true,
});
const result = await openai.chat.completions.create({
messages: [{ role: "user", content: prompt }],
model: "gpt-4o",
});
const data = JSON.parse(result.choices[0].message.content || "");
console.log("Open AI response: ", data);
const examples = [{
input: `{ lastGroupings: [], lastMessages: [], newMessage: 'hello world', existingTopics: [greeting] }`,
output: `{"topics":[{"name":"greeting","relevance":100}],"changedSubject":true,"newSubgroupName":"Hello World","newSubgroupSummary":"The conversation starts with a simple greeting: 'hello world'.","newConversationName":"Hello World","newConversationSummary":"The conversation starts with a simple greeting: 'hello world'."}`,
}, {
input: `{ lastGroupings: [The conversation starts with a simple greeting: 'hello world'.], lastMessages: [<p>hello world</p><p></p>], newMessage: 'another hello 2', existingTopics: [greeting] }`,
output: `{"topics":[{"name":"hello","relevance":80},{"name":"greeting","relevance":70}],"changedSubject":false,"newSubgroupName":"More Greetings","newSubgroupSummary":"The conversation continues with another greeting, showing the ongoing exchange of pleasantries.","newConversationName":"Simple Greetings","newConversationSummary":"The conversation starts with a simple greeting: 'hello world'. Following this, another greeting is exchanged, indicating the continuation of pleasantries."}`,
}, {
input: `{ lastGroupings: [The conversation continues with another greeting, showing the ongoing exchange of pleasantries.], lastMessages: [<p>hello world</p><p></p>, <p>another hello 2</p><p></p>], newMessage: 'game talk here', existingTopics: [greeting, hello] }`,
output: `{"topics":[{"name":"game","relevance":100},{"name":"talk","relevance":80}],"changedSubject":true,"newSubgroupName":"Game Talk","newSubgroupSummary":"The conversation introduces a new topic with a focus on discussing games.","newConversationName":"Exchange of Pleasantries and Game Talk","newConversationSummary":"The conversation continues with another greeting, showing the ongoing exchange of pleasantries. The conversation then introduces a new topic with a focus on discussing games."}`
}, {
input: `{ lastGroupings: [The conversation continues with another greeting, showing the ongoing exchange of pleasantries. <br/> The conversation introduces a new topic with a focus on discussing games.], lastMessages: [<p>game talk here</p><p></p>], newMessage: 'dota 2 is the biggest esport game there is', existingTopics: [greeting, hello, game, talk] }`,
output: `{"topics":[{"name":"game","relevance":90},{"name":"esport","relevance":85},{"name":"dota","relevance":100}],"changedSubject":false,"newSubgroupName":"Dota 2 Discussion","newSubgroupSummary":"The conversation continues with a focus on Dota 2, highlighting its prominence in the esports scene.","newConversationName":"Games and Esports","newConversationSummary":"The conversation continues with another greeting, showing the ongoing exchange of pleasantries. The conversation introduces a new topic with a focus on discussing games. The latest discussion centers on Dota 2, highlighting its significance in the world of esports."}`
}]

const client: Ad4mClient = await getAd4mClient();

const tasks = await client.ai.tasks();

let task = tasks.find((t) => t.systemPrompt.includes("I'm passing you a JSON object with the following properties: 'lastGroupings'"));

console.log("Task: ", task);

if (task) {
// task.promptExamples = examples;
// task.systemPrompt = prompt;
// await client.ai.updateTask(task.taskId, task);
} else {
task = await client.ai.addTask("llama", prompt, examples)
}

console.log("Task: calling prompt");

const response = await client.ai.prompt(task.taskId, `{ lastGroupings: [${latestSubgroups.map((s) => s.summary).join(" <br/> ")}], lastMessages: [${latestSubgroupItems.map((si) => si.text).join(", ")}], newMessage: '${newItem.text}', existingTopics: [${allTopics.map((t) => t.name).join(", ")}] }`);

console.log("AI Response: ", response);

// const openai = new OpenAI({
// apiKey: localStorage?.getItem("openAIKey") || "",
// dangerouslyAllowBrowser: true,
// });
// const result = await openai.chat.completions.create({
// messages: [{ role: "user", content: prompt }],
// model: "gpt-4o",
// });
// console.log("Open AI request: ", `{ lastGroupings: [${latestSubgroups.map((s) => s.summary).join(" <br/> ")}], lastMessages: [${latestSubgroupItems.map((si) => si.text).join(", ")}], newMessage: '${newItem.text}', existingTopics: [${allTopics.map((t) => t.name).join(", ")}] }`);
// console.log("Open AI response: ", JSON.parse(response), JSON.parse(result.choices[0].message.content));
// const data = JSON.parse(result.choices[0].message.content || "");

const data = JSON.parse(response);
if (!data.newConversationName) data.newConversationName = ""
if (!data.newConversationSummary) data.newConversationSummary = ""
if (!data.newSubgroupName) data.newSubgroupName = ""
if (!data.newSubgroupSummary) data.newSubgroupSummary = ""
if (!data.topics) data.topics = []
if (!data.changedSubject) data.changedSubject = false
console.log("LLM Processing Data: ", data);
return data;
}

Expand Down
2 changes: 1 addition & 1 deletion packages/vue/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
"@coasys/flux-utils": "0.9.1"
},
"peerDependencies": {
"@coasys/ad4m": "0.10.0-rc7.fix.3",
"@coasys/ad4m": "0.10.0-rc7.local-ai.1",
"vue": "^3.2.47"
}
}
2 changes: 1 addition & 1 deletion packages/webrtc/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
"author": "",
"license": "ISC",
"dependencies": {
"@coasys/ad4m": "0.10.0-rc7.fix.3",
"@coasys/ad4m": "0.10.0-rc7.local-ai.1",
"simple-peer": "^9.11.1"
}
}
4 changes: 2 additions & 2 deletions views/chat-view/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
"license": "ISC",
"description": "",
"dependencies": {
"@coasys/ad4m": "0.10.0-rc7.fix.3",
"@coasys/ad4m": "0.10.0-rc7.local-ai.1",
"@coasys/flux-api": "*",
"@coasys/flux-constants": "*",
"@coasys/flux-container": "*",
Expand All @@ -42,7 +42,7 @@
"@coasys/flux-types": "*",
"@coasys/flux-ui": "*",
"@coasys/flux-utils": "0.9.1",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.fix.3",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.local-ai.1",
"preact": "^10.13.1",
"react": "*",
"react-virtuoso": "^4.3.6"
Expand Down
2 changes: 1 addition & 1 deletion views/graph-view/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
"description": "",
"dependencies": {
"3d-force-graph": "^1.70.19",
"@coasys/ad4m": "0.10.0-rc7.fix.3",
"@coasys/ad4m": "0.10.0-rc7.local-ai.1",
"@coasys/flux-container": "*",
"preact": "^10.11.3",
"three-spritetext": "^1.6.5"
Expand Down
4 changes: 2 additions & 2 deletions views/kanban-view/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -34,12 +34,12 @@
"license": "ISC",
"description": "",
"dependencies": {
"@coasys/ad4m": "0.10.0-rc7.fix.3",
"@coasys/ad4m": "0.10.0-rc7.local-ai.1",
"@coasys/flux-api": "*",
"@coasys/flux-comment-section": "*",
"@coasys/flux-container": "*",
"@coasys/flux-types": "*",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.fix.3",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.local-ai.1",
"fractional-indexing": "^3.2.0",
"preact": "^10.13.1",
"react-beautiful-dnd": "^13.1.1"
Expand Down
4 changes: 2 additions & 2 deletions views/post-view/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -36,13 +36,13 @@
"author": "",
"license": "ISC",
"dependencies": {
"@coasys/ad4m": "0.10.0-rc7.fix.3",
"@coasys/ad4m": "0.10.0-rc7.local-ai.1",
"@coasys/flux-api": "*",
"@coasys/flux-comment-section": "*",
"@coasys/flux-editor": "*",
"@coasys/flux-ui": "*",
"@coasys/flux-utils": "0.9.1",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.fix.3",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.local-ai.1",
"classnames": "^2.3.2",
"date-fns": "^2.24.0",
"emoji-picker-element": "^1.14.0",
Expand Down
4 changes: 2 additions & 2 deletions views/synergy-demo-view/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,9 @@
"license": "ISC",
"description": "",
"dependencies": {
"@coasys/ad4m": "0.10.0-rc7.fix.3",
"@coasys/ad4m": "0.10.0-rc7.local-ai.1",
"@coasys/flux-utils": "0.9.1",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.fix.3",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.local-ai.1",
"@coasys/flux-api": "*",
"@coasys/flux-webrtc-view": "0.8.1-fix.2",
"preact": "^10.13.1"
Expand Down
4 changes: 2 additions & 2 deletions views/table-view/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -33,11 +33,11 @@
"license": "ISC",
"description": "",
"dependencies": {
"@coasys/ad4m": "0.10.0-rc7.fix.3",
"@coasys/ad4m": "0.10.0-rc7.local-ai.1",
"@coasys/flux-api": "*",
"@coasys/flux-comment-section": "*",
"@coasys/flux-container": "*",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.fix.3",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.local-ai.1",
"preact": "^10.13.1",
"uuid": "^9.0.0"
},
Expand Down
2 changes: 1 addition & 1 deletion views/webrtc-debug/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@
"@coasys/flux-container": "*",
"@coasys/flux-utils": "0.9.1",
"@coasys/flux-webrtc": "*",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.fix.3",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.local-ai.1",
"classnames": "^2.3.2",
"date-fns": "^2.29.3",
"emoji-picker-element": "^1.14.0",
Expand Down
2 changes: 1 addition & 1 deletion views/webrtc-view/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@
"@coasys/flux-container": "*",
"@coasys/flux-webrtc": "*",
"@coasys/flux-utils": "0.9.1",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.fix.3",
"@coasys/ad4m-react-hooks": "0.10.0-rc7.local-ai.1",
"@types/simple-peer": "^9.11.8",
"@xenova/transformers": "^2.17.2",
"classnames": "^2.3.2",
Expand Down
Loading
Loading