Commit d8419d7
LLM Powered Recipe Suggestions (#98)
* It's... alive?
* Use vite dev proxy to solve CORS issue (for now)
* Add documentation and update thread API
* Expand README
* Log a TODO
* Disambiguate name of "Thread" so as to avoid collision with good old CPU threads
* Update tests
1 parent 2987782 commit d8419d7

File tree

14 files changed: +588 -146 lines


typescript/package-lock.json

Lines changed: 2 additions & 0 deletions
(Generated file; diff not rendered.)

typescript/packages/llm-client/src/index.ts

Lines changed: 89 additions & 43 deletions
@@ -10,6 +10,69 @@ export interface ClientConfig {
   system?: string;
 }
 
+export class ConversationThread {
+  private pendingToolCalls: Anthropic.Messages.ToolUseBlockParam[] = [];
+
+  constructor(
+    private client: LLMClient,
+    public id: string,
+    public conversation: string[] = [],
+  ) {
+    this.client = client;
+    this.id = id;
+    this.conversation = conversation;
+  }
+
+  async sendMessage(message: string): Promise<string> {
+    let response: AppendThreadResponse;
+
+    if (this.pendingToolCalls.length > 0) {
+      const toolResponses = await this.handleToolCalls(this.pendingToolCalls);
+      response = await this.client.continueThread(
+        this.id,
+        undefined,
+        toolResponses,
+      );
+    } else {
+      response = await this.client.continueThread(this.id, message);
+    }
+
+    this.conversation.push(`User: ${message}`);
+    let assistantResponse = "";
+
+    if (response.pendingToolCalls && response.pendingToolCalls.length > 0) {
+      this.pendingToolCalls = response.pendingToolCalls;
+      assistantResponse = (
+        response.assistantResponse?.content[0] as { text: string }
+      ).text;
+    } else {
+      assistantResponse = response.output || "";
+      this.pendingToolCalls = [];
+    }
+
+    this.conversation.push(`Assistant: ${assistantResponse}`);
+    return assistantResponse;
+  }
+
+  private async handleToolCalls(
+    toolCalls: Anthropic.Messages.ToolUseBlockParam[],
+  ): Promise<ToolResponse[]> {
+    return await Promise.all(
+      toolCalls.map(async (toolCall) => ({
+        type: "tool_result",
+        tool_use_id: toolCall.id,
+        content: [
+          { type: "text", text: await this.client.executeTool(toolCall) },
+        ],
+      })),
+    );
+  }
+
+  hasPendingToolCalls(): boolean {
+    return this.pendingToolCalls.length > 0;
+  }
+}
+
 export class LLMClient {
   private serverUrl: string;
   private tools: Tool[];
@@ -31,32 +94,49 @@ export class LLMClient {
     });
 
     if (!response.ok) {
-      throw new Error(`HTTP error! status: ${response.status}`);
+      const errorText = await response.text();
+      throw new Error(
+        `HTTP error! status: ${response.status}, body: ${errorText}`,
+      );
     }
 
     return await response.json();
   }
 
-  async createThread(message: string): Promise<CreateThreadResponse> {
+  async createThread(message: string): Promise<ConversationThread> {
     const request: CreateThreadRequest = {
       action: "create",
       system: this.system,
       message,
-      activeTools: this.tools.map(({ implementation, ...tool }) => ({
-        ...tool,
-      })),
+      activeTools: this.tools.map(({ implementation, ...tool }) => tool),
     };
 
-    return await this.sendRequest(request);
+    const response: CreateThreadResponse = await this.sendRequest(request);
+    const thread = new ConversationThread(this, response.threadId);
+
+    const initialAssistantResponse = (
+      response.assistantResponse.content[0] as { text: string }
+    ).text;
+    thread.conversation.push(`User: ${message}`);
+    thread.conversation.push(`Assistant: ${initialAssistantResponse}`);
+
+    if (response.pendingToolCalls && response.pendingToolCalls.length > 0) {
+      // Instead of handling tool calls here, we set them as pending in the Thread
+      (thread as any).pendingToolCalls = response.pendingToolCalls;
+    }
+
+    return thread;
   }
 
   async continueThread(
     threadId: string,
-    toolResponses: ToolResponse[],
+    message?: string,
+    toolResponses: ToolResponse[] = [],
   ): Promise<AppendThreadResponse> {
     const request: AppendThreadRequest = {
       action: "append",
       threadId,
+      message,
       toolResponses,
     };

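With the new signature, a turn is either a user message or a batch of tool results. A minimal sketch of the two call shapes; the thread and tool-use IDs below are placeholders, not values from this commit:

import { LLMClient } from "@commontools/llm-client";

const client = new LLMClient({ serverUrl: "/api/llm", system: "test", tools: [] });

// A plain user turn: a message and no tool responses.
await client.continueThread("thread-123", "Any vegetarian options?");

// A tool-result turn: no new message, only results for the pending calls.
await client.continueThread("thread-123", undefined, [
  {
    type: "tool_result",
    tool_use_id: "toolu_01", // placeholder id
    content: [{ type: "text", text: "found 3 matching gems" }],
  },
]);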
@@ -67,45 +147,10 @@ export class LLMClient {
     toolCall: Anthropic.Messages.ToolUseBlockParam,
   ): Promise<string> {
     const tool = this.tools.find((t) => t.name === toolCall.name);
-    console.log("Tool call:", toolCall.name, toolCall.input);
-
     if (!tool) {
       throw new Error(`Tool not found: ${toolCall.name}`);
     }
-    const result = await tool.implementation(toolCall.input);
-    console.log("Tool result:", result);
-    return result;
-  }
-
-  async handleConversation(initialMessage: string): Promise<string[]> {
-    const conversation: string[] = [];
-    conversation.push(`User: ${initialMessage}`);
-
-    let thread: CreateThreadResponse | AppendThreadResponse =
-      await this.createThread(initialMessage);
-    conversation.push(
-      `Assistant: ${(thread.assistantResponse.content[0] as { text: string }).text}`,
-    );
-
-    while (thread.pendingToolCalls && thread.pendingToolCalls.length > 0) {
-      const toolResponses: ToolResponse[] = await Promise.all(
-        thread.pendingToolCalls.map(async (toolCall) => ({
-          type: "tool_result",
-          tool_use_id: toolCall.id,
-          content: [{ type: "text", text: await this.executeTool(toolCall) }],
-        })),
-      );
-
-      // console.info("Tool responses", toolResponses);
-      thread = await this.continueThread(thread.threadId, toolResponses);
-
-      if (thread.output) {
-        conversation.push(`Assistant: ${thread.output}`);
-        break;
-      }
-    }
-
-    return conversation;
+    return await tool.implementation(toolCall.input);
   }
 }
 
@@ -120,6 +165,7 @@ interface CreateThreadRequest {
 interface AppendThreadRequest {
   action: "append";
   threadId: string;
+  message?: string;
   toolResponses: ToolResponse[];
 }

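Together these changes replace the deleted handleConversation loop: callers now hold a ConversationThread and step it explicitly. A minimal sketch of that flow; the server URL and prompts are invented for illustration:

import { LLMClient } from "@commontools/llm-client";

const client = new LLMClient({
  serverUrl: "http://localhost:5173/api/llm", // assumed dev-server URL
  system: "You are a helpful recipe assistant.",
  tools: [],
});

// createThread now returns a ConversationThread rather than a raw response.
const thread = await client.createThread("Suggest a quick pasta recipe.");

// Drain any tool calls the model queued before taking the next user turn;
// sendMessage executes them and feeds the results back into the thread.
while (thread.hasPendingToolCalls()) {
  await thread.sendMessage("");
}

console.log(thread.conversation.join("\n"));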
typescript/packages/lookslike-high-level/package.json

Lines changed: 5 additions & 2 deletions
@@ -7,6 +7,7 @@
   "private": true,
   "type": "module",
   "scripts": {
+    "dev": "npx vite",
     "build": "wireit",
     "clean": "wireit"
   },
@@ -28,7 +29,8 @@
       "dependencies": [
         "../common-ui:build",
         "../common-frp:build",
-        "../lookslike-sagas:build"
+        "../lookslike-sagas:build",
+        "../llm-client:build"
       ],
       "command": "vite build"
     },
@@ -39,6 +41,7 @@
   "dependencies": {
     "@commontools/common-ui": "^0.0.1",
     "@commontools/common-frp": "^0.0.1",
-    "@commontools/lookslike-sagas": "^0.0.1"
+    "@commontools/lookslike-sagas": "^0.0.1",
+    "@commontools/llm-client": "^0.0.1"
   }
 }
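The new "dev" script runs Vite directly, and the commit message notes that CORS is handled (for now) by the Vite dev proxy. The proxy configuration itself is not part of this excerpt; a minimal sketch of what such an entry could look like, with an assumed backend address:

// vite.config.ts — illustrative sketch only; the actual config is not shown in this diff.
import { defineConfig } from "vite";

export default defineConfig({
  server: {
    proxy: {
      // Requests to the same-origin /api/llm path are forwarded to the
      // LLM server, so the browser never issues a cross-origin request.
      "/api/llm": {
        target: "http://localhost:8000", // assumed backend address
        changeOrigin: true,
      },
    },
  },
});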
Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
+import { LLMClient } from "@commontools/llm-client";
+export const LLM_SERVER_URL =
+  window.location.protocol + "//" + window.location.host + "/api/llm";
+
+export const suggestionClient = new LLMClient({
+  serverUrl: LLM_SERVER_URL,
+  system:
+    "You are an assistant that helps match user queries to relevant data gems based on their names and types.",
+  tools: [],
+});
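A sketch of how suggestionClient might be exercised against its system prompt; the import path, gem names, and query are assumptions made for illustration:

// Illustrative usage; the module path and the data below are invented.
import { suggestionClient } from "./suggestions.js"; // hypothetical module name

const gemNames = ["shopping list", "recipe book", "trip planner"];

const thread = await suggestionClient.createThread(
  `Which of these data gems best matches "dinner ideas"? ${gemNames.join(", ")}`,
);
console.log(thread.conversation[thread.conversation.length - 1]);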

typescript/packages/lookslike-high-level/src/main.ts

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@ import { home } from "./recipes/home.js";
 
 document.addEventListener("DOMContentLoaded", () => {
   const windowManager = document.getElementById(
-    "window-manager"
+    "window-manager",
   )! as CommonWindowManager;
   windowManager.openSaga(home({ sagas: dataGems, recipes }));
 });
