@@ -10,6 +10,69 @@ export interface ClientConfig {
   system?: string;
 }
 
+export class ConversationThread {
+  private pendingToolCalls: Anthropic.Messages.ToolUseBlockParam[] = [];
+
+  constructor(
+    private client: LLMClient,
+    public id: string,
+    public conversation: string[] = [],
+  ) {
+    this.client = client;
+    this.id = id;
+    this.conversation = conversation;
+  }
+
+  async sendMessage(message: string): Promise<string> {
+    let response: AppendThreadResponse;
+
+    // If the previous response requested tool calls, execute them and send
+    // the tool results instead of forwarding new user text.
+    if (this.pendingToolCalls.length > 0) {
+      const toolResponses = await this.handleToolCalls(this.pendingToolCalls);
+      response = await this.client.continueThread(
+        this.id,
+        undefined,
+        toolResponses,
+      );
+    } else {
+      response = await this.client.continueThread(this.id, message);
+    }
+
+    this.conversation.push(`User: ${message}`);
+    let assistantResponse = "";
+
+    if (response.pendingToolCalls && response.pendingToolCalls.length > 0) {
+      this.pendingToolCalls = response.pendingToolCalls;
+      assistantResponse = (
+        response.assistantResponse?.content[0] as { text: string }
+      ).text;
+    } else {
+      assistantResponse = response.output || "";
+      this.pendingToolCalls = [];
+    }
+
+    this.conversation.push(`Assistant: ${assistantResponse}`);
+    return assistantResponse;
+  }
+
+  private async handleToolCalls(
+    toolCalls: Anthropic.Messages.ToolUseBlockParam[],
+  ): Promise<ToolResponse[]> {
+    return await Promise.all(
+      toolCalls.map(async (toolCall) => ({
+        type: "tool_result",
+        tool_use_id: toolCall.id,
+        content: [
+          { type: "text", text: await this.client.executeTool(toolCall) },
+        ],
+      })),
+    );
+  }
+
+  hasPendingToolCalls(): boolean {
+    return this.pendingToolCalls.length > 0;
+  }
+}
+
 export class LLMClient {
   private serverUrl: string;
   private tools: Tool[];
@@ -31,32 +94,49 @@ export class LLMClient {
     });
 
     if (!response.ok) {
-      throw new Error(`HTTP error! status: ${response.status}`);
+      const errorText = await response.text();
+      throw new Error(
+        `HTTP error! status: ${response.status}, body: ${errorText}`,
+      );
     }
 
     return await response.json();
   }
 
-  async createThread(message: string): Promise<CreateThreadResponse> {
+  async createThread(message: string): Promise<ConversationThread> {
     const request: CreateThreadRequest = {
       action: "create",
       system: this.system,
       message,
-      activeTools: this.tools.map(({ implementation, ...tool }) => ({
-        ...tool,
-      })),
+      activeTools: this.tools.map(({ implementation, ...tool }) => tool),
     };
 
-    return await this.sendRequest(request);
+    const response: CreateThreadResponse = await this.sendRequest(request);
+    const thread = new ConversationThread(this, response.threadId);
+
+    const initialAssistantResponse = (
+      response.assistantResponse.content[0] as { text: string }
+    ).text;
+    thread.conversation.push(`User: ${message}`);
+    thread.conversation.push(`Assistant: ${initialAssistantResponse}`);
+
+    if (response.pendingToolCalls && response.pendingToolCalls.length > 0) {
+      // Instead of handling tool calls here, we set them as pending in the Thread
+      (thread as any).pendingToolCalls = response.pendingToolCalls;
+    }
+
+    return thread;
   }
 
   async continueThread(
     threadId: string,
-    toolResponses: ToolResponse[],
+    message?: string,
+    toolResponses: ToolResponse[] = [],
   ): Promise<AppendThreadResponse> {
     const request: AppendThreadRequest = {
       action: "append",
       threadId,
+      message,
       toolResponses,
     };
 
@@ -67,45 +147,10 @@ export class LLMClient {
     toolCall: Anthropic.Messages.ToolUseBlockParam,
   ): Promise<string> {
     const tool = this.tools.find((t) => t.name === toolCall.name);
-    console.log("Tool call:", toolCall.name, toolCall.input);
-
     if (!tool) {
       throw new Error(`Tool not found: ${toolCall.name}`);
     }
-    const result = await tool.implementation(toolCall.input);
-    console.log("Tool result:", result);
-    return result;
-  }
-
-  async handleConversation(initialMessage: string): Promise<string[]> {
-    const conversation: string[] = [];
-    conversation.push(`User: ${initialMessage}`);
-
-    let thread: CreateThreadResponse | AppendThreadResponse =
-      await this.createThread(initialMessage);
-    conversation.push(
-      `Assistant: ${(thread.assistantResponse.content[0] as { text: string }).text}`,
-    );
-
-    while (thread.pendingToolCalls && thread.pendingToolCalls.length > 0) {
-      const toolResponses: ToolResponse[] = await Promise.all(
-        thread.pendingToolCalls.map(async (toolCall) => ({
-          type: "tool_result",
-          tool_use_id: toolCall.id,
-          content: [{ type: "text", text: await this.executeTool(toolCall) }],
-        })),
-      );
-
-      // console.info("Tool responses", toolResponses);
-      thread = await this.continueThread(thread.threadId, toolResponses);
-
-      if (thread.output) {
-        conversation.push(`Assistant: ${thread.output}`);
-        break;
-      }
-    }
-
-    return conversation;
+    return await tool.implementation(toolCall.input);
   }
 }
 
@@ -120,6 +165,7 @@ interface CreateThreadRequest {
 interface AppendThreadRequest {
   action: "append";
   threadId: string;
+  message?: string;
   toolResponses: ToolResponse[];
 }
 
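For reviewers, a minimal sketch of how a caller might drive the new thread-based API after this change. The demo function and its prompt string are illustrative only, and it assumes an LLMClient instance constructed elsewhere, since the client's constructor is not part of this diff.

// Hypothetical usage sketch; how the LLMClient instance is obtained is an
// assumption, as its constructor is not shown in this diff.
async function demo(client: LLMClient): Promise<void> {
  // createThread now returns a ConversationThread rather than a raw response.
  const thread = await client.createThread("What is the weather in Paris?");

  // While the assistant has requested tool calls, sendMessage executes them
  // and forwards the tool results instead of new user text.
  while (thread.hasPendingToolCalls()) {
    await thread.sendMessage("");
  }

  // The running transcript is kept on the thread itself.
  console.log(thread.conversation.join("\n"));
}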