@@ -10,7 +10,7 @@ import {
 } from "./chat-history";
 import { useStepwiseServerConfiguration } from "@/hooks/useVersion";
 import { useAuth0 } from "@auth0/auth0-react";
-import { LLMSelector, useLLMSelectorStore } from "./llm-selector";
+import { LLMSelector, OpenAI_LLM, useLLMSelectorStore } from "./llm-selector";
 import { useOpenAIConfiguration } from "./openai-configure-card";
 import OpenAI from "openai";
 import Image from "next/image";
@@ -41,13 +41,8 @@ export const ChatControlBar: React.FC = () => {
   const message = useChatBoxStore((state) => state.message);
   const chatHistory = useChatHistoryStore((state) => state.messages);
   const selectedLLM = useLLMSelectorStore((state) => state.selectedLLM);
-  const claudeLLMs = useClaudeConfiguration((state) => state.LLMTypes);
-  const openaiLLMs = useOpenAIConfiguration((state) => state.LLMTypes);
-  const openAIApiKey = useOpenAIConfiguration((state) => state.apiKey);
-  const claudeApiKey = useClaudeConfiguration((state) => state.apiKey);
   const setMessage = useChatBoxStore((state) => state.setMessage);
   const addMessage = useChatHistoryStore((state) => state.addMessage);
-  const deleteMessage = useChatHistoryStore((state) => state.deleteMessage);
   const configuration = useStepwiseServerConfiguration();
   const [busy, setBusy] = React.useState(false);
   const { user } = useAuth0();
@@ -68,6 +63,11 @@ export const ChatControlBar: React.FC = () => {
     stepRunHistory: StepRunDTO[],
     chatHistory: ChatMessageContent[],
   ) => {
+    if (selectedLLM === undefined) {
+      toast.error("Please select a language model");
+      return;
+    }
+
     if (message !== "") {
       let userMessage: ChatMessage;
       if (configuration?.enableAuth0Authentication) {
@@ -106,6 +106,8 @@ You are a helpful workflow assistant. Your name is ${llmName}.
 
 You are currently assisting user with the workflow ${workflow.name}. You can either invoke the workflow or provide assistance with the steps in the workflow.
 
+When invoking a step in the workflow, you don't need to consider whether its prerequisite steps have been executed; the workflow engine takes care of that, so you can invoke the step directly.
+
 Each workflow is associated with a context which contains the intermediate results of the workflow execution.
 
 ## current context:
@@ -116,12 +118,17 @@
   .map((v) => `${v.result?.name}: ${v.result?.displayValue}`)
   .join("\n")
 }
+
+You don't need to provide the arguments if they are already available in the context. You can override the context variables by providing the arguments explicitly.
 `;
 
     const steps = workflow.steps;
-    if (openaiLLMs.find((f) => f === selectedLLM) && openAIApiKey) {
+    if (
+      selectedLLM?.type === "OpenAI" &&
+      (selectedLLM as OpenAI_LLM).apiKey
+    ) {
       const openAIClient = new OpenAI({
-        apiKey: openAIApiKey,
+        apiKey: (selectedLLM as OpenAI_LLM).apiKey,
         dangerouslyAllowBrowser: true,
       });
 
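For reference, the `OpenAI_LLM` type imported above is assumed to look roughly like the sketch below; the diff only relies on its `type`, `apiKey`, and `modelId` fields, and the actual definition in `./llm-selector` may differ.

```ts
// Sketch only: assumed shape of the selector entries used by this change.
// The "Claude" variant is inferred from the removed useClaudeConfiguration hooks.
export interface LLM {
  type: "OpenAI" | "Claude";
}

export interface OpenAI_LLM extends LLM {
  type: "OpenAI";
  apiKey: string;
  modelId: string; // passed as `model` to chat.completions.create later in this diff
}
```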
@@ -147,7 +154,7 @@
               id: msg.id,
               function: {
                 name: msg.name,
-                arguments: msg.arguments,
+                arguments: msg.argument,
               } as ChatCompletionMessageToolCall.Function,
             },
           ] as ChatCompletionMessageToolCall[],
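The switch from `msg.arguments` to `msg.argument` matches the tool-message shape written later in this diff (`argument: argumentJson`). A sketch of that assumed shape, inferred only from the fields the diff reads and writes:

```ts
// Sketch only: assumed tool-message entry in the chat history store.
// The singular `argument` field holds the JSON-encoded tool-call arguments.
interface ToolChatMessage {
  type: "tool";
  id: string;
  name: string;
  argument: string;
  displayValue: string;
  values: unknown[]; // the diff only ever writes `values: []`
  isExecuting: boolean;
}
```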
@@ -175,7 +182,8 @@
         "Number",
         "Boolean",
         "String[]",
-        "Integer",
+        "Int32",
+        "Int64",
         "Float",
         "Double",
       ];
@@ -187,6 +195,8 @@
         Boolean: "boolean",
         "String[]": "array",
         Integer: "integer",
+        Int32: "integer",
+        Int64: "integer",
         Float: "number",
         Double: "number",
       };
@@ -198,6 +208,8 @@
         Boolean: undefined,
         "String[]": "string",
         Integer: undefined,
+        Int32: undefined,
+        Int64: undefined,
         Float: undefined,
         Double: undefined,
       };
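The two lookup tables above presumably translate a StepWise parameter type into a JSON-schema property for the OpenAI tool definition: the first gives the property `type`, the second the element type for array parameters. A minimal sketch of that combination (`buildProperty` and the excerpted maps are illustrative, not names from the PR):

```ts
// Sketch only: combining the two maps into a JSON-schema property.
const jsonSchemaType: Record<string, string> = {
  Boolean: "boolean",
  "String[]": "array",
  Int32: "integer",
  Int64: "integer",
  Float: "number",
  Double: "number",
};

// Element type for array-like parameters; scalars map to undefined.
const jsonSchemaItemType: Record<string, string | undefined> = {
  Boolean: undefined,
  "String[]": "string",
  Int32: undefined,
  Int64: undefined,
  Float: undefined,
  Double: undefined,
};

function buildProperty(parameterType: string) {
  const type = jsonSchemaType[parameterType];
  const itemType = jsonSchemaItemType[parameterType];
  return itemType ? { type, items: { type: itemType } } : { type };
}

// buildProperty("Int64")    -> { type: "integer" }
// buildProperty("String[]") -> { type: "array", items: { type: "string" } }
```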
@@ -242,7 +254,7 @@
           const chatCompletion =
             await openAIClient.chat.completions.create({
               messages: [systemMessage, ...openAIChatHistory],
-              model: selectedLLM as ChatModel,
+              model: (selectedLLM as OpenAI_LLM).modelId,
               tool_choice: "auto",
               tools: tools,
               parallel_tool_calls: false,
@@ -296,11 +308,13 @@
             })
             .filter((v) => v !== undefined);
 
+          console.log(argumentsArray);
+
           // merge the arguments with the context variables
-          // remove the context variables that are overriden by the arguments
-          const mergedVariables = argumentsArray.filter(
+          // and override the context variables with the arguments
+          const mergedVariables = contextVariables.filter(
             (v) =>
-              !contextVariables.find(
+              !argumentsArray.find(
                 (a) => a.result?.name === v.result?.name,
               ),
           );
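With this change, `mergedVariables` keeps only the context variables that are not shadowed by an explicit tool-call argument, and the arguments themselves are appended in the `executeStep` call in the next hunk, so they take precedence. A self-contained sketch of that behaviour, assuming variables are compared by `result?.name`:

```ts
// Sketch only: the merge/override semantics implemented by the hunk above and
// by the executeStep call in the next hunk.
type Variable = { result?: { name?: string } };

function mergeVariables<T extends Variable>(
  contextVariables: T[],
  argumentsArray: T[],
): T[] {
  const merged = contextVariables.filter(
    (v) => !argumentsArray.find((a) => a.result?.name === v.result?.name),
  );
  return [...merged, ...argumentsArray]; // explicit arguments win
}
```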
@@ -309,15 +323,15 @@
             type: "tool",
             id: tool.id,
             name: toolName,
-            arguments: argumentJson,
+            argument: argumentJson,
             displayValue: "",
             values: [],
             isExecuting: true,
           };
           addMessage(toolMessage);
           const newStepRunHistory = await executeStep(step, [
-            ...contextVariables,
             ...mergedVariables,
+            ...argumentsArray,
           ]);
 
           if (newStepRunHistory.length > 0) {
@@ -452,7 +466,7 @@
                 chatHistory,
               )
             }
-            disabled={busy || message === ""}
+            disabled={busy || message === "" || selectedLLM === undefined}
             tooltip="Send message (Ctrl + Enter)"
           >
             <SendHorizonal />