LLMRequest: {
    abortSignal?: AbortSignal;
    maxTokens?: number;
    messages: LanguageModelV1Prompt;
    temperature?: number;
    toolChoice?: LanguageModelV1ToolChoice;
    tools?: LanguageModelV1FunctionTool[];
    topK?: number;
    topP?: number;
}
Type declaration

abortSignal?: AbortSignal
    Optional. Signal that can be used to abort the request.
maxTokens?: number
    Optional. Maximum number of tokens to generate.
messages: LanguageModelV1Prompt
    The conversation messages sent to the model.
temperature?: number
    Optional. Sampling temperature; lower values make the output more deterministic.
toolChoice?: LanguageModelV1ToolChoice
    Optional. Controls whether and which tool the model may call.
tools?: LanguageModelV1FunctionTool[]
    Optional. Function tools the model may call.
topK?: number
    Optional. Top-k sampling: only the k most likely tokens are considered.
topP?: number
    Optional. Nucleus (top-p) sampling cutoff.
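
Example usage (an illustrative sketch, not part of the generated reference): the snippet below constructs an LLMRequest for a single user turn. It assumes the role-plus-content-part message shape commonly used for LanguageModelV1Prompt; the prompt text, parameter values, and AbortController wiring are illustrative assumptions, so check the LanguageModelV1Prompt definition in your version for the exact message format.

// Sketch: building an LLMRequest with common sampling options.
const controller = new AbortController();

const request: LLMRequest = {
  // Assumed LanguageModelV1Prompt shape: messages with role + content parts.
  messages: [
    { role: 'user', content: [{ type: 'text', text: 'Summarize these notes.' }] },
  ],
  maxTokens: 512,                 // cap on generated tokens
  temperature: 0.2,               // lower = more deterministic sampling
  topP: 0.9,                      // nucleus sampling cutoff
  abortSignal: controller.signal, // lets the caller cancel the request
};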