Class: Groq
Unified language model interface
Extends
Constructors
new Groq()
new Groq(init?): Groq
Parameters
• init?: Partial<OpenAI> & object
Returns
Overrides
Defined in
packages/llm/groq/dist/index.d.ts:5
Properties
additionalChatOptions?
optional
additionalChatOptions: OpenAIAdditionalChatOptions
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:232
additionalSessionOptions?
optional
additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "maxRetries" | "timeout">
Inherited from
OpenAI
.additionalSessionOptions
Defined in
packages/llm/openai/dist/index.d.ts:237
apiKey?
optional
apiKey: string
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:233
maxRetries
maxRetries: number
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:234
maxTokens?
optional
maxTokens: number
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:231
model
model: string & object | ChatModel
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:228
session
session: OpenAISession
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:236
temperature
temperature: number
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:229
timeout?
optional
timeout: number
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:235
topP
topP: number
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:230
Accessors
metadata
get
metadata(): LLMMetadata
Returns
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:242
supportToolCall
get
supportToolCall(): boolean
Returns
boolean
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:241
Methods
chat()
chat(params)
chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Get a chat response from the LLM
Parameters
• params: LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:245
chat(params)
chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Get a chat response from the LLM
Parameters
• params: LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:246
complete()
complete(params)
complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>
Get a prompt completion from the LLM
Parameters
• params: LLMCompletionParamsStreaming
Returns
Promise<AsyncIterable<CompletionResponse, any, any>>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:168
complete(params)
complete(params): Promise<CompletionResponse>
Get a prompt completion from the LLM
Parameters
• params: LLMCompletionParamsNonStreaming
Returns
Promise<CompletionResponse>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:169
streamChat()
protected
streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>
Parameters
• baseRequestParams: ChatCompletionCreateParams
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:247
toOpenAIMessage()
static
toOpenAIMessage(messages): ChatCompletionMessageParam[]
Parameters
• messages: ChatMessage<ToolCallLLMMessageOptions>[]
Returns
ChatCompletionMessageParam[]
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:244
toOpenAIRole()
static
toOpenAIRole(messageType): ChatCompletionRole
Parameters
• messageType: MessageType
Returns
ChatCompletionRole
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:243
toTool()
static
toTool(tool): ChatCompletionTool
Parameters
• tool: BaseTool<any>
Returns
ChatCompletionTool
Inherited from
Defined in
packages/llm/openai/dist/index.d.ts:248