@@ -18,17 +18,15 @@ import type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager
1818import type { OrchestrationCallOptions } from './types.js' ;
1919import type { HttpDestinationOrFetchOptions } from '@sap-cloud-sdk/connectivity' ;
2020
21- // TODO: Update all docs
22-
2321/**
24- * LangChain chat client for Azure OpenAI consumption on SAP BTP .
22+ * The Orchestration client.
2523 */
2624export class OrchestrationClient extends BaseChatModel <
2725 OrchestrationCallOptions ,
2826 OrchestrationMessageChunk
2927> {
3028 constructor (
31- // Omit streaming until supported
29+ // TODO: Omit streaming until supported
3230 public orchestrationConfig : Omit < OrchestrationModuleConfig , 'streaming' > ,
3331 public langchainOptions : BaseChatModelParams = { } ,
3432 public deploymentConfig ?: ResourceGroupConfig ,
@@ -45,8 +43,8 @@ export class OrchestrationClient extends BaseChatModel<
4543 * Decisions:
4644 * bind only supports ParsedCallOptions, we don't support arbitrary LLM options, only tool calls & default BaseLanguageModelCallOptions, e.g. stop ✅
4745 * this aligns with other vendors' client designs (e.g. openai, google) ✅
48- * top of the array (array[array.length - 1]) contains the current message, everything before then is history.
49- * Module results are part of our own message type, which extends AI Message to work with all other langchain functionality.
 46+ * inputParams are a separate call option, history = history ✅
 47+ * Module results are part of our own message type, which extends AI Message to work with all other langchain functionality. ✅
5048 *
5149 * For timeout, we need to apply our own middleware, it is not handled by langchain. ✅.
5250 */
@@ -71,7 +69,6 @@ export class OrchestrationClient extends BaseChatModel<
7169 signal : options . signal
7270 } ,
7371 ( ) => {
74- // consider this.tools & this.stop property, merge it with template orchestration config
7572 const { inputParams } = options ;
7673 const mergedOrchestrationConfig =
7774 this . mergeOrchestrationConfig ( options ) ;
@@ -88,8 +85,6 @@ export class OrchestrationClient extends BaseChatModel<
8885 } ;
8986 return orchestrationClient . chatCompletion (
9087 {
91- // how to handle tools here? doesn't really exist as input in orchestration as message history
92- // make template a call option, to merge it ??
9388 messagesHistory,
9489 inputParams
9590 } ,
@@ -100,7 +95,7 @@ export class OrchestrationClient extends BaseChatModel<
10095
10196 const content = res . getContent ( ) ;
10297
103- // we currently do not support streaming
98+ // TODO: Add streaming as soon as we support it
10499 await runManager ?. handleLLMNewToken (
105100 typeof content === 'string' ? content : ''
106101 ) ;
0 commit comments