Commit 1308324

remove unused messageAddedEvents variable
1 parent a01902d commit 1308324

File tree

1 file changed: +12 -18 lines changed
  • x-pack/solutions/observability/test/api_integration_deployment_agnostic/apis/ai_assistant/complete


x-pack/solutions/observability/test/api_integration_deployment_agnostic/apis/ai_assistant/complete/complete.spec.ts

Lines changed: 12 additions & 18 deletions
@@ -615,7 +615,6 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
     });
 
     describe('when the LLM calls a tool that is not available', function () {
-      let messageAddedEvents: MessageAddEvent[];
       let fullConversation: Conversation;
       before(async () => {
         void proxy.interceptTitle('LLM-generated title');
@@ -631,14 +630,12 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
 
         void proxy.interceptWithResponse('Hello from LLM Proxy, again!');
 
-        const { messageAddedEvents: messageAddedEventsResponse, conversationCreateEvent } =
-          await chatComplete({
-            userPrompt: 'user prompt test spec',
-            connectorId,
-            persist: true,
-            observabilityAIAssistantAPIClient,
-          });
-        messageAddedEvents = messageAddedEventsResponse;
+        const { conversationCreateEvent } = await chatComplete({
+          userPrompt: 'user prompt test spec',
+          connectorId,
+          persist: true,
+          observabilityAIAssistantAPIClient,
+        });
 
         await proxy.waitForAllInterceptorsToHaveBeenCalled();
 
@@ -725,7 +722,6 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
     });
 
     describe('when the LLM calls a tool with invalid arguments', function () {
-      let messageAddedEvents: MessageAddEvent[];
       let fullConversation: Conversation;
       before(async () => {
         void proxy.interceptTitle('LLM-generated title');
@@ -742,14 +738,12 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
         void proxy.interceptWithResponse('I will not call the kibana function!');
         void proxy.interceptWithResponse('Hello from LLM Proxy, again!');
 
-        const { messageAddedEvents: messageAddedEventsResponse, conversationCreateEvent } =
-          await chatComplete({
-            userPrompt: 'user prompt test spec',
-            connectorId,
-            persist: true,
-            observabilityAIAssistantAPIClient,
-          });
-        messageAddedEvents = messageAddedEventsResponse;
+        const { conversationCreateEvent } = await chatComplete({
+          userPrompt: 'user prompt test spec',
+          connectorId,
+          persist: true,
+          observabilityAIAssistantAPIClient,
+        });
 
         await proxy.waitForAllInterceptorsToHaveBeenCalled();
 
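For illustration only, a minimal self-contained TypeScript sketch of the pattern this commit applies: destructure only the property the test still reads, so no unused binding (and no unused MessageAddEvent[] declaration) is left behind. The fetchChat helper and its return shape below are hypothetical stand-ins, not the spec's real chatComplete API.

// Hypothetical stand-in for the spec's chatComplete() helper: it resolves with
// several fields, but this caller only needs conversationCreateEvent.
interface ChatResult {
  messageAddedEvents: Array<{ id: string }>;
  conversationCreateEvent: { conversationId: string };
}

async function fetchChat(): Promise<ChatResult> {
  return {
    messageAddedEvents: [{ id: 'msg-1' }, { id: 'msg-2' }],
    conversationCreateEvent: { conversationId: 'conversation-1' },
  };
}

async function run(): Promise<void> {
  // Destructure only what is read afterwards; also pulling out
  // messageAddedEvents here would recreate the unused variable the commit removes.
  const { conversationCreateEvent } = await fetchChat();
  console.log(conversationCreateEvent.conversationId);
}

void run();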
0 commit comments
