diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index ac2e375cb7789..1f834910ba330 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1261,7 +1261,7 @@ packages/kbn-monaco/src/esql @elastic/kibana-esql /x-pack/test/observability_ai_assistant_functional @elastic/obs-ai-assistant /x-pack/test_serverless/**/test_suites/observability/ai_assistant @elastic/obs-ai-assistant /x-pack/test/functional/es_archives/observability/ai_assistant @elastic/obs-ai-assistant - +/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant @elastic/obs-ai-assistant # Infra Obs ## This plugin mostly contains the codebase for the infra services, but also includes some code for the Logs UI app. ## To keep @elastic/obs-ux-logs-team as codeowner of the plugin manifest without requiring a review for all the other code changes diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/chat/chat.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/chat/chat.spec.ts similarity index 64% rename from x-pack/test/observability_ai_assistant_api_integration/tests/chat/chat.spec.ts rename to x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/chat/chat.spec.ts index 04139a574a216..d46489c42f18c 100644 --- a/x-pack/test/observability_ai_assistant_api_integration/tests/chat/chat.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/chat/chat.spec.ts @@ -8,17 +8,16 @@ import expect from '@kbn/expect'; import { MessageRole, type Message } from '@kbn/observability-ai-assistant-plugin/common'; import { PassThrough } from 'stream'; -import { createLlmProxy, LlmProxy } from '../../common/create_llm_proxy'; -import { FtrProviderContext } from '../../common/ftr_provider_context'; -import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; -import { ForbiddenApiError } from '../../common/config'; - -export default function ApiTest({ getService }: FtrProviderContext) { - const supertest = getService('supertest'); +import { + LlmProxy, + createLlmProxy, +} from '../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy'; +import { SupertestWithRoleScope } from '../../../../services/role_scoped_supertest'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { const log = getService('log'); - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); - - const CHAT_API_URL = `/internal/observability_ai_assistant/chat`; + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); const messages: Message[] = [ { @@ -37,37 +36,50 @@ export default function ApiTest({ getService }: FtrProviderContext) { }, ]; - describe('/internal/observability_ai_assistant/chat', () => { + describe('/internal/observability_ai_assistant/chat', function () { + // TODO: https://github.com/elastic/kibana/issues/192751 + this.tags(['skipMKI']); let proxy: LlmProxy; let connectorId: string; before(async () => { proxy = await createLlmProxy(log); - connectorId = await createProxyActionConnector({ supertest, log, port: proxy.getPort() }); + connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({ + port: proxy.getPort(), + }); }); after(async () => { proxy.close(); - await deleteActionConnector({ supertest, connectorId, log }); + await 
observabilityAIAssistantAPIClient.deleteActionConnector({ + actionId: connectorId, + }); }); it("returns a 4xx if the connector doesn't exist", async () => { - await supertest - .post(CHAT_API_URL) - .set('kbn-xsrf', 'foo') - .send({ - name: 'my_api_call', - messages, - connectorId: 'does not exist', - functions: [], - scopes: ['all'], - }) - .expect(404); + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/chat', + params: { + body: { + name: 'my_api_call', + messages, + connectorId: 'does not exist', + functions: [], + scopes: ['all'], + }, + }, + }); + expect(status).to.be(404); }); - it('returns a streaming response from the server', async () => { const NUM_RESPONSES = 5; + const roleScopedSupertest = getService('roleScopedSupertest'); + const supertestEditorWithCookieCredentials: SupertestWithRoleScope = + await roleScopedSupertest.getSupertestWithRoleScope('editor', { + useCookieHeader: true, + withInternalHeaders: true, + }); await Promise.race([ new Promise((resolve, reject) => { @@ -81,9 +93,8 @@ export default function ApiTest({ getService }: FtrProviderContext) { const receivedChunks: Array> = []; const passThrough = new PassThrough(); - supertest - .post(CHAT_API_URL) - .set('kbn-xsrf', 'foo') + supertestEditorWithCookieCredentials + .post('/internal/observability_ai_assistant/chat') .on('error', reject) .send({ name: 'my_api_call', @@ -136,26 +147,21 @@ export default function ApiTest({ getService }: FtrProviderContext) { }), ]); }); - describe('security roles and access privileges', () => { it('should deny access for users without the ai_assistant privilege', async () => { - try { - await observabilityAIAssistantAPIClient.unauthorizedUser({ - endpoint: `POST ${CHAT_API_URL}`, - params: { - body: { - name: 'my_api_call', - messages, - connectorId, - functions: [], - scopes: ['all'], - }, + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: 'POST /internal/observability_ai_assistant/chat', + params: { + body: { + name: 'my_api_call', + messages, + connectorId, + functions: [], + scopes: ['all'], }, - }); - throw new ForbiddenApiError('Expected unauthorizedUser() to throw a 403 Forbidden error'); - } catch (e) { - expect(e.status).to.be(403); - } + }, + }); + expect(status).to.be(403); }); }); }); diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/complete.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/complete.spec.ts new file mode 100644 index 0000000000000..cfa49502b153c --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/complete.spec.ts @@ -0,0 +1,550 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+import { Response } from 'supertest';
+import { MessageRole, type Message } from '@kbn/observability-ai-assistant-plugin/common';
+import { omit, pick } from 'lodash';
+import { PassThrough } from 'stream';
+import expect from '@kbn/expect';
+import {
+  ChatCompletionChunkEvent,
+  ConversationCreateEvent,
+  ConversationUpdateEvent,
+  MessageAddEvent,
+  StreamingChatResponseEvent,
+  StreamingChatResponseEventType,
+} from '@kbn/observability-ai-assistant-plugin/common/conversation_complete';
+import { ObservabilityAIAssistantScreenContextRequest } from '@kbn/observability-ai-assistant-plugin/common/types';
+import {
+  createLlmProxy,
+  isFunctionTitleRequest,
+  LlmProxy,
+  LlmResponseSimulator,
+} from '../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy';
+import { createOpenAiChunk } from '../../../../../../observability_ai_assistant_api_integration/common/create_openai_chunk';
+import {
+  decodeEvents,
+  getConversationCreatedEvent,
+  getConversationUpdatedEvent,
+} from '../../../../../../observability_ai_assistant_api_integration/tests/helpers';
+import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
+import { SupertestWithRoleScope } from '../../../../services/role_scoped_supertest';
+
+export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
+  const log = getService('log');
+  const roleScopedSupertest = getService('roleScopedSupertest');
+
+  const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
+
+  const messages: Message[] = [
+    {
+      '@timestamp': new Date().toISOString(),
+      message: {
+        role: MessageRole.System,
+        content: 'You are a helpful assistant',
+      },
+    },
+    {
+      '@timestamp': new Date().toISOString(),
+      message: {
+        role: MessageRole.User,
+        content: 'Good morning, bot!',
+        // make sure it doesn't 400 on `data` being set
+        data: '{}',
+      },
+    },
+  ];
+
+  describe('/internal/observability_ai_assistant/chat/complete', () => {
+    let proxy: LlmProxy;
+    let connectorId: string;
+
+    async function getEvents(
+      params: { screenContexts?: ObservabilityAIAssistantScreenContextRequest[] },
+      cb: (conversationSimulator: LlmResponseSimulator) => Promise<void>
+    ) {
+      const titleInterceptor = proxy.intercept('title', (body) => isFunctionTitleRequest(body));
+
+      const conversationInterceptor = proxy.intercept(
+        'conversation',
+        (body) => !isFunctionTitleRequest(body)
+      );
+
+      const supertestEditorWithCookieCredentials: SupertestWithRoleScope =
+        await roleScopedSupertest.getSupertestWithRoleScope('editor', {
+          useCookieHeader: true,
+          withInternalHeaders: true,
+        });
+
+      const responsePromise = new Promise<Response>((resolve, reject) => {
+        supertestEditorWithCookieCredentials
+          .post('/internal/observability_ai_assistant/chat/complete')
+          .set('kbn-xsrf', 'foo')
+          .send({
+            messages,
+            connectorId,
+            persist: true,
+            screenContexts: params.screenContexts || [],
+            scopes: ['all'],
+          })
+          .then((response) => resolve(response))
+          .catch((err) => reject(err));
+      });
+
+      const [conversationSimulator, titleSimulator] = await Promise.all([
+        conversationInterceptor.waitForIntercept(),
+        titleInterceptor.waitForIntercept(),
+      ]);
+
+      await titleSimulator.status(200);
+      await titleSimulator.next('My generated title');
+      await titleSimulator.tokenCount({ completion: 5, prompt: 10, total: 15 });
+      await titleSimulator.complete();
+
+      await conversationSimulator.status(200);
+      await cb(conversationSimulator);
+
+      const response = await responsePromise;
+
+      return String(response.body)
+        .split('\n')
+        .map((line) => line.trim())
+        .filter(Boolean)
+        .map((line) => JSON.parse(line) as StreamingChatResponseEvent)
+        .slice(2); // ignore context request/response, we're testing this elsewhere
+    }
+
+    before(async () => {
+      proxy = await createLlmProxy(log);
+      connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({
+        port: proxy.getPort(),
+      });
+    });
+
+    after(async () => {
+      proxy.close();
+      await observabilityAIAssistantAPIClient.deleteActionConnector({
+        actionId: connectorId,
+      });
+    });
+
+    it('returns a streaming response from the server', async () => {
+      const interceptor = proxy.intercept('conversation', () => true);
+
+      const receivedChunks: any[] = [];
+
+      const passThrough = new PassThrough();
+
+      const supertestEditorWithCookieCredentials: SupertestWithRoleScope =
+        await roleScopedSupertest.getSupertestWithRoleScope('editor', {
+          useCookieHeader: true,
+          withInternalHeaders: true,
+        });
+
+      supertestEditorWithCookieCredentials
+        .post('/internal/observability_ai_assistant/chat/complete')
+        .set('kbn-xsrf', 'foo')
+        .send({
+          messages,
+          connectorId,
+          persist: false,
+          screenContexts: [],
+          scopes: ['all'],
+        })
+        .pipe(passThrough);
+
+      passThrough.on('data', (chunk) => {
+        receivedChunks.push(chunk.toString());
+      });
+
+      const simulator = await interceptor.waitForIntercept();
+
+      await simulator.status(200);
+      const chunk = JSON.stringify(createOpenAiChunk('Hello'));
+
+      await simulator.rawWrite(`data: ${chunk.substring(0, 10)}`);
+      await simulator.rawWrite(`${chunk.substring(10)}\n\n`);
+      await simulator.tokenCount({ completion: 20, prompt: 33, total: 53 });
+      await simulator.complete();
+
+      await new Promise<void>((resolve) => passThrough.on('end', () => resolve()));
+
+      const parsedEvents = decodeEvents(receivedChunks.join(''));
+
+      expect(
+        parsedEvents
+          .map((event) => event.type)
+          .filter((eventType) => eventType !== StreamingChatResponseEventType.BufferFlush)
+      ).to.eql([
+        StreamingChatResponseEventType.MessageAdd,
+        StreamingChatResponseEventType.MessageAdd,
+        StreamingChatResponseEventType.ChatCompletionChunk,
+        StreamingChatResponseEventType.ChatCompletionMessage,
+        StreamingChatResponseEventType.MessageAdd,
+      ]);
+
+      const messageEvents = parsedEvents.filter(
+        (msg): msg is MessageAddEvent => msg.type === StreamingChatResponseEventType.MessageAdd
+      );
+
+      const chunkEvents = parsedEvents.filter(
+        (msg): msg is ChatCompletionChunkEvent =>
+          msg.type === StreamingChatResponseEventType.ChatCompletionChunk
+      );
+
+      expect(omit(messageEvents[0], 'id', 'message.@timestamp')).to.eql({
+        type: StreamingChatResponseEventType.MessageAdd,
+        message: {
+          message: {
+            content: '',
+            role: MessageRole.Assistant,
+            function_call: {
+              name: 'context',
+              trigger: MessageRole.Assistant,
+            },
+          },
+        },
+      });
+
+      expect(omit(messageEvents[1], 'id', 'message.@timestamp')).to.eql({
+        type: StreamingChatResponseEventType.MessageAdd,
+        message: {
+          message: {
+            role: MessageRole.User,
+            name: 'context',
+            content: JSON.stringify({ screen_description: '', learnings: [] }),
+          },
+        },
+      });
+
+      expect(omit(chunkEvents[0], 'id')).to.eql({
+        type: StreamingChatResponseEventType.ChatCompletionChunk,
+        message: {
+          content: 'Hello',
+        },
+      });
+
+      expect(omit(messageEvents[2], 'id', 'message.@timestamp')).to.eql({
+        type: StreamingChatResponseEventType.MessageAdd,
+        message: {
+          message: {
+            content: 'Hello',
+            role: MessageRole.Assistant,
+            function_call: {
+              name: '',
+              arguments: '',
+              trigger: MessageRole.Assistant,
+            },
+          },
+
}, + }); + }); + + describe('when creating a new conversation', () => { + let events: StreamingChatResponseEvent[]; + + before(async () => { + events = await getEvents({}, async (conversationSimulator) => { + await conversationSimulator.next('Hello'); + await conversationSimulator.next(' again'); + await conversationSimulator.tokenCount({ completion: 0, prompt: 0, total: 0 }); + await conversationSimulator.complete(); + }).then((_events) => { + return _events.filter( + (event) => event.type !== StreamingChatResponseEventType.BufferFlush + ); + }); + }); + + it('creates a new conversation', async () => { + expect(omit(events[0], 'id')).to.eql({ + type: StreamingChatResponseEventType.ChatCompletionChunk, + message: { + content: 'Hello', + }, + }); + expect(omit(events[1], 'id')).to.eql({ + type: StreamingChatResponseEventType.ChatCompletionChunk, + message: { + content: ' again', + }, + }); + expect(omit(events[2], 'id', 'message.@timestamp')).to.eql({ + type: StreamingChatResponseEventType.ChatCompletionMessage, + message: { + content: 'Hello again', + }, + }); + expect(omit(events[3], 'id', 'message.@timestamp')).to.eql({ + type: StreamingChatResponseEventType.MessageAdd, + message: { + message: { + content: 'Hello again', + function_call: { + arguments: '', + name: '', + trigger: MessageRole.Assistant, + }, + role: MessageRole.Assistant, + }, + }, + }); + + expect( + omit( + events[4], + 'conversation.id', + 'conversation.last_updated', + 'conversation.token_count' + ) + ).to.eql({ + type: StreamingChatResponseEventType.ConversationCreate, + conversation: { + title: 'My generated title', + }, + }); + + const tokenCount = (events[4] as ConversationCreateEvent).conversation.token_count!; + + expect(tokenCount.completion).to.be.greaterThan(0); + expect(tokenCount.prompt).to.be.greaterThan(0); + + expect(tokenCount.total).to.eql(tokenCount.completion + tokenCount.prompt); + }); + + after(async () => { + const createdConversationId = events.filter( + (line): line is ConversationCreateEvent => + line.type === StreamingChatResponseEventType.ConversationCreate + )[0]?.conversation.id; + + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: createdConversationId, + }, + }, + }); + + expect(status).to.be(200); + }); + }); + + describe('after executing a screen context action', () => { + let events: StreamingChatResponseEvent[]; + + before(async () => { + events = await getEvents( + { + screenContexts: [ + { + actions: [ + { + name: 'my_action', + description: 'My action', + parameters: { + type: 'object', + properties: { + foo: { + type: 'string', + }, + }, + }, + }, + ], + }, + ], + }, + async (conversationSimulator) => { + await conversationSimulator.next({ + tool_calls: [ + { + id: 'fake-id', + index: 'fake-index', + function: { + name: 'my_action', + arguments: JSON.stringify({ foo: 'bar' }), + }, + }, + ], + }); + await conversationSimulator.tokenCount({ completion: 0, prompt: 0, total: 0 }); + await conversationSimulator.complete(); + } + ); + }); + + it('closes the stream without persisting the conversation', () => { + expect( + pick( + events[events.length - 1], + 'message.message.content', + 'message.message.function_call', + 'message.message.role' + ) + ).to.eql({ + message: { + message: { + content: '', + function_call: { + name: 'my_action', + arguments: JSON.stringify({ foo: 'bar' }), + trigger: MessageRole.Assistant, + }, + role: 
MessageRole.Assistant, + }, + }, + }); + }); + + it('does not store the conversation', async () => { + expect( + events.filter((event) => event.type === StreamingChatResponseEventType.ConversationCreate) + .length + ).to.eql(0); + + const conversations = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/conversations', + }); + + expect(conversations.status).to.be(200); + + expect(conversations.body.conversations.length).to.be(0); + }); + }); + + describe('when updating an existing conversation', () => { + let conversationCreatedEvent: ConversationCreateEvent; + let conversationUpdatedEvent: ConversationUpdateEvent; + + before(async () => { + void proxy + .intercept('conversation_title', (body) => isFunctionTitleRequest(body), [ + { + function_call: { + name: 'title_conversation', + arguments: JSON.stringify({ title: 'LLM-generated title' }), + }, + }, + ]) + .completeAfterIntercept(); + + void proxy + .intercept('conversation', (body) => !isFunctionTitleRequest(body), 'Good morning, sir!') + .completeAfterIntercept(); + + const createResponse = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/chat/complete', + params: { + body: { + messages, + connectorId, + persist: true, + screenContexts: [], + scopes: ['observability'], + }, + }, + }); + + expect(createResponse.status).to.be(200); + + await proxy.waitForAllInterceptorsSettled(); + + conversationCreatedEvent = getConversationCreatedEvent(createResponse.body); + + const conversationId = conversationCreatedEvent.conversation.id; + const fullConversation = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId, + }, + }, + }); + + void proxy + .intercept('conversation', (body) => !isFunctionTitleRequest(body), 'Good night, sir!') + .completeAfterIntercept(); + + const updatedResponse = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/chat/complete', + params: { + body: { + messages: [ + ...fullConversation.body.messages, + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.User, + content: 'Good night, bot!', + }, + }, + ], + connectorId, + persist: true, + screenContexts: [], + conversationId, + scopes: ['observability'], + }, + }, + }); + + expect(updatedResponse.status).to.be(200); + + await proxy.waitForAllInterceptorsSettled(); + + conversationUpdatedEvent = getConversationUpdatedEvent(updatedResponse.body); + }); + + after(async () => { + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: conversationCreatedEvent.conversation.id, + }, + }, + }); + + expect(status).to.be(200); + }); + + it('has correct token count for a new conversation', async () => { + expect(conversationCreatedEvent.conversation.token_count?.completion).to.be.greaterThan(0); + expect(conversationCreatedEvent.conversation.token_count?.prompt).to.be.greaterThan(0); + expect(conversationCreatedEvent.conversation.token_count?.total).to.be.greaterThan(0); + }); + + it('has correct token count for the updated conversation', async () => { + expect(conversationUpdatedEvent.conversation.token_count!.total).to.be.greaterThan( + conversationCreatedEvent.conversation.token_count!.total + ); + }); + }); + + // todo + 
it.skip('executes a function', async () => {}); + + describe('security roles and access privileges', () => { + it('should deny access for users without the ai_assistant privilege', async () => { + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: 'POST /internal/observability_ai_assistant/chat/complete', + params: { + body: { + messages, + connectorId, + persist: false, + screenContexts: [], + scopes: ['all'], + }, + }, + }); + expect(status).to.be(403); + }); + }); + }); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/alerts.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/alerts.spec.ts new file mode 100644 index 0000000000000..10aa695a6df59 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/alerts.spec.ts @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { MessageRole, MessageAddEvent } from '@kbn/observability-ai-assistant-plugin/common'; +import expect from '@kbn/expect'; +import { + LlmProxy, + createLlmProxy, +} from '../../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy'; +import { getMessageAddedEvents, invokeChatCompleteWithFunctionRequest } from './helpers'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../../ftr_provider_context'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const log = getService('log'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + + describe('when calling the alerts function', function () { + // TODO: https://github.com/elastic/kibana/issues/192751 + this.tags(['skipMKI']); + let proxy: LlmProxy; + let connectorId: string; + let alertsEvents: MessageAddEvent[]; + + const start = 'now-100h'; + const end = 'now'; + + before(async () => { + proxy = await createLlmProxy(log); + connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({ + port: proxy.getPort(), + }); + + void proxy + .intercept('conversation', () => true, 'Hello from LLM Proxy') + .completeAfterIntercept(); + + const alertsResponseBody = await invokeChatCompleteWithFunctionRequest({ + connectorId, + observabilityAIAssistantAPIClient, + functionCall: { + name: 'alerts', + trigger: MessageRole.Assistant, + arguments: JSON.stringify({ start, end }), + }, + }); + + await proxy.waitForAllInterceptorsSettled(); + + alertsEvents = getMessageAddedEvents(alertsResponseBody); + }); + + after(async () => { + proxy.close(); + await observabilityAIAssistantAPIClient.deleteActionConnector({ + actionId: connectorId, + }); + }); + + // This test ensures that invoking the alerts function does not result in an error. 
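+    // No alert data is indexed in this suite, so the function response asserted below is expected to contain zero alerts.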
+ it('should execute the function without any errors', async () => { + const alertsFunctionResponse = alertsEvents[0]; + expect(alertsFunctionResponse.message.message.name).to.be('alerts'); + + const parsedAlertsResponse = JSON.parse(alertsFunctionResponse.message.message.content!); + + expect(parsedAlertsResponse).not.to.have.property('error'); + expect(parsedAlertsResponse).to.have.property('total'); + expect(parsedAlertsResponse).to.have.property('alerts'); + expect(parsedAlertsResponse.alerts).to.be.an('array'); + expect(parsedAlertsResponse.total).to.be(0); + expect(parsedAlertsResponse.alerts.length).to.be(0); + }); + }); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/elasticsearch.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/elasticsearch.spec.ts new file mode 100644 index 0000000000000..f4ada4ada4724 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/elasticsearch.spec.ts @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { MessageAddEvent, MessageRole } from '@kbn/observability-ai-assistant-plugin/common'; +import expect from '@kbn/expect'; +import { apm, timerange } from '@kbn/apm-synthtrace-client'; +import { ApmSynthtraceEsClient } from '@kbn/apm-synthtrace'; +import { ELASTICSEARCH_FUNCTION_NAME } from '@kbn/observability-ai-assistant-plugin/server/functions/elasticsearch'; +import { + LlmProxy, + createLlmProxy, +} from '../../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy'; +import { getMessageAddedEvents, invokeChatCompleteWithFunctionRequest } from './helpers'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../../ftr_provider_context'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const log = getService('log'); + const synthtrace = getService('synthtrace'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + + describe('when calling elasticsearch', function () { + // TODO: https://github.com/elastic/kibana/issues/192751 + this.tags(['skipMKI']); + let proxy: LlmProxy; + let connectorId: string; + let events: MessageAddEvent[]; + let apmSynthtraceEsClient: ApmSynthtraceEsClient; + + before(async () => { + proxy = await createLlmProxy(log); + apmSynthtraceEsClient = await synthtrace.createApmSynthtraceEsClient(); + connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({ + port: proxy.getPort(), + }); + + // intercept the LLM request and return a fixed response + void proxy + .intercept('conversation', () => true, 'Hello from LLM Proxy') + .completeAfterIntercept(); + + await generateApmData(apmSynthtraceEsClient); + + const responseBody = await invokeChatCompleteWithFunctionRequest({ + connectorId, + observabilityAIAssistantAPIClient, + functionCall: { + name: ELASTICSEARCH_FUNCTION_NAME, + trigger: MessageRole.User, + arguments: JSON.stringify({ + method: 'POST', + path: 'traces*/_search', + body: { + size: 0, + aggs: { + services: { + terms: { + field: 'service.name', + }, + }, + }, + }, + }), + }, + }); + + await proxy.waitForAllInterceptorsSettled(); + + events = 
getMessageAddedEvents(responseBody); + }); + + after(async () => { + proxy.close(); + await observabilityAIAssistantAPIClient.deleteActionConnector({ + actionId: connectorId, + }); + await apmSynthtraceEsClient.clean(); + }); + + it('returns elasticsearch function response', async () => { + const esFunctionResponse = events[0]; + const parsedEsResponse = JSON.parse(esFunctionResponse.message.message.content!).response; + + expect(esFunctionResponse.message.message.name).to.be('elasticsearch'); + expect(parsedEsResponse.hits.total.value).to.be(15); + expect(parsedEsResponse.aggregations.services.buckets).to.eql([ + { key: 'java-backend', doc_count: 15 }, + ]); + expect(events.length).to.be(2); + }); + }); +} + +export async function generateApmData(apmSynthtraceEsClient: ApmSynthtraceEsClient) { + const serviceA = apm + .service({ name: 'java-backend', environment: 'production', agentName: 'java' }) + .instance('a'); + + const events = timerange('now-15m', 'now') + .interval('1m') + .rate(1) + .generator((timestamp) => { + return serviceA.transaction({ transactionName: 'tx' }).timestamp(timestamp).duration(1000); + }); + + return apmSynthtraceEsClient.index(events); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/helpers.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/helpers.ts new file mode 100644 index 0000000000000..b64295d3a255b --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/helpers.ts @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +import expect from '@kbn/expect'; +import { + Message, + MessageAddEvent, + MessageRole, + StreamingChatResponseEvent, +} from '@kbn/observability-ai-assistant-plugin/common'; +import { Readable } from 'stream'; +import type { AssistantScope } from '@kbn/ai-assistant-common'; +import type { ObservabilityAIAssistantApiClient } from '../../../../../services/observability_ai_assistant_api'; + +function decodeEvents(body: Readable | string) { + return String(body) + .split('\n') + .map((line) => line.trim()) + .filter(Boolean) + .map((line) => JSON.parse(line) as StreamingChatResponseEvent); +} + +export function getMessageAddedEvents(body: Readable | string) { + return decodeEvents(body).filter( + (event): event is MessageAddEvent => event.type === 'messageAdd' + ); +} + +export async function invokeChatCompleteWithFunctionRequest({ + connectorId, + observabilityAIAssistantAPIClient, + functionCall, + scopes, +}: { + connectorId: string; + observabilityAIAssistantAPIClient: ObservabilityAIAssistantApiClient; + functionCall: Message['message']['function_call']; + scopes?: AssistantScope[]; +}) { + const { status, body } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/chat/complete', + params: { + body: { + messages: [ + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.Assistant, + content: '', + function_call: functionCall, + }, + }, + ], + connectorId, + persist: false, + screenContexts: [], + scopes: scopes || ['observability' as AssistantScope], + }, + }, + }); + + expect(status).to.be(200); + + return body; +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/summarize.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/summarize.spec.ts new file mode 100644 index 0000000000000..3f90108f43d73 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/complete/functions/summarize.spec.ts @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { MessageRole } from '@kbn/observability-ai-assistant-plugin/common'; +import expect from '@kbn/expect'; +import { + LlmProxy, + createLlmProxy, +} from '../../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../../ftr_provider_context'; +import { invokeChatCompleteWithFunctionRequest } from './helpers'; +import { + TINY_ELSER, + clearKnowledgeBase, + createKnowledgeBaseModel, + deleteInferenceEndpoint, + deleteKnowledgeBaseModel, +} from '../../knowledge_base/helpers'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const log = getService('log'); + const ml = getService('ml'); + const es = getService('es'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + + describe('when calling summarize function', function () { + // TODO: https://github.com/elastic/kibana/issues/192751 + this.tags(['skipMKI']); + let proxy: LlmProxy; + let connectorId: string; + + before(async () => { + await createKnowledgeBaseModel(ml); + const { status } = await observabilityAIAssistantAPIClient.admin({ + endpoint: 'POST /internal/observability_ai_assistant/kb/setup', + params: { + query: { + model_id: TINY_ELSER.id, + }, + }, + }); + expect(status).to.be(200); + + proxy = await createLlmProxy(log); + connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({ + port: proxy.getPort(), + }); + + // intercept the LLM request and return a fixed response + void proxy + .intercept('conversation', () => true, 'Hello from LLM Proxy') + .completeAfterIntercept(); + + await invokeChatCompleteWithFunctionRequest({ + connectorId, + observabilityAIAssistantAPIClient, + functionCall: { + name: 'summarize', + trigger: MessageRole.User, + arguments: JSON.stringify({ + title: 'My Title', + text: 'Hello world', + is_correction: false, + confidence: 'high', + public: false, + }), + }, + }); + + await proxy.waitForAllInterceptorsSettled(); + }); + + after(async () => { + proxy.close(); + + await observabilityAIAssistantAPIClient.deleteActionConnector({ + actionId: connectorId, + }); + await deleteKnowledgeBaseModel(ml); + await clearKnowledgeBase(es); + await deleteInferenceEndpoint({ es }); + }); + + it('persists entry in knowledge base', async () => { + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { + query: '', + sortBy: 'title', + sortDirection: 'asc', + }, + }, + }); + + const { role, public: isPublic, text, type, user, title } = res.body.entries[0]; + + expect(role).to.eql('assistant_summarization'); + expect(isPublic).to.eql(false); + expect(text).to.eql('Hello world'); + expect(type).to.eql('contextual'); + expect(user?.name).to.eql('elastic_editor'); + expect(title).to.eql('My Title'); + expect(res.body.entries).to.have.length(1); + }); + }); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/connectors/connectors.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/connectors/connectors.spec.ts new file mode 100644 index 0000000000000..43b3a4c824957 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/connectors/connectors.spec.ts @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import expect from '@kbn/expect'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + describe('List connectors', () => { + before(async () => { + await observabilityAIAssistantAPIClient.deleteAllActionConnectors(); + }); + + after(async () => { + await observabilityAIAssistantAPIClient.deleteAllActionConnectors(); + }); + + it('Returns a 2xx for enterprise license', async () => { + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/connectors', + }); + + expect(status).to.be(200); + }); + + it('returns an empty list of connectors', async () => { + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/connectors', + }); + + expect(res.body.length).to.be(0); + }); + + it("returns the gen ai connector if it's been created", async () => { + const connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({ + port: 1234, + }); + + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/connectors', + }); + + expect(res.body.length).to.be(1); + + await observabilityAIAssistantAPIClient.deleteActionConnector({ actionId: connectorId }); + }); + + describe('security roles and access privileges', () => { + it('should deny access for users without the ai_assistant privilege', async () => { + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: `GET /internal/observability_ai_assistant/connectors`, + }); + expect(status).to.be(403); + }); + }); + }); +} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/conversations.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/conversations/conversations.spec.ts similarity index 53% rename from x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/conversations.spec.ts rename to x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/conversations/conversations.spec.ts index 6656ea0407817..e667c75c1bd3c 100644 --- a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/conversations.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/conversations/conversations.spec.ts @@ -12,11 +12,11 @@ import { type ConversationUpdateRequest, MessageRole, } from '@kbn/observability-ai-assistant-plugin/common/types'; -import type { FtrProviderContext } from '../../common/ftr_provider_context'; -import type { SupertestReturnType } from '../../common/observability_ai_assistant_api_client'; +import type { SupertestReturnType } from '../../../../services/observability_ai_assistant_api'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; -export default function ApiTest({ getService }: FtrProviderContext) { - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const 
observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); const conversationCreate: ConversationCreateRequest = { '@timestamp': new Date().toISOString(), @@ -47,90 +47,84 @@ export default function ApiTest({ getService }: FtrProviderContext) { describe('Conversations', () => { describe('without conversations', () => { it('returns no conversations when listing', async () => { - const response = await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'POST /internal/observability_ai_assistant/conversations', - }) - .expect(200); + const { status, body } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/conversations', + }); - expect(response.body).to.eql({ conversations: [] }); + expect(status).to.be(200); + + expect(body).to.eql({ conversations: [] }); }); it('returns a 404 for updating conversations', async () => { - await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: 'non-existing-conversation-id', - }, - body: { - conversation: conversationUpdate, - }, + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: 'non-existing-conversation-id', + }, + body: { + conversation: conversationUpdate, }, - }) - .expect(404); + }, + }); + expect(status).to.be(404); }); it('returns a 404 for retrieving a conversation', async () => { - await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: 'my-conversation-id', - }, + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: 'my-conversation-id', }, - }) - .expect(404); + }, + }); + expect(status).to.be(404); }); }); - describe('when creating a conversation with the write user', function () { + describe('when creating a conversation with the write user', () => { let createResponse: Awaited< SupertestReturnType<'POST /internal/observability_ai_assistant/conversation'> >; - before(async () => { - createResponse = await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'POST /internal/observability_ai_assistant/conversation', - params: { - body: { - conversation: conversationCreate, - }, + createResponse = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/conversation', + params: { + body: { + conversation: conversationCreate, }, - }) - .expect(200); + }, + }); + expect(createResponse.status).to.be(200); }); after(async () => { - await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: createResponse.body.conversation.id, }, - }) - .expect(200); + }, + }); + expect(status).to.be(200); - await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', 
- params: { - path: { - conversationId: createResponse.body.conversation.id, - }, + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: createResponse.body.conversation.id, }, - }) - .expect(404); + }, + }); + expect(res.status).to.be(404); }); - it('returns the conversation', function () { + it('returns the conversation', () => { // delete user from response to avoid comparing it as it will be different in MKI - delete createResponse.body.user; + // delete createResponse.body.user; expect(createResponse.body).to.eql({ '@timestamp': createResponse.body['@timestamp'], conversation: { @@ -143,66 +137,64 @@ export default function ApiTest({ getService }: FtrProviderContext) { messages: conversationCreate.messages, namespace: 'default', public: conversationCreate.public, + user: { + id: 'u_gf3TRV5WWjD0PQCcTzkUyRE8By8uUt90gK-rT9ZPhA4_0', + name: 'elastic_editor', + }, }); }); it('returns a 404 for updating a non-existing conversation', async () => { - await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: 'non-existing-conversation-id', - }, - body: { - conversation: conversationUpdate, - }, + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: 'non-existing-conversation-id', }, - }) - .expect(404); + body: { + conversation: conversationUpdate, + }, + }, + }); + expect(status).to.be(404); }); it('returns a 404 for retrieving a non-existing conversation', async () => { - await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: 'non-existing-conversation-id', - }, + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: 'non-existing-conversation-id', }, - }) - .expect(404); + }, + }); + expect(status).to.be(404); }); it('returns the conversation that was created', async () => { - const response = await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, + const response = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: createResponse.body.conversation.id, }, - }) - .expect(200); + }, + }); + + expect(response.status).to.be(200); - // delete user from response to avoid comparing it as it will be different in MKI - delete response.body.user; expect(response.body).to.eql(createResponse.body); }); it('returns the created conversation when listing', async () => { - const response = await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'POST /internal/observability_ai_assistant/conversations', - }) - .expect(200); - // delete user from response to avoid comparing it as it will be different in MKI - delete response.body.conversations[0].user; + const response = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST 
/internal/observability_ai_assistant/conversations', + }); + + expect(response.status).to.be(200); + expect(response.body.conversations[0]).to.eql(createResponse.body); }); - // TODO it.skip('returns a 404 when reading it with another user', () => {}); @@ -212,21 +204,20 @@ export default function ApiTest({ getService }: FtrProviderContext) { >; before(async () => { - updateResponse = await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, - body: { - conversation: merge(omit(conversationUpdate, 'conversation.id'), { - conversation: { id: createResponse.body.conversation.id }, - }), - }, + updateResponse = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: createResponse.body.conversation.id, }, - }) - .expect(200); + body: { + conversation: merge(omit(conversationUpdate, 'conversation.id'), { + conversation: { id: createResponse.body.conversation.id }, + }), + }, + }, + }); + expect(updateResponse.status).to.be(200); }); it('returns the updated conversation as response', async () => { @@ -236,16 +227,16 @@ export default function ApiTest({ getService }: FtrProviderContext) { }); it('returns the updated conversation after get', async () => { - const updateAfterCreateResponse = await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, + const updateAfterCreateResponse = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: createResponse.body.conversation.id, }, - }) - .expect(200); + }, + }); + + expect(updateAfterCreateResponse.status).to.be(200); expect(updateAfterCreateResponse.body.conversation.title).to.eql( conversationUpdate.conversation.title @@ -253,101 +244,94 @@ export default function ApiTest({ getService }: FtrProviderContext) { }); }); }); - describe('security roles and access privileges', () => { describe('should deny access for users without the ai_assistant privilege', () => { let createResponse: Awaited< SupertestReturnType<'POST /internal/observability_ai_assistant/conversation'> >; before(async () => { - createResponse = await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'POST /internal/observability_ai_assistant/conversation', - params: { - body: { - conversation: conversationCreate, - }, + createResponse = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/conversation', + params: { + body: { + conversation: conversationCreate, }, - }) - .expect(200); + }, + }); + expect(createResponse.status).to.be(200); }); after(async () => { - await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, + const response = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: createResponse.body.conversation.id, }, - }) - .expect(200); + }, + }); + 
expect(response.status).to.be(200); }); it('POST /internal/observability_ai_assistant/conversation', async () => { - await observabilityAIAssistantAPIClient - .slsUnauthorized({ - endpoint: 'POST /internal/observability_ai_assistant/conversation', - params: { - body: { - conversation: conversationCreate, - }, + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: 'POST /internal/observability_ai_assistant/conversation', + params: { + body: { + conversation: conversationCreate, }, - }) - .expect(403); + }, + }); + + expect(status).to.be(403); }); it('POST /internal/observability_ai_assistant/conversations', async () => { - await observabilityAIAssistantAPIClient - .slsUnauthorized({ - endpoint: 'POST /internal/observability_ai_assistant/conversations', - }) - .expect(403); + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: 'POST /internal/observability_ai_assistant/conversations', + }); + expect(status).to.be(403); }); it('PUT /internal/observability_ai_assistant/conversation/{conversationId}', async () => { - await observabilityAIAssistantAPIClient - .slsUnauthorized({ - endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, - body: { - conversation: merge(omit(conversationUpdate, 'conversation.id'), { - conversation: { id: createResponse.body.conversation.id }, - }), - }, + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: createResponse.body.conversation.id, + }, + body: { + conversation: merge(omit(conversationUpdate, 'conversation.id'), { + conversation: { id: createResponse.body.conversation.id }, + }), }, - }) - .expect(403); + }, + }); + expect(status).to.be(403); }); it('GET /internal/observability_ai_assistant/conversation/{conversationId}', async () => { - await observabilityAIAssistantAPIClient - .slsUnauthorized({ - endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: createResponse.body.conversation.id, }, - }) - .expect(403); + }, + }); + expect(status).to.be(403); }); it('DELETE /internal/observability_ai_assistant/conversation/{conversationId}', async () => { - await observabilityAIAssistantAPIClient - .slsUnauthorized({ - endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId: createResponse.body.conversation.id, }, - }) - .expect(403); + }, + }); + expect(status).to.be(403); }); }); }); diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/conversations/helpers.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/helpers.ts similarity index 100% rename from x-pack/test/observability_ai_assistant_api_integration/tests/conversations/helpers.ts rename to 
x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/helpers.ts diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/index.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/index.ts new file mode 100644 index 0000000000000..3f756ecd11247 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/index.ts @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import type { DeploymentAgnosticFtrProviderContext } from '../../../ftr_provider_context'; + +export default function aiAssistantApiIntegrationTests({ + loadTestFile, +}: DeploymentAgnosticFtrProviderContext) { + describe('observability AI Assistant', function () { + loadTestFile(require.resolve('./conversations/conversations.spec.ts')); + loadTestFile(require.resolve('./connectors/connectors.spec.ts')); + loadTestFile(require.resolve('./chat/chat.spec.ts')); + loadTestFile(require.resolve('./complete/complete.spec.ts')); + loadTestFile(require.resolve('./complete/functions/alerts.spec.ts')); + loadTestFile(require.resolve('./complete/functions/elasticsearch.spec.ts')); + loadTestFile(require.resolve('./complete/functions/summarize.spec.ts')); + loadTestFile(require.resolve('./public_complete/public_complete.spec.ts')); + loadTestFile(require.resolve('./knowledge_base/knowledge_base_setup.spec.ts')); + loadTestFile(require.resolve('./knowledge_base/knowledge_base_migration.spec.ts')); + loadTestFile(require.resolve('./knowledge_base/knowledge_base_status.spec.ts')); + loadTestFile(require.resolve('./knowledge_base/knowledge_base.spec.ts')); + loadTestFile(require.resolve('./knowledge_base/knowledge_base_user_instructions.spec.ts')); + }); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/helpers.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/helpers.ts new file mode 100644 index 0000000000000..833cb0fd010cd --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/helpers.ts @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { Client } from '@elastic/elasticsearch'; +import { AI_ASSISTANT_KB_INFERENCE_ID } from '@kbn/observability-ai-assistant-plugin/server/service/inference_endpoint'; +import { MachineLearningProvider } from '../../../../../services/ml'; +import { SUPPORTED_TRAINED_MODELS } from '../../../../../../functional/services/ml/api'; + +export const TINY_ELSER = { + ...SUPPORTED_TRAINED_MODELS.TINY_ELSER, + id: SUPPORTED_TRAINED_MODELS.TINY_ELSER.name, +}; + +export async function createKnowledgeBaseModel(ml: ReturnType) { + const config = { + ...ml.api.getTrainedModelConfig(TINY_ELSER.name), + input: { + field_names: ['text_field'], + }, + }; + // necessary for MKI, check indices before importing model. 
compatible with stateful + await ml.api.assureMlStatsIndexExists(); + await ml.api.importTrainedModel(TINY_ELSER.name, TINY_ELSER.id, config); +} + +export async function deleteKnowledgeBaseModel(ml: ReturnType) { + await ml.api.stopTrainedModelDeploymentES(TINY_ELSER.id, true); + await ml.api.deleteTrainedModelES(TINY_ELSER.id); + await ml.testResources.cleanMLSavedObjects(); +} + +export async function clearKnowledgeBase(es: Client) { + const KB_INDEX = '.kibana-observability-ai-assistant-kb-*'; + + return es.deleteByQuery({ + index: KB_INDEX, + conflicts: 'proceed', + query: { match_all: {} }, + refresh: true, + }); +} + +export async function clearConversations(es: Client) { + const KB_INDEX = '.kibana-observability-ai-assistant-conversations-*'; + + return es.deleteByQuery({ + index: KB_INDEX, + conflicts: 'proceed', + query: { match_all: {} }, + refresh: true, + }); +} + +export async function deleteInferenceEndpoint({ + es, + name = AI_ASSISTANT_KB_INFERENCE_ID, +}: { + es: Client; + name?: string; +}) { + return es.inference.delete({ inference_id: name, force: true }); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base.spec.ts new file mode 100644 index 0000000000000..6496711a5bab0 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base.spec.ts @@ -0,0 +1,253 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import expect from '@kbn/expect'; +import { type KnowledgeBaseEntry } from '@kbn/observability-ai-assistant-plugin/common'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; +import { + TINY_ELSER, + clearKnowledgeBase, + createKnowledgeBaseModel, + deleteInferenceEndpoint, + deleteKnowledgeBaseModel, +} from './helpers'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const ml = getService('ml'); + const es = getService('es'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + + describe('Knowledge base', function () { + // TODO: https://github.com/elastic/kibana/issues/192886 kb/setup error + this.tags(['skipMKI']); + before(async () => { + await createKnowledgeBaseModel(ml); + + const { status } = await observabilityAIAssistantAPIClient.admin({ + endpoint: 'POST /internal/observability_ai_assistant/kb/setup', + params: { + query: { + model_id: TINY_ELSER.id, + }, + }, + }); + + expect(status).to.be(200); + }); + + after(async () => { + await deleteKnowledgeBaseModel(ml); + await deleteInferenceEndpoint({ es }); + await clearKnowledgeBase(es); + }); + + describe('when managing a single entry', () => { + const knowledgeBaseEntry = { + id: 'my-doc-id-1', + title: 'My title', + text: 'My content', + }; + it('returns 200 on create', async () => { + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/kb/entries/save', + params: { body: knowledgeBaseEntry }, + }); + expect(status).to.be(200); + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { + query: '', + sortBy: 'title', + sortDirection: 'asc', + }, + }, + }); + const entry = res.body.entries[0]; + expect(entry.id).to.equal(knowledgeBaseEntry.id); + expect(entry.title).to.equal(knowledgeBaseEntry.title); + expect(entry.text).to.equal(knowledgeBaseEntry.text); + }); + + it('returns 200 on get entries and entry exists', async () => { + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { + query: '', + sortBy: 'title', + sortDirection: 'asc', + }, + }, + }); + + expect(res.status).to.be(200); + const entry = res.body.entries[0]; + expect(entry.id).to.equal(knowledgeBaseEntry.id); + expect(entry.title).to.equal(knowledgeBaseEntry.title); + expect(entry.text).to.equal(knowledgeBaseEntry.text); + }); + + it('returns 200 on delete', async () => { + const entryId = 'my-doc-id-1'; + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'DELETE /internal/observability_ai_assistant/kb/entries/{entryId}', + params: { + path: { entryId }, + }, + }); + expect(status).to.be(200); + + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { + query: '', + sortBy: 'title', + sortDirection: 'asc', + }, + }, + }); + + expect(res.status).to.be(200); + expect(res.body.entries.filter((entry) => entry.id.startsWith('my-doc-id')).length).to.eql( + 0 + ); + }); + + it('returns 500 on delete not found', async () => { + const entryId = 'my-doc-id-1'; + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'DELETE /internal/observability_ai_assistant/kb/entries/{entryId}', + params: { + path: { entryId }, + }, + }); + 
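+        // 'my-doc-id-1' was already removed by the previous test, so this delete targets a missing entry; the API currently reports that as a 500 rather than a 404.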
expect(status).to.be(500); + }); + }); + + describe('when managing multiple entries', () => { + async function getEntries({ + query = '', + sortBy = 'title', + sortDirection = 'asc', + }: { query?: string; sortBy?: string; sortDirection?: 'asc' | 'desc' } = {}) { + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { query, sortBy, sortDirection }, + }, + }); + expect(res.status).to.be(200); + + return omitCategories(res.body.entries); + } + + beforeEach(async () => { + await clearKnowledgeBase(es); + + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/kb/entries/import', + params: { + body: { + entries: [ + { + id: 'my_doc_a', + title: 'My title a', + text: 'My content a', + }, + { + id: 'my_doc_b', + title: 'My title b', + text: 'My content b', + }, + { + id: 'my_doc_c', + title: 'My title c', + text: 'My content c', + }, + ], + }, + }, + }); + expect(status).to.be(200); + }); + + afterEach(async () => { + await clearKnowledgeBase(es); + }); + + it('returns 200 on create', async () => { + const entries = await getEntries(); + expect(omitCategories(entries).length).to.eql(3); + }); + + describe('when sorting ', () => { + const ascendingOrder = ['my_doc_a', 'my_doc_b', 'my_doc_c']; + + it('allows sorting ascending', async () => { + const entries = await getEntries({ sortBy: 'title', sortDirection: 'asc' }); + expect(entries.map(({ id }) => id)).to.eql(ascendingOrder); + }); + + it('allows sorting descending', async () => { + const entries = await getEntries({ sortBy: 'title', sortDirection: 'desc' }); + expect(entries.map(({ id }) => id)).to.eql([...ascendingOrder].reverse()); + }); + }); + + it('allows searching by title', async () => { + const entries = await getEntries({ query: 'b' }); + expect(entries.length).to.eql(1); + expect(entries[0].title).to.eql('My title b'); + }); + }); + + describe('security roles and access privileges', () => { + describe('should deny access for users without the ai_assistant privilege', () => { + it('POST /internal/observability_ai_assistant/kb/entries/save', async () => { + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: 'POST /internal/observability_ai_assistant/kb/entries/save', + params: { + body: { + id: 'my-doc-id-1', + title: 'My title', + text: 'My content', + }, + }, + }); + expect(status).to.be(403); + }); + + it('GET /internal/observability_ai_assistant/kb/entries', async () => { + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { query: '', sortBy: 'title', sortDirection: 'asc' }, + }, + }); + expect(status).to.be(403); + }); + + it('DELETE /internal/observability_ai_assistant/kb/entries/{entryId}', async () => { + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: 'DELETE /internal/observability_ai_assistant/kb/entries/{entryId}', + params: { + path: { entryId: 'my-doc-id-1' }, + }, + }); + expect(status).to.be(403); + }); + }); + }); + }); +} + +function omitCategories(entries: KnowledgeBaseEntry[]) { + return entries.filter((entry) => entry.labels?.category === undefined); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_migration.spec.ts 
b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_migration.spec.ts new file mode 100644 index 0000000000000..01d1f0638813d --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_migration.spec.ts @@ -0,0 +1,159 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { orderBy } from 'lodash'; +import expect from '@kbn/expect'; +import { AI_ASSISTANT_KB_INFERENCE_ID } from '@kbn/observability-ai-assistant-plugin/server/service/inference_endpoint'; +import { SearchResponse } from '@elastic/elasticsearch/lib/api/typesWithBodyKey'; +import { KnowledgeBaseEntry } from '@kbn/observability-ai-assistant-plugin/common'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; +import { + deleteKnowledgeBaseModel, + createKnowledgeBaseModel, + clearKnowledgeBase, + deleteInferenceEndpoint, + TINY_ELSER, +} from './helpers'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + const esArchiver = getService('esArchiver'); + const es = getService('es'); + const ml = getService('ml'); + + const archive = + 'x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15'; + + describe('When there are knowledge base entries (from 8.15 or earlier) that does not contain semantic_text embeddings', () => { + before(async () => { + await clearKnowledgeBase(es); + await esArchiver.load(archive); + await createKnowledgeBaseModel(ml); + const { status } = await observabilityAIAssistantAPIClient.admin({ + endpoint: 'POST /internal/observability_ai_assistant/kb/setup', + params: { + query: { + model_id: TINY_ELSER.id, + }, + }, + }); + + expect(status).to.be(200); + }); + + after(async () => { + await clearKnowledgeBase(es); + await esArchiver.unload(archive); + await deleteKnowledgeBaseModel(ml); + await deleteInferenceEndpoint({ es }); + }); + + async function getKnowledgeBaseEntries() { + const res = (await es.search({ + index: '.kibana-observability-ai-assistant-kb*', + body: { + query: { + match_all: {}, + }, + }, + })) as SearchResponse< + KnowledgeBaseEntry & { + semantic_text: { + text: string; + inference: { inference_id: string; chunks: Array<{ text: string; embeddings: any }> }; + }; + } + >; + + return res.hits.hits; + } + + describe('before migrating', () => { + it('the docs do not have semantic_text embeddings', async () => { + const hits = await getKnowledgeBaseEntries(); + const hasSemanticTextEmbeddings = hits.some((hit) => hit._source?.semantic_text); + expect(hasSemanticTextEmbeddings).to.be(false); + }); + }); + + describe('after migrating', () => { + before(async () => { + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/kb/semantic_text_migration', + }); + expect(status).to.be(200); + }); + + it('the docs have semantic_text embeddings', async () => { + const hits = await getKnowledgeBaseEntries(); + const hasSemanticTextEmbeddings = hits.every((hit) => hit._source?.semantic_text); + expect(hasSemanticTextEmbeddings).to.be(true); + + expect( + orderBy(hits, '_source.title').map(({ _source }) => 
{ + const { text, inference } = _source?.semantic_text!; + + return { + text, + inferenceId: inference.inference_id, + chunkCount: inference.chunks.length, + }; + }) + ).to.eql([ + { + text: 'To infinity and beyond!', + inferenceId: AI_ASSISTANT_KB_INFERENCE_ID, + chunkCount: 1, + }, + { + text: "The user's favourite color is blue.", + inferenceId: AI_ASSISTANT_KB_INFERENCE_ID, + chunkCount: 1, + }, + ]); + }); + + it('returns entries correctly via API', async () => { + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'POST /internal/observability_ai_assistant/kb/semantic_text_migration', + }); + + expect(status).to.be(200); + + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/entries', + params: { + query: { + query: '', + sortBy: 'title', + sortDirection: 'asc', + }, + }, + }); + + expect(res.status).to.be(200); + + expect( + res.body.entries.map(({ title, text, role, type }) => ({ title, text, role, type })) + ).to.eql([ + { + role: 'user_entry', + title: 'Toy Story quote', + type: 'contextual', + text: 'To infinity and beyond!', + }, + { + role: 'assistant_summarization', + title: "User's favourite color", + type: 'contextual', + text: "The user's favourite color is blue.", + }, + ]); + }); + }); + }); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_setup.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_setup.spec.ts new file mode 100644 index 0000000000000..1f2afc4b35d37 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_setup.spec.ts @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import expect from '@kbn/expect'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; +import { + deleteKnowledgeBaseModel, + createKnowledgeBaseModel, + TINY_ELSER, + deleteInferenceEndpoint, +} from './helpers'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const ml = getService('ml'); + const es = getService('es'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + + const KNOWLEDGE_BASE_SETUP_API_URL = '/internal/observability_ai_assistant/kb/setup'; + + describe('/internal/observability_ai_assistant/kb/setup', function () { + // TODO: https://github.com/elastic/kibana/issues/192886 kb/setup error + this.tags(['skipMKI']); + it('returns model info when successful', async () => { + await createKnowledgeBaseModel(ml); + const res = await observabilityAIAssistantAPIClient.admin({ + endpoint: `POST ${KNOWLEDGE_BASE_SETUP_API_URL}`, + params: { + query: { + model_id: TINY_ELSER.id, + }, + }, + }); + + expect(res.status).to.be(200); + + expect(res.body.service_settings.model_id).to.be('pt_tiny_elser'); + expect(res.body.inference_id).to.be('obs_ai_assistant_kb_inference'); + + await deleteKnowledgeBaseModel(ml); + await deleteInferenceEndpoint({ es }); + }); + + it('returns error message if model is not deployed', async () => { + const res = await observabilityAIAssistantAPIClient.admin({ + endpoint: `POST ${KNOWLEDGE_BASE_SETUP_API_URL}`, + params: { + query: { + model_id: TINY_ELSER.id, + }, + }, + }); + + expect(res.status).to.be(500); + + // @ts-expect-error + expect(res.body.message).to.include.string( + 'No known trained model with model_id [pt_tiny_elser]' + ); + + // @ts-expect-error + expect(res.body.statusCode).to.be(500); + }); + + describe('security roles and access privileges', () => { + it('should deny access for users without the ai_assistant privilege', async () => { + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: `POST ${KNOWLEDGE_BASE_SETUP_API_URL}`, + params: { + query: { + model_id: TINY_ELSER.id, + }, + }, + }); + expect(status).to.be(403); + }); + }); + }); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_status.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_status.spec.ts new file mode 100644 index 0000000000000..b0d8f3158f7ff --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_status.spec.ts @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import expect from '@kbn/expect'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; +import { + deleteKnowledgeBaseModel, + createKnowledgeBaseModel, + TINY_ELSER, + deleteInferenceEndpoint, +} from './helpers'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const ml = getService('ml'); + const es = getService('es'); + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + + const KNOWLEDGE_BASE_STATUS_API_URL = '/internal/observability_ai_assistant/kb/status'; + + describe('/internal/observability_ai_assistant/kb/status', () => { + beforeEach(async () => { + await createKnowledgeBaseModel(ml); + const { status } = await observabilityAIAssistantAPIClient.admin({ + endpoint: 'POST /internal/observability_ai_assistant/kb/setup', + params: { + query: { + model_id: TINY_ELSER.id, + }, + }, + }); + + expect(status).to.be(200); + }); + + afterEach(async () => { + await deleteKnowledgeBaseModel(ml).catch((e) => {}); + await deleteInferenceEndpoint({ es }).catch((e) => {}); + }); + + it('returns correct status after knowledge base is setup', async () => { + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: `GET ${KNOWLEDGE_BASE_STATUS_API_URL}`, + }); + + expect(res.status).to.be(200); + + expect(res.body.ready).to.be(true); + expect(res.body.enabled).to.be(true); + expect(res.body.endpoint?.service_settings?.model_id).to.eql(TINY_ELSER.id); + }); + + it('returns correct status after model is deleted', async () => { + await deleteKnowledgeBaseModel(ml); + + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: `GET ${KNOWLEDGE_BASE_STATUS_API_URL}`, + }); + + expect(res.status).to.be(200); + + expect(res.body.ready).to.be(false); + expect(res.body.enabled).to.be(true); + expect(res.body.errorMessage).to.include.string( + 'No known trained model with model_id [pt_tiny_elser]' + ); + }); + + it('returns correct status after inference endpoint is deleted', async () => { + await deleteInferenceEndpoint({ es }); + + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: `GET ${KNOWLEDGE_BASE_STATUS_API_URL}`, + }); + + expect(res.status).to.be(200); + + expect(res.body.ready).to.be(false); + expect(res.body.enabled).to.be(true); + expect(res.body.errorMessage).to.include.string( + 'Inference endpoint not found [obs_ai_assistant_kb_inference]' + ); + }); + + describe('security roles and access privileges', () => { + it('should deny access for users without the ai_assistant privilege', async () => { + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: `GET ${KNOWLEDGE_BASE_STATUS_API_URL}`, + }); + expect(status).to.be(403); + }); + }); + }); +} diff --git a/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_user_instructions.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_user_instructions.spec.ts new file mode 100644 index 0000000000000..2c408a3308cb4 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_user_instructions.spec.ts @@ -0,0 +1,395 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import expect from '@kbn/expect'; +import { sortBy } from 'lodash'; +import { Message, MessageRole } from '@kbn/observability-ai-assistant-plugin/common'; +import { CONTEXT_FUNCTION_NAME } from '@kbn/observability-ai-assistant-plugin/server/functions/context'; +import { Instruction } from '@kbn/observability-ai-assistant-plugin/common/types'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; +import { + TINY_ELSER, + clearConversations, + clearKnowledgeBase, + createKnowledgeBaseModel, + deleteInferenceEndpoint, + deleteKnowledgeBaseModel, +} from './helpers'; +import { getConversationCreatedEvent } from '../helpers'; +import { + LlmProxy, + createLlmProxy, +} from '../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy'; + +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { + const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); + const es = getService('es'); + const ml = getService('ml'); + const log = getService('log'); + const retry = getService('retry'); + + describe('Knowledge base user instructions', function () { + // TODO: https://github.com/elastic/kibana/issues/192751 + this.tags(['skipMKI']); + before(async () => { + await createKnowledgeBaseModel(ml); + const { status } = await observabilityAIAssistantAPIClient.admin({ + endpoint: 'POST /internal/observability_ai_assistant/kb/setup', + params: { + query: { + model_id: TINY_ELSER.id, + }, + }, + }); + expect(status).to.be(200); + }); + + after(async () => { + await deleteKnowledgeBaseModel(ml); + await deleteInferenceEndpoint({ es }); + await clearKnowledgeBase(es); + await clearConversations(es); + }); + + describe('when creating private and public user instructions', () => { + before(async () => { + await clearKnowledgeBase(es); + + const promises = [ + { + username: 'editor' as const, + isPublic: true, + }, + { + username: 'editor' as const, + isPublic: false, + }, + { + username: 'secondary_editor' as const, + isPublic: true, + }, + { + username: 'secondary_editor' as const, + isPublic: false, + }, + ].map(async ({ username, isPublic }) => { + const visibility = isPublic ? 'Public' : 'Private'; + const user = username === 'editor' ? 
'editor' : 'admin'; + + const { status } = await observabilityAIAssistantAPIClient[user]({ + endpoint: 'PUT /internal/observability_ai_assistant/kb/user_instructions', + params: { + body: { + id: `${visibility.toLowerCase()}-doc-from-${username}`, + text: `${visibility} user instruction from "${username}"`, + public: isPublic, + }, + }, + }); + expect(status).to.be(200); + }); + + await Promise.all(promises); + }); + it('"editor" can retrieve their own private instructions and the public instruction', async () => { + await retry.try(async () => { + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/user_instructions', + }); + + const instructions = res.body.userInstructions; + expect(instructions).to.have.length(3); + + const sortById = (data: Array) => sortBy(data, 'id'); + + expect(sortById(instructions)).to.eql( + sortById([ + { + id: 'private-doc-from-editor', + public: false, + text: 'Private user instruction from "editor"', + }, + { + id: 'public-doc-from-editor', + public: true, + text: 'Public user instruction from "editor"', + }, + { + id: 'public-doc-from-secondary_editor', + public: true, + text: 'Public user instruction from "secondary_editor"', + }, + ]) + ); + }); + }); + + it('"secondaryEditor" can retrieve their own private instructions and the public instruction', async () => { + await retry.try(async () => { + const res = await observabilityAIAssistantAPIClient.admin({ + endpoint: 'GET /internal/observability_ai_assistant/kb/user_instructions', + }); + + const instructions = res.body.userInstructions; + expect(instructions).to.have.length(3); + + const sortById = (data: Array) => sortBy(data, 'id'); + + expect(sortById(instructions)).to.eql( + sortById([ + { + id: 'public-doc-from-editor', + public: true, + text: 'Public user instruction from "editor"', + }, + { + id: 'public-doc-from-secondary_editor', + public: true, + text: 'Public user instruction from "secondary_editor"', + }, + { + id: 'private-doc-from-secondary_editor', + public: false, + text: 'Private user instruction from "secondary_editor"', + }, + ]) + ); + }); + }); + }); + describe('when updating an existing user instructions', () => { + before(async () => { + await clearKnowledgeBase(es); + + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'PUT /internal/observability_ai_assistant/kb/user_instructions', + params: { + body: { + id: 'doc-to-update', + text: 'Initial text', + public: true, + }, + }, + }); + expect(status).to.be(200); + + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'PUT /internal/observability_ai_assistant/kb/user_instructions', + params: { + body: { + id: 'doc-to-update', + text: 'Updated text', + public: false, + }, + }, + }); + expect(res.status).to.be(200); + }); + + it('updates the user instruction', async () => { + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/user_instructions', + }); + const instructions = res.body.userInstructions; + + expect(instructions).to.eql([ + { + id: 'doc-to-update', + text: 'Updated text', + public: false, + }, + ]); + }); + }); + + describe('when a user instruction exist and a conversation is created', () => { + let proxy: LlmProxy; + let connectorId: string; + + const userInstructionText = + 'Be polite and use language that is easy to understand. 
Never disagree with the user.'; + + async function getConversationForUser(username: string) { + const user = username === 'editor' ? 'editor' : 'admin'; + + // the user instruction is always created by "editor" user + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'PUT /internal/observability_ai_assistant/kb/user_instructions', + params: { + body: { + id: 'private-instruction-about-language', + text: userInstructionText, + public: false, + }, + }, + }); + + expect(status).to.be(200); + + const interceptPromises = [ + proxy.interceptConversationTitle('LLM-generated title').completeAfterIntercept(), + proxy + .interceptConversation({ name: 'conversation', response: 'I, the LLM, hear you!' }) + .completeAfterIntercept(), + ]; + + const messages: Message[] = [ + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.System, + content: 'You are a helpful assistant', + }, + }, + { + '@timestamp': new Date().toISOString(), + message: { + role: MessageRole.User, + content: 'Today we will be testing user instructions!', + }, + }, + ]; + + const createResponse = await observabilityAIAssistantAPIClient[user]({ + endpoint: 'POST /internal/observability_ai_assistant/chat/complete', + params: { + body: { + messages, + connectorId, + persist: true, + screenContexts: [], + scopes: ['observability'], + }, + }, + }); + expect(createResponse.status).to.be(200); + + await proxy.waitForAllInterceptorsSettled(); + const conversationCreatedEvent = getConversationCreatedEvent(createResponse.body); + const conversationId = conversationCreatedEvent.conversation.id; + + const res = await observabilityAIAssistantAPIClient[user]({ + endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', + params: { + path: { + conversationId, + }, + }, + }); + + // wait for all interceptors to be settled + await Promise.all(interceptPromises); + + const conversation = res.body; + return conversation; + } + + before(async () => { + proxy = await createLlmProxy(log); + connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({ + port: proxy.getPort(), + }); + }); + + after(async () => { + proxy.close(); + await observabilityAIAssistantAPIClient.deleteActionConnector({ + actionId: connectorId, + }); + }); + + it('adds the instruction to the system prompt', async () => { + const conversation = await getConversationForUser('editor'); + const systemMessage = conversation.messages.find( + (message) => message.message.role === MessageRole.System + )!; + expect(systemMessage.message.content).to.contain(userInstructionText); + }); + + it('does not add the instruction to the context', async () => { + const conversation = await getConversationForUser('editor'); + const contextMessage = conversation.messages.find( + (message) => message.message.name === CONTEXT_FUNCTION_NAME + ); + + // there should be no suggestions with the user instruction + expect(contextMessage?.message.content).to.not.contain(userInstructionText); + expect(contextMessage?.message.data).to.not.contain(userInstructionText); + + // there should be no suggestions at all + expect(JSON.parse(contextMessage?.message.data!).suggestions.length).to.be(0); + }); + + it('does not add the instruction conversation for other users', async () => { + const conversation = await getConversationForUser('secondary_editor'); + const systemMessage = conversation.messages.find( + (message) => message.message.role === MessageRole.System + )!; + + 
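+        // The instruction was stored as a private entry owned by the 'editor' user, so it must not leak into the system prompt of a conversation created by a different user.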
expect(systemMessage.message.content).to.not.contain(userInstructionText); + expect(conversation.messages.length).to.be(5); + }); + }); + + describe('Instructions can be saved and cleared again', () => { + async function updateInstruction(text: string) { + const { status } = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'PUT /internal/observability_ai_assistant/kb/user_instructions', + params: { + body: { + id: 'my-instruction-that-will-be-cleared', + text, + public: false, + }, + }, + }); + expect(status).to.be(200); + + const res = await observabilityAIAssistantAPIClient.editor({ + endpoint: 'GET /internal/observability_ai_assistant/kb/user_instructions', + }); + expect(res.status).to.be(200); + + return res.body.userInstructions[0].text; + } + + it('can clear the instruction', async () => { + const res1 = await updateInstruction('This is a user instruction that will be cleared'); + expect(res1).to.be('This is a user instruction that will be cleared'); + + const res2 = await updateInstruction(''); + expect(res2).to.be(''); + }); + }); + + describe('security roles and access privileges', () => { + describe('should deny access for users without the ai_assistant privilege', () => { + it('PUT /internal/observability_ai_assistant/kb/user_instructions', async () => { + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: 'PUT /internal/observability_ai_assistant/kb/user_instructions', + params: { + body: { + id: 'test-instruction', + text: 'Test user instruction', + public: true, + }, + }, + }); + + expect(status).to.be(403); + }); + + it('GET /internal/observability_ai_assistant/kb/user_instructions', async () => { + const { status } = await observabilityAIAssistantAPIClient.viewer({ + endpoint: 'GET /internal/observability_ai_assistant/kb/user_instructions', + }); + expect(status).to.be(403); + }); + }); + }); + }); +} diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/public_complete/public_complete.spec.ts b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/public_complete/public_complete.spec.ts similarity index 91% rename from x-pack/test/observability_ai_assistant_api_integration/tests/public_complete/public_complete.spec.ts rename to x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/public_complete/public_complete.spec.ts index a46266f1b4d06..ce15d20d4ad4d 100644 --- a/x-pack/test/observability_ai_assistant_api_integration/tests/public_complete/public_complete.spec.ts +++ b/x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/public_complete/public_complete.spec.ts @@ -19,14 +19,13 @@ import { isFunctionTitleRequest, LlmProxy, LlmResponseSimulator, -} from '../../common/create_llm_proxy'; -import { FtrProviderContext } from '../../common/ftr_provider_context'; -import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; +} from '../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy'; +import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context'; -export default function ApiTest({ getService }: FtrProviderContext) { - const supertest = getService('supertest'); +export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) { const log = getService('log'); - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); + const samlAuth = getService('samlAuth'); + const 
observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi'); const messages: Message[] = [ { @@ -45,7 +44,9 @@ export default function ApiTest({ getService }: FtrProviderContext) { }, ]; - describe('/api/observability_ai_assistant/chat/complete', () => { + describe('/api/observability_ai_assistant/chat/complete', function () { + // TODO: https://github.com/elastic/kibana/issues/192751 + this.tags(['skipMKI']); let proxy: LlmProxy; let connectorId: string; @@ -70,7 +71,8 @@ export default function ApiTest({ getService }: FtrProviderContext) { (body) => !isFunctionTitleRequest(body) ); - const responsePromise = observabilityAIAssistantAPIClient.admin({ + const roleAuthc = await samlAuth.createM2mApiKeyWithRoleScope('admin'); + const responsePromise = observabilityAIAssistantAPIClient.publicApi({ endpoint: 'POST /api/observability_ai_assistant/chat/complete 2023-10-31', params: { query: { format }, @@ -82,6 +84,7 @@ export default function ApiTest({ getService }: FtrProviderContext) { instructions, }, }, + roleAuthc, }); const [conversationSimulator, titleSimulator] = await Promise.race([ @@ -134,11 +137,15 @@ export default function ApiTest({ getService }: FtrProviderContext) { before(async () => { proxy = await createLlmProxy(log); - connectorId = await createProxyActionConnector({ supertest, log, port: proxy.getPort() }); + connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({ + port: proxy.getPort(), + }); }); after(async () => { - await deleteActionConnector({ supertest, connectorId, log }); + await observabilityAIAssistantAPIClient.deleteActionConnector({ + actionId: connectorId, + }); proxy.close(); }); diff --git a/x-pack/test/api_integration/deployment_agnostic/configs/serverless/oblt.index.ts b/x-pack/test/api_integration/deployment_agnostic/configs/serverless/oblt.index.ts index 7544d7d90f1d5..edd82fc62dca6 100644 --- a/x-pack/test/api_integration/deployment_agnostic/configs/serverless/oblt.index.ts +++ b/x-pack/test/api_integration/deployment_agnostic/configs/serverless/oblt.index.ts @@ -21,5 +21,6 @@ export default function ({ loadTestFile }: DeploymentAgnosticFtrProviderContext) loadTestFile(require.resolve('../../apis/saved_objects_management')); loadTestFile(require.resolve('../../apis/observability/slo')); loadTestFile(require.resolve('../../apis/observability/synthetics')); + loadTestFile(require.resolve('../../apis/observability/ai_assistant')); }); } diff --git a/x-pack/test/api_integration/deployment_agnostic/configs/stateful/oblt.index.ts b/x-pack/test/api_integration/deployment_agnostic/configs/stateful/oblt.index.ts index 4f666fc5b3ebe..4f0c42e12b1fb 100644 --- a/x-pack/test/api_integration/deployment_agnostic/configs/stateful/oblt.index.ts +++ b/x-pack/test/api_integration/deployment_agnostic/configs/stateful/oblt.index.ts @@ -15,5 +15,6 @@ export default function ({ loadTestFile }: DeploymentAgnosticFtrProviderContext) loadTestFile(require.resolve('../../apis/observability/slo')); loadTestFile(require.resolve('../../apis/observability/synthetics')); loadTestFile(require.resolve('../../apis/observability/infra')); + loadTestFile(require.resolve('../../apis/observability/ai_assistant')); }); } diff --git a/x-pack/test/api_integration/deployment_agnostic/services/deployment_agnostic_services.ts b/x-pack/test/api_integration/deployment_agnostic/services/deployment_agnostic_services.ts index 08a085e2fcd9b..52223b69bcc86 100644 --- a/x-pack/test/api_integration/deployment_agnostic/services/deployment_agnostic_services.ts +++ 
b/x-pack/test/api_integration/deployment_agnostic/services/deployment_agnostic_services.ts @@ -21,7 +21,7 @@ export const deploymentAgnosticServices = _.pick(apiIntegrationServices, [ 'indexPatterns', 'ingestPipelines', 'kibanaServer', - // 'ml', depends on 'esDeleteAllIndices', can we make it deployment agnostic? + 'ml', 'randomness', 'retry', 'security', diff --git a/x-pack/test/api_integration/deployment_agnostic/services/index.ts b/x-pack/test/api_integration/deployment_agnostic/services/index.ts index 01dc52571ee5a..77d1ed51f5081 100644 --- a/x-pack/test/api_integration/deployment_agnostic/services/index.ts +++ b/x-pack/test/api_integration/deployment_agnostic/services/index.ts @@ -14,6 +14,7 @@ import { RoleScopedSupertestProvider, SupertestWithRoleScope } from './role_scop import { SloApiProvider } from './slo_api'; import { SynthtraceProvider } from './synthtrace'; import { ApmApiProvider } from './apm_api'; +import { ObservabilityAIAssistantApiProvider } from './observability_ai_assistant_api'; export type { InternalRequestHeader, @@ -33,6 +34,7 @@ export const services = { // create a new deployment-agnostic service and load here synthtrace: SynthtraceProvider, apmApi: ApmApiProvider, + observabilityAIAssistantApi: ObservabilityAIAssistantApiProvider, }; export type SupertestWithRoleScopeType = SupertestWithRoleScope; diff --git a/x-pack/test/api_integration/deployment_agnostic/services/observability_ai_assistant_api.ts b/x-pack/test/api_integration/deployment_agnostic/services/observability_ai_assistant_api.ts new file mode 100644 index 0000000000000..6da5f76432920 --- /dev/null +++ b/x-pack/test/api_integration/deployment_agnostic/services/observability_ai_assistant_api.ts @@ -0,0 +1,209 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { format } from 'url'; +import request from 'superagent'; +import type { + APIReturnType, + ObservabilityAIAssistantAPIClientRequestParamsOf as APIClientRequestParamsOf, + ObservabilityAIAssistantAPIEndpoint as APIEndpoint, +} from '@kbn/observability-ai-assistant-plugin/public'; +import { formatRequest } from '@kbn/server-route-repository'; +import { RoleCredentials } from '@kbn/ftr-common-functional-services'; +import type { DeploymentAgnosticFtrProviderContext } from '../ftr_provider_context'; + +type Options = { + type?: 'form-data'; + endpoint: TEndpoint; + spaceId?: string; +} & APIClientRequestParamsOf & { + params?: { query?: { _inspect?: boolean } }; + }; + +type InternalEndpoint = T extends `${string} /internal/${string}` + ? T + : never; + +type PublicEndpoint = T extends `${string} /api/${string}` ? T : never; + +function createObservabilityAIAssistantApiClient({ + getService, +}: DeploymentAgnosticFtrProviderContext) { + const supertestWithoutAuth = getService('supertestWithoutAuth'); + const samlAuth = getService('samlAuth'); + const logger = getService('log'); + + async function makeApiRequest({ + options, + headers, + }: { + options: Options; + headers: Record; + }): Promise> { + const { endpoint, type } = options; + + const params = 'params' in options ? (options.params as Record) : {}; + + const { method, pathname, version } = formatRequest(endpoint, params.path); + const pathnameWithSpaceId = options.spaceId ? 
`/s/${options.spaceId}${pathname}` : pathname; + const url = format({ pathname: pathnameWithSpaceId, query: params?.query }); + + logger.debug(`Calling observability_ai_assistant API: ${method.toUpperCase()} ${url}`); + + if (version) { + headers['Elastic-Api-Version'] = version; + } + + let res: request.Response; + + if (type === 'form-data') { + const fields: Array<[string, any]> = Object.entries(params.body); + const formDataRequest = supertestWithoutAuth[method](url) + .set(headers) + .set('Content-type', 'multipart/form-data'); + + for (const field of fields) { + void formDataRequest.field(field[0], field[1]); + } + + res = await formDataRequest; + } else if (params.body) { + res = await supertestWithoutAuth[method](url).send(params.body).set(headers); + } else { + res = await supertestWithoutAuth[method](url).set(headers); + } + + return res; + } + + function makeInternalApiRequest(role: string) { + return async >( + options: Options + ): Promise> => { + const headers: Record = { + ...samlAuth.getInternalRequestHeader(), + ...(await samlAuth.getM2MApiCookieCredentialsWithRoleScope(role)), + }; + + return makeApiRequest({ + options, + headers, + }); + }; + } + + function makePublicApiRequest() { + return async >( + options: Options & { + roleAuthc: RoleCredentials; + } + ): Promise> => { + const headers: Record = { + ...samlAuth.getInternalRequestHeader(), + ...options.roleAuthc.apiKeyHeader, + }; + + return makeApiRequest({ + options, + headers, + }); + }; + } + + async function deleteAllActionConnectors(): Promise { + const internalReqHeader = samlAuth.getInternalRequestHeader(); + const roleAuthc = await samlAuth.createM2mApiKeyWithRoleScope('admin'); + const res = await supertestWithoutAuth + .get(`/api/actions/connectors`) + .set(roleAuthc.apiKeyHeader) + .set(internalReqHeader); + + const body = res.body as Array<{ id: string; connector_type_id: string; name: string }>; + return Promise.all(body.map(({ id }) => deleteActionConnector({ actionId: id }))); + } + + async function deleteActionConnector({ actionId }: { actionId: string }) { + const internalReqHeader = samlAuth.getInternalRequestHeader(); + const roleAuthc = await samlAuth.createM2mApiKeyWithRoleScope('admin'); + return supertestWithoutAuth + .delete(`/api/actions/connector/${actionId}`) + .set(roleAuthc.apiKeyHeader) + .set(internalReqHeader); + } + + async function createProxyActionConnector({ port }: { port: number }) { + const internalReqHeader = samlAuth.getInternalRequestHeader(); + const roleAuthc = await samlAuth.createM2mApiKeyWithRoleScope('admin'); + try { + const res = await supertestWithoutAuth + .post('/api/actions/connector') + .set(roleAuthc.apiKeyHeader) + .set(internalReqHeader) + .set('kbn-xsrf', 'foo') + .send({ + name: 'OpenAI Proxy', + connector_type_id: '.gen-ai', + config: { + apiProvider: 'OpenAI', + apiUrl: `http://localhost:${port}`, + }, + secrets: { + apiKey: 'my-api-key', + }, + }) + .expect(200); + + const connectorId = res.body.id as string; + return connectorId; + } catch (e) { + logger.error(`Failed to create action connector due to: ${e}`); + throw e; + } + } + + return { + makeInternalApiRequest, + makePublicApiRequest, + deleteAllActionConnectors, + deleteActionConnector, + createProxyActionConnector, + }; +} + +export type ApiSupertest = ReturnType; + +export class ApiError extends Error { + status: number; + + constructor(res: request.Response, endpoint: string) { + super(`Error calling ${endpoint}: ${res.status} - ${res.text}`); + this.name = 'ApiError'; + this.status = 
res.status; + } +} + +export interface SupertestReturnType { + status: number; + body: APIReturnType; +} + +export function ObservabilityAIAssistantApiProvider(context: DeploymentAgnosticFtrProviderContext) { + const observabilityAIAssistantApiClient = createObservabilityAIAssistantApiClient(context); + return { + admin: observabilityAIAssistantApiClient.makeInternalApiRequest('admin'), + viewer: observabilityAIAssistantApiClient.makeInternalApiRequest('viewer'), + editor: observabilityAIAssistantApiClient.makeInternalApiRequest('editor'), + publicApi: observabilityAIAssistantApiClient.makePublicApiRequest(), + deleteAllActionConnectors: observabilityAIAssistantApiClient.deleteAllActionConnectors, + createProxyActionConnector: observabilityAIAssistantApiClient.createProxyActionConnector, + deleteActionConnector: observabilityAIAssistantApiClient.deleteActionConnector, + }; +} + +export type ObservabilityAIAssistantApiClient = ReturnType< + typeof ObservabilityAIAssistantApiProvider +>; diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/complete/complete.spec.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/complete/complete.spec.ts index ad4808ed8f03b..969d4098409d8 100644 --- a/x-pack/test/observability_ai_assistant_api_integration/tests/complete/complete.spec.ts +++ b/x-pack/test/observability_ai_assistant_api_integration/tests/complete/complete.spec.ts @@ -26,11 +26,7 @@ import { } from '../../common/create_llm_proxy'; import { createOpenAiChunk } from '../../common/create_openai_chunk'; import { FtrProviderContext } from '../../common/ftr_provider_context'; -import { - decodeEvents, - getConversationCreatedEvent, - getConversationUpdatedEvent, -} from '../conversations/helpers'; +import { decodeEvents, getConversationCreatedEvent, getConversationUpdatedEvent } from '../helpers'; import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; import { ForbiddenApiError } from '../../common/config'; diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/connectors/connectors.spec.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/connectors/connectors.spec.ts deleted file mode 100644 index 42e1f8751719e..0000000000000 --- a/x-pack/test/observability_ai_assistant_api_integration/tests/connectors/connectors.spec.ts +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import expect from '@kbn/expect'; -import type { Agent as SuperTestAgent } from 'supertest'; -import { FtrProviderContext } from '../../common/ftr_provider_context'; -import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; -import { ForbiddenApiError } from '../../common/config'; - -export default function ApiTest({ getService }: FtrProviderContext) { - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); - const supertest = getService('supertest'); - const log = getService('log'); - - const CONNECTOR_API_URL = '/internal/observability_ai_assistant/connectors'; - - describe('List connectors', () => { - before(async () => { - await deleteAllActionConnectors(supertest); - }); - - after(async () => { - await deleteAllActionConnectors(supertest); - }); - - it('Returns a 2xx for enterprise license', async () => { - await observabilityAIAssistantAPIClient - .editor({ - endpoint: `GET ${CONNECTOR_API_URL}`, - }) - .expect(200); - }); - - it('returns an empty list of connectors', async () => { - const res = await observabilityAIAssistantAPIClient.editor({ - endpoint: `GET ${CONNECTOR_API_URL}`, - }); - - expect(res.body.length).to.be(0); - }); - - it("returns the gen ai connector if it's been created", async () => { - const connectorId = await createProxyActionConnector({ supertest, log, port: 1234 }); - - const res = await observabilityAIAssistantAPIClient.editor({ - endpoint: `GET ${CONNECTOR_API_URL}`, - }); - - expect(res.body.length).to.be(1); - - await deleteActionConnector({ supertest, connectorId, log }); - }); - - describe('security roles and access privileges', () => { - it('should deny access for users without the ai_assistant privilege', async () => { - try { - await observabilityAIAssistantAPIClient.unauthorizedUser({ - endpoint: `GET ${CONNECTOR_API_URL}`, - }); - throw new ForbiddenApiError('Expected unauthorizedUser() to throw a 403 Forbidden error'); - } catch (e) { - expect(e.status).to.be(403); - } - }); - }); - }); -} - -export async function deleteAllActionConnectors(supertest: SuperTestAgent): Promise { - const res = await supertest.get(`/api/actions/connectors`); - - const body = res.body as Array<{ id: string; connector_type_id: string; name: string }>; - return Promise.all( - body.map(({ id }) => { - return supertest.delete(`/api/actions/connector/${id}`).set('kbn-xsrf', 'foo'); - }) - ); -} diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/conversations/conversations.spec.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/conversations/conversations.spec.ts deleted file mode 100644 index bb85e99b99500..0000000000000 --- a/x-pack/test/observability_ai_assistant_api_integration/tests/conversations/conversations.spec.ts +++ /dev/null @@ -1,378 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import expect from '@kbn/expect'; -import { merge, omit } from 'lodash'; -import { - type ConversationCreateRequest, - type ConversationUpdateRequest, - MessageRole, -} from '@kbn/observability-ai-assistant-plugin/common/types'; -import type { FtrProviderContext } from '../../common/ftr_provider_context'; -import type { SupertestReturnType } from '../../common/observability_ai_assistant_api_client'; -import { ForbiddenApiError } from '../../common/config'; - -export default function ApiTest({ getService }: FtrProviderContext) { - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); - - const conversationCreate: ConversationCreateRequest = { - '@timestamp': new Date().toISOString(), - conversation: { - title: 'My title', - }, - labels: {}, - numeric_labels: {}, - messages: [ - { - '@timestamp': new Date().toISOString(), - message: { - role: MessageRole.User, - content: 'My message', - }, - }, - ], - public: false, - }; - - const conversationUpdate: ConversationUpdateRequest = merge({}, conversationCreate, { - conversation: { - id: '', - title: 'My updated title', - }, - }); - - describe('Conversations', () => { - describe('without conversations', () => { - it('returns no conversations when listing', async () => { - const response = await observabilityAIAssistantAPIClient - .editor({ - endpoint: 'POST /internal/observability_ai_assistant/conversations', - }) - .expect(200); - - expect(response.body).to.eql({ conversations: [] }); - }); - - it('returns a 404 for updating conversations', async () => { - await observabilityAIAssistantAPIClient - .editor({ - endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: 'non-existing-conversation-id', - }, - body: { - conversation: conversationUpdate, - }, - }, - }) - .expect(404); - }); - - it('returns a 404 for retrieving a conversation', async () => { - await observabilityAIAssistantAPIClient - .editor({ - endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: 'my-conversation-id', - }, - }, - }) - .expect(404); - }); - }); - - describe('when creating a conversation with the write user', () => { - let createResponse: Awaited< - SupertestReturnType<'POST /internal/observability_ai_assistant/conversation'> - >; - before(async () => { - createResponse = await observabilityAIAssistantAPIClient - .editor({ - endpoint: 'POST /internal/observability_ai_assistant/conversation', - params: { - body: { - conversation: conversationCreate, - }, - }, - }) - .expect(200); - }); - - after(async () => { - await observabilityAIAssistantAPIClient - .editor({ - endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, - }, - }) - .expect(200); - - await observabilityAIAssistantAPIClient - .editor({ - endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, - }, - }) - .expect(404); - }); - - it('returns the conversation', () => { - expect(createResponse.body).to.eql({ - '@timestamp': createResponse.body['@timestamp'], - conversation: { - id: createResponse.body.conversation.id, - last_updated: createResponse.body.conversation.last_updated, - title: conversationCreate.conversation.title, - }, - labels: conversationCreate.labels, - numeric_labels: 
conversationCreate.numeric_labels, - messages: conversationCreate.messages, - namespace: 'default', - public: conversationCreate.public, - user: { - name: 'editor', - }, - }); - }); - - it('returns a 404 for updating a non-existing conversation', async () => { - await observabilityAIAssistantAPIClient - .editor({ - endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: 'non-existing-conversation-id', - }, - body: { - conversation: conversationUpdate, - }, - }, - }) - .expect(404); - }); - - it('returns a 404 for retrieving a non-existing conversation', async () => { - await observabilityAIAssistantAPIClient - .editor({ - endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: 'non-existing-conversation-id', - }, - }, - }) - .expect(404); - }); - - it('returns the conversation that was created', async () => { - const response = await observabilityAIAssistantAPIClient - .editor({ - endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, - }, - }) - .expect(200); - - expect(response.body).to.eql(createResponse.body); - }); - - it('returns the created conversation when listing', async () => { - const response = await observabilityAIAssistantAPIClient - .editor({ - endpoint: 'POST /internal/observability_ai_assistant/conversations', - }) - .expect(200); - - expect(response.body.conversations[0]).to.eql(createResponse.body); - }); - - // TODO - it.skip('returns a 404 when reading it with another user', () => {}); - - describe('after updating', () => { - let updateResponse: Awaited< - SupertestReturnType<'PUT /internal/observability_ai_assistant/conversation/{conversationId}'> - >; - - before(async () => { - updateResponse = await observabilityAIAssistantAPIClient - .editor({ - endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, - body: { - conversation: merge(omit(conversationUpdate, 'conversation.id'), { - conversation: { id: createResponse.body.conversation.id }, - }), - }, - }, - }) - .expect(200); - }); - - it('returns the updated conversation as response', async () => { - expect(updateResponse.body.conversation.title).to.eql( - conversationUpdate.conversation.title - ); - }); - - it('returns the updated conversation after get', async () => { - const updateAfterCreateResponse = await observabilityAIAssistantAPIClient - .editor({ - endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, - }, - }) - .expect(200); - - expect(updateAfterCreateResponse.body.conversation.title).to.eql( - conversationUpdate.conversation.title - ); - }); - }); - }); - - describe('security roles and access privileges', () => { - describe('should deny access for users without the ai_assistant privilege', () => { - let createResponse: Awaited< - SupertestReturnType<'POST /internal/observability_ai_assistant/conversation'> - >; - before(async () => { - createResponse = await observabilityAIAssistantAPIClient - .editor({ - endpoint: 'POST /internal/observability_ai_assistant/conversation', - params: { - body: { - conversation: conversationCreate, - }, - }, - }) - .expect(200); - }); - - after(async () => { - await observabilityAIAssistantAPIClient - .editor({ 
- endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, - }, - }) - .expect(200); - }); - - it('POST /internal/observability_ai_assistant/conversation', async () => { - try { - await observabilityAIAssistantAPIClient.unauthorizedUser({ - endpoint: 'POST /internal/observability_ai_assistant/conversation', - params: { - body: { - conversation: conversationCreate, - }, - }, - }); - throw new ForbiddenApiError( - 'Expected unauthorizedUser() to throw a 403 Forbidden error' - ); - } catch (e) { - expect(e.status).to.be(403); - } - }); - - it('POST /internal/observability_ai_assistant/conversations', async () => { - try { - await observabilityAIAssistantAPIClient.unauthorizedUser({ - endpoint: 'POST /internal/observability_ai_assistant/conversations', - }); - throw new ForbiddenApiError( - 'Expected unauthorizedUser() to throw a 403 Forbidden error' - ); - } catch (e) { - expect(e.status).to.be(403); - } - }); - - it('PUT /internal/observability_ai_assistant/conversation/{conversationId}', async () => { - try { - await observabilityAIAssistantAPIClient.unauthorizedUser({ - endpoint: 'PUT /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, - body: { - conversation: merge(omit(conversationUpdate, 'conversation.id'), { - conversation: { id: createResponse.body.conversation.id }, - }), - }, - }, - }); - throw new ForbiddenApiError( - 'Expected unauthorizedUser() to throw a 403 Forbidden error' - ); - } catch (e) { - expect(e.status).to.be(403); - } - }); - - it('GET /internal/observability_ai_assistant/conversation/{conversationId}', async () => { - try { - await observabilityAIAssistantAPIClient.unauthorizedUser({ - endpoint: 'GET /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, - }, - }); - throw new ForbiddenApiError( - 'Expected unauthorizedUser() to throw a 403 Forbidden error' - ); - } catch (e) { - expect(e.status).to.be(403); - } - }); - - it('DELETE /internal/observability_ai_assistant/conversation/{conversationId}', async () => { - try { - await observabilityAIAssistantAPIClient.unauthorizedUser({ - endpoint: 'DELETE /internal/observability_ai_assistant/conversation/{conversationId}', - params: { - path: { - conversationId: createResponse.body.conversation.id, - }, - }, - }); - throw new ForbiddenApiError( - 'Expected unauthorizedUser() to throw a 403 Forbidden error' - ); - } catch (e) { - expect(e.status).to.be(403); - } - }); - }); - }); - }); -} diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/helpers.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/helpers.ts new file mode 100644 index 0000000000000..2e5d359ed1e78 --- /dev/null +++ b/x-pack/test/observability_ai_assistant_api_integration/tests/helpers.ts @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { Readable } from 'stream'; +import { + ConversationCreateEvent, + ConversationUpdateEvent, + StreamingChatResponseEvent, + StreamingChatResponseEventType, +} from '@kbn/observability-ai-assistant-plugin/common/conversation_complete'; + +export function decodeEvents(body: Readable | string) { + return String(body) + .split('\n') + .map((line) => line.trim()) + .filter(Boolean) + .map((line) => JSON.parse(line) as StreamingChatResponseEvent); +} + +export function getConversationCreatedEvent(body: Readable | string) { + const decodedEvents = decodeEvents(body); + const conversationCreatedEvent = decodedEvents.find( + (event) => event.type === StreamingChatResponseEventType.ConversationCreate + ) as ConversationCreateEvent; + + if (!conversationCreatedEvent) { + throw new Error( + `No conversation created event found: ${JSON.stringify(decodedEvents, null, 2)}` + ); + } + + return conversationCreatedEvent; +} + +export function getConversationUpdatedEvent(body: Readable | string) { + const decodedEvents = decodeEvents(body); + const conversationUpdatedEvent = decodedEvents.find( + (event) => event.type === StreamingChatResponseEventType.ConversationUpdate + ) as ConversationUpdateEvent; + + if (!conversationUpdatedEvent) { + throw new Error( + `No conversation updated event found: ${JSON.stringify(decodedEvents, null, 2)}` + ); + } + + return conversationUpdatedEvent; +} diff --git a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_user_instructions.spec.ts b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_user_instructions.spec.ts index d5022a052d781..d09a0c1583130 100644 --- a/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_user_instructions.spec.ts +++ b/x-pack/test/observability_ai_assistant_api_integration/tests/knowledge_base/knowledge_base_user_instructions.spec.ts @@ -19,7 +19,7 @@ import { deleteInferenceEndpoint, deleteKnowledgeBaseModel, } from './helpers'; -import { getConversationCreatedEvent } from '../conversations/helpers'; +import { getConversationCreatedEvent } from '../helpers'; import { LlmProxy, createLlmProxy } from '../../common/create_llm_proxy'; import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; import { User } from '../../common/users/users'; diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/chat/chat.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/chat/chat.spec.ts deleted file mode 100644 index 40f3db279135e..0000000000000 --- a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/chat/chat.spec.ts +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ - -import expect from '@kbn/expect'; -import { MessageRole, type Message } from '@kbn/observability-ai-assistant-plugin/common'; -import { PassThrough } from 'stream'; -import { - LlmProxy, - createLlmProxy, -} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/common/create_llm_proxy'; -import { SupertestWithRoleScope } from '@kbn/test-suites-xpack/api_integration/deployment_agnostic/services/role_scoped_supertest'; -import { FtrProviderContext } from '../../common/ftr_provider_context'; -import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; -import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services'; - -export default function ApiTest({ getService }: FtrProviderContext) { - const supertestWithoutAuth = getService('supertestWithoutAuth'); - const svlUserManager = getService('svlUserManager'); - const svlCommonApi = getService('svlCommonApi'); - const log = getService('log'); - const roleScopedSupertest = getService('roleScopedSupertest'); - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); - - let supertestEditorWithCookieCredentials: SupertestWithRoleScope; - - const CHAT_API_URL = `/internal/observability_ai_assistant/chat`; - - const messages: Message[] = [ - { - '@timestamp': new Date().toISOString(), - message: { - role: MessageRole.System, - content: 'You are a helpful assistant', - }, - }, - { - '@timestamp': new Date().toISOString(), - message: { - role: MessageRole.User, - content: 'Good morning!', - }, - }, - ]; - - describe('/internal/observability_ai_assistant/chat', function () { - // TODO: https://github.com/elastic/kibana/issues/192751 - this.tags(['skipMKI']); - let proxy: LlmProxy; - let connectorId: string; - let roleAuthc: RoleCredentials; - let internalReqHeader: InternalRequestHeader; - - before(async () => { - roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor'); - internalReqHeader = svlCommonApi.getInternalRequestHeader(); - - supertestEditorWithCookieCredentials = await roleScopedSupertest.getSupertestWithRoleScope( - 'editor', - { - useCookieHeader: true, - withInternalHeaders: true, - } - ); - - proxy = await createLlmProxy(log); - connectorId = await createProxyActionConnector({ - supertest: supertestWithoutAuth, - log, - port: proxy.getPort(), - roleAuthc, - internalReqHeader, - }); - }); - - after(async () => { - proxy.close(); - await deleteActionConnector({ - supertest: supertestWithoutAuth, - connectorId, - log, - roleAuthc, - internalReqHeader, - }); - await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc); - }); - - it("returns a 4xx if the connector doesn't exist", async () => { - await supertestEditorWithCookieCredentials - .post(CHAT_API_URL) - .send({ - name: 'my_api_call', - messages, - connectorId: 'does not exist', - functions: [], - scopes: ['all'], - }) - .expect(404); - }); - - it('returns a streaming response from the server', async () => { - const NUM_RESPONSES = 5; - - await Promise.race([ - new Promise((resolve, reject) => { - setTimeout(() => { - reject(new Error('Test timed out')); - }, 5000); - }), - new Promise((resolve, reject) => { - async function runTest() { - const interceptor = proxy.intercept('conversation', () => true); - const receivedChunks: Array> = []; - - const passThrough = new PassThrough(); - supertestEditorWithCookieCredentials - .post(CHAT_API_URL) - .on('error', reject) - .send({ - name: 'my_api_call', - messages, - connectorId, - functions: 
[], - scopes: ['all'], - }) - .pipe(passThrough); - - const simulator = await interceptor.waitForIntercept(); - - passThrough.on('data', (chunk) => { - receivedChunks.push(JSON.parse(chunk.toString())); - }); - - for (let i = 0; i < NUM_RESPONSES; i++) { - await simulator.next(`Part: ${i}\n`); - } - - await simulator.tokenCount({ completion: 20, prompt: 33, total: 53 }); - - await simulator.complete(); - - await new Promise((innerResolve) => passThrough.on('end', () => innerResolve())); - - const chatCompletionChunks = receivedChunks.filter( - (chunk) => chunk.type === 'chatCompletionChunk' - ); - expect(chatCompletionChunks).to.have.length( - NUM_RESPONSES, - `received number of chat completion chunks did not match expected. This might be because of a 4xx or 5xx: ${JSON.stringify( - chatCompletionChunks, - null, - 2 - )}` - ); - - const tokenCountChunk = receivedChunks.find((chunk) => chunk.type === 'tokenCount'); - expect(tokenCountChunk).to.eql( - { - type: 'tokenCount', - tokens: { completion: 20, prompt: 33, total: 53 }, - }, - `received token count chunk did not match expected` - ); - } - - runTest().then(resolve, reject); - }), - ]); - }); - - describe('security roles and access privileges', () => { - it('should deny access for users without the ai_assistant privilege', async () => { - await observabilityAIAssistantAPIClient - .slsUnauthorized({ - endpoint: `POST ${CHAT_API_URL}`, - params: { - body: { - name: 'my_api_call', - messages, - connectorId, - functions: [], - scopes: ['all'], - }, - }, - }) - .expect(403); - }); - }); - }); -} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/complete.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/complete.spec.ts index 47aa5018f810a..eaa7a7ef52dfd 100644 --- a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/complete.spec.ts +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/complete/complete.spec.ts @@ -27,11 +27,7 @@ import { import { createOpenAiChunk } from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/common/create_openai_chunk'; import { SupertestWithRoleScope } from '@kbn/test-suites-xpack/api_integration/deployment_agnostic/services/role_scoped_supertest'; import { FtrProviderContext } from '../../common/ftr_provider_context'; -import { - decodeEvents, - getConversationCreatedEvent, - getConversationUpdatedEvent, -} from '../conversations/helpers'; +import { decodeEvents, getConversationCreatedEvent, getConversationUpdatedEvent } from '../helpers'; import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services'; diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/connectors/connectors.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/connectors/connectors.spec.ts deleted file mode 100644 index 2c112f85fc219..0000000000000 --- a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/connectors/connectors.spec.ts +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import expect from '@kbn/expect'; -import { FtrProviderContext } from '../../common/ftr_provider_context'; -import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; -import type { - InternalRequestHeader, - RoleCredentials, - SupertestWithoutAuthProviderType, -} from '../../../../../../shared/services'; - -export default function ApiTest({ getService }: FtrProviderContext) { - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); - const supertestWithoutAuth = getService('supertestWithoutAuth'); - const log = getService('log'); - const svlUserManager = getService('svlUserManager'); - const svlCommonApi = getService('svlCommonApi'); - - describe('List connectors', () => { - let roleAuthc: RoleCredentials; - let internalReqHeader: InternalRequestHeader; - - before(async () => { - roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('editor'); - internalReqHeader = svlCommonApi.getInternalRequestHeader(); - await deleteAllActionConnectors({ - supertest: supertestWithoutAuth, - roleAuthc, - internalReqHeader, - }); - }); - - after(async () => { - await deleteAllActionConnectors({ - supertest: supertestWithoutAuth, - roleAuthc, - internalReqHeader, - }); - await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc); - }); - - it('Returns a 2xx for enterprise license', async () => { - await observabilityAIAssistantAPIClient - .slsEditor({ - endpoint: `GET /internal/observability_ai_assistant/connectors`, - }) - .expect(200); - }); - - it('returns an empty list of connectors', async () => { - const res = await observabilityAIAssistantAPIClient.slsEditor({ - endpoint: `GET /internal/observability_ai_assistant/connectors`, - }); - - expect(res.body.length).to.be(0); - }); - - it("returns the gen ai connector if it's been created", async () => { - const connectorId = await createProxyActionConnector({ - supertest: supertestWithoutAuth, - log, - port: 1234, - internalReqHeader, - roleAuthc, - }); - - const res = await observabilityAIAssistantAPIClient.slsEditor({ - endpoint: `GET /internal/observability_ai_assistant/connectors`, - }); - - expect(res.body.length).to.be(1); - - await deleteActionConnector({ - supertest: supertestWithoutAuth, - connectorId, - log, - internalReqHeader, - roleAuthc, - }); - }); - - describe('security roles and access privileges', () => { - it('should deny access for users without the ai_assistant privilege', async () => { - await observabilityAIAssistantAPIClient - .slsUnauthorized({ - endpoint: `GET /internal/observability_ai_assistant/connectors`, - }) - .expect(403); - }); - }); - }); -} - -export async function deleteAllActionConnectors({ - supertest, - roleAuthc, - internalReqHeader, -}: { - supertest: SupertestWithoutAuthProviderType; - roleAuthc: RoleCredentials; - internalReqHeader: InternalRequestHeader; -}): Promise { - const res = await supertest - .get(`/api/actions/connectors`) - .set(roleAuthc.apiKeyHeader) - .set(internalReqHeader); - - const body = res.body as Array<{ id: string; connector_type_id: string; name: string }>; - return Promise.all( - body.map(({ id }) => { - return supertest - .delete(`/api/actions/connector/${id}`) - .set(roleAuthc.apiKeyHeader) - .set(internalReqHeader); - }) - ); -} diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/helpers.ts 
b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/helpers.ts similarity index 96% rename from x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/helpers.ts rename to x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/helpers.ts index cc35bb1a71298..a90342577cb4c 100644 --- a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/conversations/helpers.ts +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/helpers.ts @@ -13,7 +13,7 @@ import { StreamingChatResponseEvent, StreamingChatResponseEventType, } from '@kbn/observability-ai-assistant-plugin/common/conversation_complete'; -import { ObservabilityAIAssistantApiClient } from '../../common/observability_ai_assistant_api_client'; +import { ObservabilityAIAssistantApiClient } from '../common/observability_ai_assistant_api_client'; export function decodeEvents(body: Readable | string) { return String(body) diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_user_instructions.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_user_instructions.spec.ts index 162097400090b..d9f120edf73ed 100644 --- a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_user_instructions.spec.ts +++ b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/knowledge_base/knowledge_base_user_instructions.spec.ts @@ -18,7 +18,7 @@ import { deleteKnowledgeBaseModel, TINY_ELSER, } from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/knowledge_base/helpers'; -import { getConversationCreatedEvent } from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/conversations/helpers'; +import { getConversationCreatedEvent } from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/helpers'; import { LlmProxy, createLlmProxy, diff --git a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/public_complete/public_complete.spec.ts b/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/public_complete/public_complete.spec.ts deleted file mode 100644 index 8c4cf35c0bf2c..0000000000000 --- a/x-pack/test_serverless/api_integration/test_suites/observability/ai_assistant/tests/public_complete/public_complete.spec.ts +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -import expect from '@kbn/expect'; -import { - FunctionDefinition, - MessageRole, - type Message, -} from '@kbn/observability-ai-assistant-plugin/common'; -import { type StreamingChatResponseEvent } from '@kbn/observability-ai-assistant-plugin/common/conversation_complete'; -import { pick } from 'lodash'; -import type OpenAI from 'openai'; -import { type AdHocInstruction } from '@kbn/observability-ai-assistant-plugin/common/types'; -import { - createLlmProxy, - isFunctionTitleRequest, - LlmProxy, - LlmResponseSimulator, -} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/common/create_llm_proxy'; -import { FtrProviderContext } from '../../common/ftr_provider_context'; -import { createProxyActionConnector, deleteActionConnector } from '../../common/action_connectors'; -import type { InternalRequestHeader, RoleCredentials } from '../../../../../../shared/services'; -import { deleteAllConversations } from '../conversations/helpers'; - -export default function ApiTest({ getService }: FtrProviderContext) { - const supertest = getService('supertestWithoutAuth'); - const svlUserManager = getService('svlUserManager'); - const svlCommonApi = getService('svlCommonApi'); - const log = getService('log'); - const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantAPIClient'); - - const messages: Message[] = [ - { - '@timestamp': new Date().toISOString(), - message: { - role: MessageRole.System, - content: 'You are a helpful assistant', - }, - }, - { - '@timestamp': new Date().toISOString(), - message: { - role: MessageRole.User, - content: 'Good morning, bot!', - }, - }, - ]; - - describe('/api/observability_ai_assistant/chat/complete', function () { - // TODO: https://github.com/elastic/kibana/issues/192751 - this.tags(['skipMKI']); - - let proxy: LlmProxy; - let connectorId: string; - let roleAuthc: RoleCredentials; - let internalReqHeader: InternalRequestHeader; - - interface RequestOptions { - actions?: Array>; - instructions?: AdHocInstruction[]; - format?: 'openai' | 'default'; - } - - type ConversationSimulatorCallback = ( - conversationSimulator: LlmResponseSimulator - ) => Promise; - - async function getResponseBody( - { actions, instructions, format = 'default' }: RequestOptions, - conversationSimulatorCallback: ConversationSimulatorCallback - ) { - const titleInterceptor = proxy.intercept('title', (body) => isFunctionTitleRequest(body)); - - const conversationInterceptor = proxy.intercept( - 'conversation', - (body) => !isFunctionTitleRequest(body) - ); - - const responsePromise = observabilityAIAssistantAPIClient.slsUser({ - endpoint: 'POST /api/observability_ai_assistant/chat/complete 2023-10-31', - roleAuthc, - internalReqHeader, - params: { - query: { format }, - body: { - messages, - connectorId, - persist: true, - actions, - instructions, - }, - }, - }); - - const [conversationSimulator, titleSimulator] = await Promise.race([ - Promise.all([ - conversationInterceptor.waitForIntercept(), - titleInterceptor.waitForIntercept(), - ]), - // make sure any request failures (like 400s) are properly propagated - responsePromise.then(() => []), - ]); - - await titleSimulator.status(200); - await titleSimulator.next('My generated title'); - await titleSimulator.tokenCount({ completion: 1, prompt: 1, total: 2 }); - await titleSimulator.complete(); - - await conversationSimulator.status(200); - - if (conversationSimulatorCallback) { - await conversationSimulatorCallback(conversationSimulator); - } - - const response = await responsePromise; - - 
return String(response.body); - } - - async function getEvents( - options: RequestOptions, - conversationSimulatorCallback: ConversationSimulatorCallback - ) { - const responseBody = await getResponseBody(options, conversationSimulatorCallback); - - return responseBody - .split('\n') - .map((line) => line.trim()) - .filter(Boolean) - .map((line) => JSON.parse(line) as StreamingChatResponseEvent) - .slice(2); // ignore context request/response, we're testing this elsewhere - } - - async function getOpenAIResponse(conversationSimulatorCallback: ConversationSimulatorCallback) { - const responseBody = await getResponseBody( - { - format: 'openai', - }, - conversationSimulatorCallback - ); - - return responseBody; - } - - before(async () => { - proxy = await createLlmProxy(log); - roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('admin'); - internalReqHeader = svlCommonApi.getInternalRequestHeader(); - connectorId = await createProxyActionConnector({ - supertest, - log, - port: proxy.getPort(), - internalReqHeader, - roleAuthc, - }); - }); - - after(async () => { - await deleteAllConversations({ - observabilityAIAssistantAPIClient, - log, - }); - await deleteActionConnector({ supertest, connectorId, log, roleAuthc, internalReqHeader }); - proxy.close(); - await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc); - }); - - describe('after executing an action', () => { - let events: StreamingChatResponseEvent[]; - - before(async () => { - events = await getEvents( - { - actions: [ - { - name: 'my_action', - description: 'My action', - parameters: { - type: 'object', - properties: { - foo: { - type: 'string', - }, - }, - }, - }, - ], - }, - async (conversationSimulator) => { - await conversationSimulator.next({ - tool_calls: [ - { - id: 'fake-id', - index: 'fake-index', - function: { - name: 'my_action', - arguments: JSON.stringify({ foo: 'bar' }), - }, - }, - ], - }); - await conversationSimulator.tokenCount({ completion: 0, prompt: 0, total: 0 }); - await conversationSimulator.complete(); - } - ); - }); - - it('closes the stream without persisting the conversation', () => { - expect( - pick( - events[events.length - 1], - 'message.message.content', - 'message.message.function_call', - 'message.message.role' - ) - ).to.eql({ - message: { - message: { - content: '', - function_call: { - name: 'my_action', - arguments: JSON.stringify({ foo: 'bar' }), - trigger: MessageRole.Assistant, - }, - role: MessageRole.Assistant, - }, - }, - }); - }); - }); - - describe('after adding an instruction', () => { - let body: OpenAI.Chat.ChatCompletionCreateParamsNonStreaming; - - before(async () => { - await getEvents( - { - instructions: [ - { - text: 'This is a random instruction', - instruction_type: 'user_instruction', - }, - ], - }, - async (conversationSimulator) => { - body = conversationSimulator.body; - - await conversationSimulator.next({ - tool_calls: [ - { - id: 'fake-id', - index: 'fake-index', - function: { - name: 'my_action', - arguments: JSON.stringify({ foo: 'bar' }), - }, - }, - ], - }); - await conversationSimulator.tokenCount({ completion: 0, prompt: 0, total: 0 }); - await conversationSimulator.complete(); - } - ); - }); - - it.skip('includes the instruction in the system message', async () => { - expect(body.messages[0].content).to.contain('This is a random instruction'); - }); - }); - - describe('with openai format', () => { - let responseBody: string; - - before(async () => { - responseBody = await getOpenAIResponse(async (conversationSimulator) => { - await 
conversationSimulator.next('Hello'); - await conversationSimulator.tokenCount({ completion: 1, prompt: 1, total: 2 }); - await conversationSimulator.complete(); - }); - }); - - function extractDataParts(lines: string[]) { - return lines.map((line) => { - // .replace is easier, but we want to verify here whether - // it matches the SSE syntax (`data: ...`) - const [, dataPart] = line.match(/^data: (.*)$/) || ['', '']; - return dataPart.trim(); - }); - } - - function getLines() { - return responseBody.split('\n\n').filter(Boolean); - } - - it('outputs each line an SSE-compatible format (data: ...)', () => { - const lines = getLines(); - - lines.forEach((line) => { - expect(line.match(/^data: /)); - }); - }); - - it('ouputs one chunk, and one [DONE] event', () => { - const dataParts = extractDataParts(getLines()); - - expect(dataParts[0]).not.to.be.empty(); - expect(dataParts[1]).to.be('[DONE]'); - }); - - it('outuputs an OpenAI-compatible chunk', () => { - const [dataLine] = extractDataParts(getLines()); - - expect(() => { - JSON.parse(dataLine); - }).not.to.throwException(); - - const parsedChunk = JSON.parse(dataLine); - - expect(parsedChunk).to.eql({ - model: 'unknown', - choices: [ - { - delta: { - content: 'Hello', - }, - finish_reason: null, - index: 0, - }, - ], - object: 'chat.completion.chunk', - // just test that these are a string and a number - id: String(parsedChunk.id), - created: Number(parsedChunk.created), - }); - }); - }); - }); -}
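
Reviewer note: a minimal usage sketch, not part of the patch itself. The suites kept in this PR consume the consolidated helpers exported from x-pack/test/observability_ai_assistant_api_integration/tests/helpers.ts roughly as follows. The import path and helper names are taken from the hunks above; the wrapper function, its responseBody argument, and the assumed conversation.id field on the created event are illustrative only.

import expect from '@kbn/expect';
import {
  decodeEvents,
  getConversationCreatedEvent,
} from '@kbn/test-suites-xpack/observability_ai_assistant_api_integration/tests/helpers';

// responseBody is the streamed newline-delimited JSON returned by the chat/complete endpoints
// exercised in the specs above (obtained via the observabilityAIAssistantAPIClient calls there).
function expectConversationCreated(responseBody: string) {
  // decodeEvents() parses each non-empty line into a StreamingChatResponseEvent.
  const events = decodeEvents(responseBody);
  expect(events.length).to.be.greaterThan(0);

  // getConversationCreatedEvent() throws, embedding the decoded events in the error message,
  // when no ConversationCreate event is present in the stream.
  const createdEvent = getConversationCreatedEvent(responseBody);
  expect(createdEvent.conversation.id).to.be.a('string');

  return createdEvent;
}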