Unverified commit d0f2a1d7 authored by Patrick Cyiza, committed by GitLab

Merge branch 'id-migrate-identifier-tools' into 'master'

Migrate prompts for Duo Chat identifier tools

See merge request https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167401



Merged-by: Patrick Cyiza <jpcyiza@gitlab.com>
Approved-by: Lesley Razzaghian <lrazzaghian@gitlab.com>
Approved-by: Patrick Cyiza <jpcyiza@gitlab.com>
Co-authored-by: Igor Drozdov <idrozdov@gitlab.com>
parents 538b0f11 2f5d0cb9
Showing changed files with 70 additions and 8 deletions
---
name: prompt_migration_ci_editor_assistant
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/475051
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167401
rollout_issue_url: https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/-/issues/595
milestone: '17.6'
group: group::custom models
type: experiment
default_enabled: false
---
name: prompt_migration_epic_reader
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/475052
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167401
rollout_issue_url: https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/-/issues/595
milestone: '17.6'
group: group::custom models
type: experiment
default_enabled: false
---
name: prompt_migration_issue_reader
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/475053
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167401
rollout_issue_url: https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/-/issues/595
milestone: '17.6'
group: group::custom models
type: experiment
default_enabled: false
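
The three flag definitions above follow GitLab's usual rollout pattern: one prompt_migration_* flag per tool, all default_enabled: false, so the prompt migration can be rolled out tool by tool. The sketch below is a hypothetical helper, not code from this MR; it only illustrates how one of these flags selects between the legacy request shape and the AI Gateway agent prompt shape that the specs later in this diff assert on (flag name, option keys, and unit primitive are taken from those specs).

# Hypothetical helper, not part of this MR.
def issue_reader_request_arguments(tool, user)
  if Feature.enabled?(:prompt_migration_issue_reader, user)
    prompt = tool.prompt.deep_merge(options: { inputs: tool.options, use_ai_gateway_agent_prompt: true })
    [prompt, { unit_primitive: 'issue_reader' }]
  else
    [tool.prompt, { unit_primitive: nil }] # legacy path
  end
end
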
@@ -7,6 +7,7 @@ module Tools
   module CiEditorAssistant
     class Executor < Tool
       include Concerns::AiDependent
+      prepend Concerns::UseAiGatewayAgentPrompt

       NAME = 'CiEditorAssistant'
       HUMAN_NAME = 'CI Assistant'
@@ -7,6 +7,7 @@ module Tools
   module EpicReader
     class Executor < Identifier
       include Concerns::ReaderTooling
+      prepend Concerns::UseAiGatewayAgentPrompt

       RESOURCE_NAME = 'epic'
       NAME = 'EpicReader'
@@ -7,6 +7,7 @@ module Tools
   module IssueReader
     class Executor < Identifier
      include Concerns::ReaderTooling
+      prepend Concerns::UseAiGatewayAgentPrompt

       RESOURCE_NAME = 'issue'
       NAME = "IssueReader"
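
All three executors pick up the new behaviour the same way: prepend Concerns::UseAiGatewayAgentPrompt places the concern ahead of the class in the ancestor chain, so the concern's methods run first and can call super to fall back to the executor's original implementation (an include would sit behind the class and never be reached for methods the class already defines). A generic Ruby illustration of that mechanism, with stand-in names rather than GitLab's actual concern:

# Stand-in names; only the prepend/super mechanics mirror the MR.
module UseNewPromptShape
  def prompt
    super.merge(use_ai_gateway_agent_prompt: true)
  end
end

class LegacyExecutor
  prepend UseNewPromptShape

  def prompt
    { prompt: 'legacy prompt' }
  end
end

LegacyExecutor.ancestors.first(2) # => [UseNewPromptShape, LegacyExecutor]
LegacyExecutor.new.prompt         # => {:prompt=>"legacy prompt", :use_ai_gateway_agent_prompt=>true}
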
@@ -77,24 +77,24 @@ def provider_prompt_class
   end

   describe '#request' do
+    let(:tool) { ::Gitlab::Llm::Chain::Tools::IssueReader::Executor.new(context: context, options: options) }
+    let(:prompt_options) { tool.prompt.deep_merge({ options: { inputs: options, use_ai_gateway_agent_prompt: true } }) }
     before do
       allow(Gitlab::Llm::Logger).to receive(:build).and_return(logger)
       allow(logger).to receive(:conditional_info)
     end

     it 'passes prompt and unit primitive to the ai_client' do
-      tool = ::Gitlab::Llm::Chain::Tools::IssueReader::Executor.new(context: context, options: options)
-      expect(ai_request).to receive(:request).with(tool.prompt, unit_primitive: nil)
+      expect(ai_request).to receive(:request).with(prompt_options, unit_primitive: 'issue_reader')

       tool.request
     end

     it 'passes blocks forward to the ai_client' do
       b = proc { "something" }
-      tool = ::Gitlab::Llm::Chain::Tools::IssueReader::Executor.new(context: context, options: options)

-      expect(ai_request).to receive(:request).with(tool.prompt, unit_primitive: nil, &b)
+      expect(ai_request).to receive(:request).with(prompt_options, unit_primitive: 'issue_reader', &b)

       tool.request(&b)
     end
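
The new prompt_options helper relies on ActiveSupport's Hash#deep_merge, which merges nested hashes key by key rather than replacing the whole :options value, so whatever the tool already put under :options survives alongside the new inputs and use_ai_gateway_agent_prompt keys. A standalone example with made-up values:

require 'active_support/core_ext/hash/deep_merge'

# Made-up prompt hash; only the merge behaviour is the point.
prompt = { prompt: 'find the issue', options: { model: 'claude' } }
prompt.deep_merge(options: { inputs: { input: 'foo' }, use_ai_gateway_agent_prompt: true })
# => { prompt: "find the issue",
#      options: { model: "claude", inputs: { input: "foo" }, use_ai_gateway_agent_prompt: true } }
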
@@ -136,7 +136,6 @@ def prompt_options
     end

     it 'logs the request', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/463465' do
-      tool = ::Gitlab::Llm::Chain::Tools::IssueReader::Executor.new(context: context, options: options)
       expected_prompt = tool.prompt[:prompt]

       tool.request
@@ -144,5 +143,17 @@ def prompt_options
       expect(logger).to have_received(:conditional_info).with(context.current_user, a_hash_including(
         message: "Content of the prompt from chat request", klass: tool.class.to_s, prompt: expected_prompt))
     end
+
+    context 'when prompt_migration_issue_reader feature flag is disabled' do
+      before do
+        stub_feature_flags(prompt_migration_issue_reader: false)
+      end
+
+      it 'does not send params to use ai gateway prompt' do
+        expect(ai_request).to receive(:request).with(tool.prompt, unit_primitive: nil)
+
+        tool.request
+      end
+    end
   end
 end
@@ -68,6 +68,11 @@
       expect(answer.error_code).to include("M4002")
     end
   end
+
+  it_behaves_like 'uses ai gateway agent prompt' do
+    let(:prompt_class) { Gitlab::Llm::Chain::Tools::CiEditorAssistant::Prompts::Anthropic }
+    let(:unit_primitive) { 'ci_editor_assistant' }
+  end
 end

 context 'when code tool was already used' do
@@ -68,12 +68,15 @@
     context 'when epic is identified' do
       let_it_be(:epic1) { create(:epic, group: group) }
       let_it_be(:epic2) { create(:epic, group: group) }
+      let(:ai_request_double) { instance_double(Gitlab::Llm::Chain::Requests::AiGateway) }
       let(:context) do
         Gitlab::Llm::Chain::GitlabContext.new(
           container: group,
           resource: epic1,
           current_user: user,
-          ai_request: double
+          ai_request: ai_request_double
         )
       end

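The epic and issue specs also replace the anonymous double with instance_double(Gitlab::Llm::Chain::Requests::AiGateway). A verifying double raises if the spec stubs or sets expectations on a method the real class does not define, which keeps the new request expectations honest. A generic RSpec illustration (the Widget class is a stand-in, not from this MR):

class Widget
  def request(prompt, unit_primitive:); end
end

RSpec.describe 'verifying doubles' do
  it 'rejects stubs for methods the verified class lacks' do
    widget = instance_double(Widget)

    allow(widget).to receive(:request)            # fine: Widget#request exists
    expect { allow(widget).to receive(:fetch) }   # Widget#fetch does not exist
      .to raise_error(RSpec::Mocks::MockExpectationError)
  end
end
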
@@ -309,6 +312,11 @@
         end
       end
     end
+
+    it_behaves_like 'uses ai gateway agent prompt' do
+      let(:prompt_class) { Gitlab::Llm::Chain::Tools::EpicReader::Prompts::Anthropic }
+      let(:unit_primitive) { 'epic_reader' }
+    end
   end

   describe '#get_resources' do
@@ -69,12 +69,15 @@
     context 'when issue is identified' do
       let_it_be(:issue1) { create(:issue, project: project) }
       let_it_be(:issue2) { create(:issue, project: project) }
+      let(:ai_request_double) { instance_double(Gitlab::Llm::Chain::Requests::AiGateway) }
       let(:context) do
         Gitlab::Llm::Chain::GitlabContext.new(
           container: project,
           resource: issue1,
           current_user: user,
-          ai_request: double
+          ai_request: ai_request_double
         )
       end

@@ -291,6 +294,11 @@
           expect(tool.execute.content).to eq(response)
         end
       end
+
+      it_behaves_like 'uses ai gateway agent prompt' do
+        let(:prompt_class) { Gitlab::Llm::Chain::Tools::IssueReader::Prompts::Anthropic }
+        let(:unit_primitive) { 'issue_reader' }
+      end
     end
   end
 end
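
The 'uses ai gateway agent prompt' shared example itself is defined elsewhere in the spec suite and does not appear in this diff; its callers only supply prompt_class and unit_primitive. Based on the #request expectations earlier in this MR, a hypothetical sketch of what it might assert (the real shared example may differ):

# Hypothetical reconstruction, not the shared example shipped in GitLab.
RSpec.shared_examples 'uses ai gateway agent prompt' do
  it 'sends the merged prompt and the unit primitive to the AI gateway' do
    expected_prompt = tool.prompt.deep_merge(options: { inputs: options, use_ai_gateway_agent_prompt: true })

    expect(ai_request_double).to receive(:request).with(expected_prompt, unit_primitive: unit_primitive)

    tool.request
  end
end

The prompt_class let is presumably used by the real shared example to check which Anthropic prompt template the tool resolves to; that check is left out of this sketch.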