Unverified commit 903759f9, authored by Manoj M J, committed by GitLab

Remove VertexAI prompts for chat

parent b9da2017
Showing 7 additions and 350 deletions
# frozen_string_literal: true

module Gitlab
  module Llm
    module Chain
      module Concerns
        module VertexAiPrompt
          CHARACTERS_IN_TOKEN = 4

          # source: https://cloud.google.com/vertex-ai/docs/generative-ai/learn/models
          TOTAL_MODEL_TOKEN_LIMIT = 8192

          # leave a 10% buffer for cases where 1 token does not map exactly to 4 characters
          INPUT_TOKEN_LIMIT = (TOTAL_MODEL_TOKEN_LIMIT * 0.9).to_i.freeze

          # approximate one token as ~4 characters
          MAX_CHARACTERS = (INPUT_TOKEN_LIMIT * CHARACTERS_IN_TOKEN).to_i.freeze
        end
      end
    end
  end
end
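For reference, these constants work out to INPUT_TOKEN_LIMIT = (8192 * 0.9).to_i = 7372 tokens and MAX_CHARACTERS = 7372 * 4 = 29488 characters. A minimal sketch of how a caller might apply the character budget (truncate_to_budget is a hypothetical helper, not part of this concern):

# Worked values from the constants above:
#   INPUT_TOKEN_LIMIT = (8192 * 0.9).to_i  #=> 7372
#   MAX_CHARACTERS    = 7372 * 4           #=> 29488

# Hypothetical helper: trim user input to the character budget before prompting.
def truncate_to_budget(text)
  text[0, Gitlab::Llm::Chain::Concerns::VertexAiPrompt::MAX_CHARACTERS]
end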
@@ -28,8 +28,7 @@ class Executor < Tool
 
             PROVIDER_PROMPT_CLASSES = {
               ai_gateway: ::Gitlab::Llm::Chain::Tools::CiEditorAssistant::Prompts::Anthropic,
-              anthropic: ::Gitlab::Llm::Chain::Tools::CiEditorAssistant::Prompts::Anthropic,
-              vertex_ai: ::Gitlab::Llm::Chain::Tools::CiEditorAssistant::Prompts::VertexAi
+              anthropic: ::Gitlab::Llm::Chain::Tools::CiEditorAssistant::Prompts::Anthropic
             }.freeze
 
             USER_TEMPLATE = Utils::Prompt.as_user("Question: %<input>s")
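The hunks in this commit repeat one edit per tool: the :vertex_ai entry is dropped from PROVIDER_PROMPT_CLASSES, which presumably is the map each executor consults to pick a prompt class for the configured provider. A minimal sketch of that lookup (the provider_prompt_class method name is an assumption, not taken from this diff):

# Hypothetical dispatch: resolve the prompt class for a provider key.
def provider_prompt_class(provider)
  PROVIDER_PROMPT_CLASSES[provider]
end

provider_prompt_class(:anthropic)  #=> ...Prompts::Anthropic
provider_prompt_class(:vertex_ai)  #=> nil after this change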
# frozen_string_literal: true

module Gitlab
  module Llm
    module Chain
      module Tools
        module CiEditorAssistant
          module Prompts
            class VertexAi
              def self.prompt(options)
                prompt = Utils::Prompt.no_role_text(
                  ::Gitlab::Llm::Chain::Tools::CiEditorAssistant::Executor::PROMPT_TEMPLATE, options
                )

                {
                  prompt: prompt,
                  options: {}
                }
              end
            end
          end
        end
      end
    end
  end
end
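The removed class above built its prompt from the executor's PROMPT_TEMPLATE and returned a plain hash. A usage sketch under that assumption (the input string is illustrative):

result = Gitlab::Llm::Chain::Tools::CiEditorAssistant::Prompts::VertexAi
  .prompt(input: 'add a test stage to my pipeline')

result[:prompt]  # rendered PROMPT_TEMPLATE with the question interpolated
result[:options] # => {}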
@@ -40,8 +40,7 @@ class Executor < Identifier
 
             PROVIDER_PROMPT_CLASSES = {
               ai_gateway: ::Gitlab::Llm::Chain::Tools::EpicReader::Prompts::Anthropic,
-              anthropic: ::Gitlab::Llm::Chain::Tools::EpicReader::Prompts::Anthropic,
-              vertex_ai: ::Gitlab::Llm::Chain::Tools::EpicReader::Prompts::VertexAi
+              anthropic: ::Gitlab::Llm::Chain::Tools::EpicReader::Prompts::Anthropic
             }.freeze
 
             SYSTEM_PROMPT = Utils::Prompt.as_system(
# frozen_string_literal: true

module Gitlab
  module Llm
    module Chain
      module Tools
        module EpicReader
          module Prompts
            class VertexAi
              include Concerns::VertexAiPrompt

              def self.prompt(options)
                prompt = Utils::Prompt.no_role_text(
                  ::Gitlab::Llm::Chain::Tools::EpicReader::Executor::PROMPT_TEMPLATE, options
                )

                {
                  prompt: prompt,
                  options: {}
                }
              end
            end
          end
        end
      end
    end
  end
end
@@ -21,8 +21,7 @@ class Executor < SlashCommandTool
               'can process this question.'
             PROVIDER_PROMPT_CLASSES = {
               ai_gateway: ::Gitlab::Llm::Chain::Tools::ExplainCode::Prompts::Anthropic,
-              anthropic: ::Gitlab::Llm::Chain::Tools::ExplainCode::Prompts::Anthropic,
-              vertex_ai: ::Gitlab::Llm::Chain::Tools::ExplainCode::Prompts::VertexAi
+              anthropic: ::Gitlab::Llm::Chain::Tools::ExplainCode::Prompts::Anthropic
             }.freeze
 
             PROMPT_TEMPLATE = [
# frozen_string_literal: true

module Gitlab
  module Llm
    module Chain
      module Tools
        module ExplainCode
          module Prompts
            class VertexAi
              include Concerns::VertexAiPrompt

              def self.prompt(variables)
                prompt = Utils::Prompt.no_role_text(
                  ::Gitlab::Llm::Chain::Tools::ExplainCode::Executor::PROMPT_TEMPLATE, variables
                )

                {
                  prompt: prompt,
                  options: {}
                }
              end
            end
          end
        end
      end
    end
  end
end
@@ -26,8 +26,7 @@ def hello_world
             TEXT
             PROVIDER_PROMPT_CLASSES = {
               ai_gateway: ::Gitlab::Llm::Chain::Tools::FixCode::Prompts::Anthropic,
-              anthropic: ::Gitlab::Llm::Chain::Tools::FixCode::Prompts::Anthropic,
-              vertex_ai: ::Gitlab::Llm::Chain::Tools::FixCode::Prompts::VertexAi
+              anthropic: ::Gitlab::Llm::Chain::Tools::FixCode::Prompts::Anthropic
             }.freeze
 
             PROMPT_TEMPLATE = [
# frozen_string_literal: true

module Gitlab
  module Llm
    module Chain
      module Tools
        module FixCode
          module Prompts
            class VertexAi
              include Concerns::VertexAiPrompt

              def self.prompt(variables)
                prompt = Utils::Prompt.no_role_text(
                  ::Gitlab::Llm::Chain::Tools::FixCode::Executor::PROMPT_TEMPLATE, variables
                )

                {
                  prompt: prompt,
                  options: {}
                }
              end
            end
          end
        end
      end
    end
  end
end
@@ -40,8 +40,7 @@ class Executor < Identifier
 
             PROVIDER_PROMPT_CLASSES = {
               ai_gateway: ::Gitlab::Llm::Chain::Tools::IssueReader::Prompts::Anthropic,
-              anthropic: ::Gitlab::Llm::Chain::Tools::IssueReader::Prompts::Anthropic,
-              vertex_ai: ::Gitlab::Llm::Chain::Tools::IssueReader::Prompts::VertexAi
+              anthropic: ::Gitlab::Llm::Chain::Tools::IssueReader::Prompts::Anthropic
             }.freeze
 
             PROJECT_REGEX = {
# frozen_string_literal: true

module Gitlab
  module Llm
    module Chain
      module Tools
        module IssueReader
          module Prompts
            class VertexAi
              def self.prompt(options)
                prompt = Utils::Prompt.no_role_text(
                  ::Gitlab::Llm::Chain::Tools::IssueReader::Executor::PROMPT_TEMPLATE, options
                )

                Requests::VertexAi.prompt(prompt)
              end
            end
          end
        end
      end
    end
  end
end
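Unlike the other removed prompt classes, this IssueReader variant delegated to Requests::VertexAi.prompt rather than returning a hash itself. A sketch of the equivalent call (the exact return shape of Requests::VertexAi.prompt is not shown in this diff; the IssueReader spec below suggests it also exposes a :prompt key):

rendered = Utils::Prompt.no_role_text(
  ::Gitlab::Llm::Chain::Tools::IssueReader::Executor::PROMPT_TEMPLATE, { input: 'foo?' }
)

# Assumed: wraps the rendered text into the provider request payload.
Requests::VertexAi.prompt(rendered)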
@@ -26,8 +26,7 @@ def hello_world
             TEXT
             PROVIDER_PROMPT_CLASSES = {
               ai_gateway: ::Gitlab::Llm::Chain::Tools::RefactorCode::Prompts::Anthropic,
-              anthropic: ::Gitlab::Llm::Chain::Tools::RefactorCode::Prompts::Anthropic,
-              vertex_ai: ::Gitlab::Llm::Chain::Tools::RefactorCode::Prompts::VertexAi
+              anthropic: ::Gitlab::Llm::Chain::Tools::RefactorCode::Prompts::Anthropic
             }.freeze
 
             PROMPT_TEMPLATE = [
# frozen_string_literal: true

module Gitlab
  module Llm
    module Chain
      module Tools
        module RefactorCode
          module Prompts
            class VertexAi
              include Concerns::VertexAiPrompt

              def self.prompt(variables)
                prompt = Utils::Prompt.no_role_text(
                  ::Gitlab::Llm::Chain::Tools::RefactorCode::Executor::PROMPT_TEMPLATE, variables
                )

                {
                  prompt: prompt,
                  options: {}
                }
              end
            end
          end
        end
      end
    end
  end
end
@@ -26,8 +26,7 @@ def hello_world
             TEXT
             PROVIDER_PROMPT_CLASSES = {
               ai_gateway: ::Gitlab::Llm::Chain::Tools::WriteTests::Prompts::Anthropic,
-              anthropic: ::Gitlab::Llm::Chain::Tools::WriteTests::Prompts::Anthropic,
-              vertex_ai: ::Gitlab::Llm::Chain::Tools::WriteTests::Prompts::VertexAi
+              anthropic: ::Gitlab::Llm::Chain::Tools::WriteTests::Prompts::Anthropic
             }.freeze
 
             PROMPT_TEMPLATE = [
# frozen_string_literal: true

module Gitlab
  module Llm
    module Chain
      module Tools
        module WriteTests
          module Prompts
            class VertexAi
              include Concerns::VertexAiPrompt

              def self.prompt(variables)
                prompt = Utils::Prompt.no_role_text(
                  ::Gitlab::Llm::Chain::Tools::WriteTests::Executor::PROMPT_TEMPLATE, variables
                )

                {
                  prompt: prompt,
                  options: {}
                }
              end
            end
          end
        end
      end
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Llm::Chain::Tools::CiEditorAssistant::Prompts::VertexAi, feature_category: :pipeline_composition do
  describe '.prompt' do
    it 'returns prompt' do
      prompt = described_class.prompt({ input: 'foo' })[:prompt]

      expect(prompt).to include('foo')
      expect(prompt).to include(
        <<~PROMPT
          You are an ai assistant talking to a devops or software engineer.
          You should coach users to author a ".gitlab-ci.yml" file which can be used to create a GitLab pipeline.
          Please provide concrete and detailed yaml that implements what the user asks for as closely as possible, assuming a single yaml file will be used.
          Think step by step to provide the most accurate solution to the user problem. Make sure that all the stages you've defined in the yaml file are actually used in it.
          If you realise you require more input from the user, please describe what information is missing and ask them to provide it. Specifically check, if you have information about the application you're providing a configuration for, for example, the programming language used, or deployment targets.
          If any configuration is missing, such as configuration variables, connection strings, secrets and so on, assume it will be taken from GitLab Ci/CD variables. Please include the variables configuration block that would use these Ci/CD variables.
          Please include the commented sections explaining every configuration block, unless the user explicitly asks you to skip or not include comments.
        PROMPT
      )
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Llm::Chain::Tools::EpicReader::Prompts::VertexAi, feature_category: :duo_chat do
  describe '.prompt' do
    it 'returns prompt' do
      options = {
        input: 'foo?',
        suggestions: "some suggestions"
      }
      prompt = described_class.prompt(options)[:prompt]

      expect(prompt).to include('foo?')
      expect(prompt).to include('some suggestions')
      expect(prompt).to include('You can fetch information about a resource called: an epic or work item.')
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Llm::Chain::Tools::ExplainCode::Prompts::VertexAi, feature_category: :duo_chat do
  describe '.prompt' do
    it 'returns prompt' do
      prompt = described_class
        .prompt(
          { input: 'question', language_info: 'language', selected_text: 'selected text', file_content: 'file content' }
        )[:prompt]

      expected_prompt = <<~PROMPT.chomp
        You are a software developer.
        You can explain code snippets.
        language
        file content
        Here is the code user selected:
        <selected_code>
        selected text
        </selected_code>
        question
        Any code blocks in response should be formatted in markdown.
      PROMPT

      expect(prompt).to eq(expected_prompt)
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Llm::Chain::Tools::FixCode::Prompts::VertexAi, feature_category: :duo_chat do
  describe '.prompt' do
    it 'returns prompt' do
      prompt = described_class
        .prompt({ input: 'question', language_info: 'language', selected_text: 'selected text',
                  file_content: 'file content', file_content_reuse: 'code reuse note' })[:prompt]

      expected_prompt = <<~PROMPT.chomp
        You are a software developer.
        You can analyze the given source code or text for errors.
        Provide code snippet for the fixed code.
        language
        file content
        In the file user selected this code:
        <selected_code>
        selected text
        </selected_code>
        question
        code reuse note
        Any code snippets in response should be formatted in markdown.
      PROMPT

      expect(prompt).to eq(expected_prompt)
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Llm::Chain::Tools::IssueReader::Prompts::VertexAi, feature_category: :duo_chat do
  describe '.prompt' do
    it 'returns prompt' do
      options = {
        input: 'foo?',
        suggestions: "some suggestions"
      }
      prompt = described_class.prompt(options)[:prompt]

      expect(prompt).to include('foo?')
      expect(prompt).to include('some suggestions')
      expect(prompt).to include('You can fetch information about a resource called: an issue')
    end
  end
end