Unverified Commit 621b731d authored by John T Skarbek

Merge commit '6d000c9f' into 11-10-stable

parents b6ff7958 6d000c9f
Showing changed files with 548 additions and 124 deletions
# frozen_string_literal: true
require 'spec_helper'
describe Types::BaseField do
context 'when considering complexity' do
it 'defaults to 1' do
field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, null: true)
expect(field.to_graphql.complexity).to eq 1
end
it 'has specified value' do
field = described_class.new(name: 'test', type: GraphQL::STRING_TYPE, null: true, complexity: 12)
expect(field.to_graphql.complexity).to eq 12
end
end
end
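
For orientation, a minimal sketch of a base field class consistent with these examples — assuming graphql-ruby's GraphQL::Schema::Field and a locally defined DEFAULT_COMPLEXITY of 1; not necessarily GitLab's actual implementation:

    # Hypothetical sketch: fall back to a complexity of 1 when the caller does
    # not pass an explicit complexity: keyword, then defer to graphql-ruby.
    module Types
      class BaseField < GraphQL::Schema::Field
        DEFAULT_COMPLEXITY = 1

        def initialize(*args, **kwargs, &block)
          kwargs[:complexity] ||= DEFAULT_COMPLEXITY
          super(*args, **kwargs, &block)
        end
      end
    end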
@@ -18,4 +18,56 @@ describe WikiHelper do
end
end
end
describe '#wiki_sort_controls' do
let(:project) { create(:project) }
let(:wiki_link) { helper.wiki_sort_controls(project, sort, direction) }
let(:classes) { "btn btn-default has-tooltip reverse-sort-btn qa-reverse-sort" }
def expected_link(sort, direction, icon_class)
path = "/#{project.full_path}/wikis/pages?direction=#{direction}&sort=#{sort}"
helper.link_to(path, type: 'button', class: classes, title: 'Sort direction') do
helper.sprite_icon("sort-#{icon_class}", size: 16)
end
end
context 'initial call' do
let(:sort) { nil }
let(:direction) { nil }
it 'renders with default values' do
expect(wiki_link).to eq(expected_link('title', 'desc', 'lowest'))
end
end
context 'sort by title' do
let(:sort) { 'title' }
let(:direction) { 'asc' }
it 'renders a link with opposite direction' do
expect(wiki_link).to eq(expected_link('title', 'desc', 'lowest'))
end
end
context 'sort by created_at' do
let(:sort) { 'created_at' }
let(:direction) { 'desc' }
it 'renders a link with opposite direction' do
expect(wiki_link).to eq(expected_link('created_at', 'asc', 'highest'))
end
end
end
describe '#wiki_sort_title' do
it 'returns a title corresponding to a key' do
expect(helper.wiki_sort_title('created_at')).to eq('Created date')
expect(helper.wiki_sort_title('title')).to eq('Title')
end
it 'defaults to Title if a key is unknown' do
expect(helper.wiki_sort_title('unknown')).to eq('Title')
end
end
end
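
A hypothetical helper implementation consistent with the expectations above (the actual helper may differ): it reverses the current sort direction, picks the matching sort icon, and renders the toggle as the button-styled link that expected_link builds.

    # Assumed sketch mirroring expected_link in the spec above.
    def wiki_sort_controls(project, sort, direction)
      sort ||= 'title'
      link_class = 'btn btn-default has-tooltip reverse-sort-btn qa-reverse-sort'
      reversed_direction = direction == 'desc' ? 'asc' : 'desc'
      icon_class = reversed_direction == 'asc' ? 'highest' : 'lowest'
      path = "/#{project.full_path}/wikis/pages?direction=#{reversed_direction}&sort=#{sort}"

      link_to(path, type: 'button', class: link_class, title: 'Sort direction') do
        sprite_icon("sort-#{icon_class}", size: 16)
      end
    end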
@@ -11,6 +11,8 @@ describe('IDE pipelines list', () => {
let vm;
let mock;
 
const findLoadingState = () => vm.$el.querySelector('.loading-container');
beforeEach(done => {
const store = createStore();
 
@@ -95,7 +97,7 @@ describe('IDE pipelines list', () => {
 
describe('empty state', () => {
it('renders pipelines empty state', done => {
vm.$store.state.pipelines.latestPipeline = false;
vm.$store.state.pipelines.latestPipeline = null;
 
vm.$nextTick(() => {
expect(vm.$el.querySelector('.empty-state')).not.toBe(null);
@@ -106,15 +108,30 @@ describe('IDE pipelines list', () => {
});
 
describe('loading state', () => {
it('renders loading state when there is no latest pipeline', done => {
vm.$store.state.pipelines.latestPipeline = null;
beforeEach(() => {
vm.$store.state.pipelines.isLoadingPipeline = true;
});
 
vm.$nextTick(() => {
expect(vm.$el.querySelector('.loading-container')).not.toBe(null);
it('does not render when pipeline has loaded before', done => {
vm.$store.state.pipelines.hasLoadedPipeline = true;
 
done();
});
vm.$nextTick()
.then(() => {
expect(findLoadingState()).toBe(null);
})
.then(done)
.catch(done.fail);
});
it('renders loading state when there is no latest pipeline', done => {
vm.$store.state.pipelines.hasLoadedPipeline = false;
vm.$nextTick()
.then(() => {
expect(findLoadingState()).not.toBe(null);
})
.then(done)
.catch(done.fail);
});
});
});
@@ -27,63 +27,71 @@ describe('IDE pipelines mutations', () => {
});
 
describe(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, () => {
it('sets loading to false on success', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](
mockedState,
fullPipelinesResponse.data.pipelines[0],
);
const itSetsPipelineLoadingStates = () => {
it('sets has loaded to true', () => {
expect(mockedState.hasLoadedPipeline).toBe(true);
});
 
expect(mockedState.isLoadingPipeline).toBe(false);
});
it('sets loading to false on success', () => {
expect(mockedState.isLoadingPipeline).toBe(false);
});
};
describe('with pipeline', () => {
beforeEach(() => {
mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](
mockedState,
fullPipelinesResponse.data.pipelines[0],
);
});
 
it('sets latestPipeline', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](
mockedState,
fullPipelinesResponse.data.pipelines[0],
);
itSetsPipelineLoadingStates();
 
expect(mockedState.latestPipeline).toEqual({
id: '51',
path: 'test',
commit: { id: '123' },
details: { status: jasmine.any(Object) },
yamlError: undefined,
it('sets latestPipeline', () => {
expect(mockedState.latestPipeline).toEqual({
id: '51',
path: 'test',
commit: { id: '123' },
details: { status: jasmine.any(Object) },
yamlError: undefined,
});
});
});
 
it('does not set latest pipeline if pipeline is null', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](mockedState, null);
expect(mockedState.latestPipeline).toEqual(false);
it('sets stages', () => {
expect(mockedState.stages.length).toBe(2);
expect(mockedState.stages).toEqual([
{
id: 0,
dropdownPath: stages[0].dropdown_path,
name: stages[0].name,
status: stages[0].status,
isCollapsed: false,
isLoading: false,
jobs: [],
},
{
id: 1,
dropdownPath: stages[1].dropdown_path,
name: stages[1].name,
status: stages[1].status,
isCollapsed: false,
isLoading: false,
jobs: [],
},
]);
});
});
 
it('sets stages', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](
mockedState,
fullPipelinesResponse.data.pipelines[0],
);
describe('with null', () => {
beforeEach(() => {
mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](mockedState, null);
});
 
expect(mockedState.stages.length).toBe(2);
expect(mockedState.stages).toEqual([
{
id: 0,
dropdownPath: stages[0].dropdown_path,
name: stages[0].name,
status: stages[0].status,
isCollapsed: false,
isLoading: false,
jobs: [],
},
{
id: 1,
dropdownPath: stages[1].dropdown_path,
name: stages[1].name,
status: stages[1].status,
isCollapsed: false,
isLoading: false,
jobs: [],
},
]);
itSetsPipelineLoadingStates();
it('does not set latest pipeline if pipeline is null', () => {
expect(mockedState.latestPipeline).toEqual(null);
});
});
});
 
@@ -101,6 +101,32 @@ describe Gitlab::Ci::Build::Policy::Refs do
expect(described_class.new(['/fix-.*/']))
.not_to be_satisfied_by(pipeline)
end
context 'when unsafe regexp is used' do
let(:subject) { described_class.new(['/^(?!master).+/']) }
context 'when allow_unsafe_ruby_regexp is disabled' do
before do
stub_feature_flags(allow_unsafe_ruby_regexp: false)
end
it 'ignores invalid regexp' do
expect(subject)
.not_to be_satisfied_by(pipeline)
end
end
context 'when allow_unsafe_ruby_regexp is enabled' do
before do
stub_feature_flags(allow_unsafe_ruby_regexp: true)
end
it 'is satisfied by regexp' do
expect(subject)
.to be_satisfied_by(pipeline)
end
end
end
end
 
context 'malicious regexp' do
require 'fast_spec_helper'
require 'support/helpers/stub_feature_flags'
require_dependency 'active_model'
 
describe Gitlab::Ci::Config::Entry::Policy do
@@ -33,6 +34,44 @@ describe Gitlab::Ci::Config::Entry::Policy do
end
end
 
context 'when config is an empty regexp' do
let(:config) { ['//'] }
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when using unsafe regexp' do
include StubFeatureFlags
let(:config) { ['/^(?!master).+/'] }
subject { described_class.new([regexp]) }
context 'when allow_unsafe_ruby_regexp is disabled' do
before do
stub_feature_flags(allow_unsafe_ruby_regexp: false)
end
it 'is not valid' do
expect(entry).not_to be_valid
end
end
context 'when allow_unsafe_ruby_regexp is enabled' do
before do
stub_feature_flags(allow_unsafe_ruby_regexp: true)
end
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when config is a special keyword' do
let(:config) { %w[tags triggers branches] }
 
@@ -67,6 +106,34 @@ describe Gitlab::Ci::Config::Entry::Policy do
end
end
 
context 'when using unsafe regexp' do
include StubFeatureFlags
let(:config) { { refs: ['/^(?!master).+/'] } }
subject { described_class.new([regexp]) }
context 'when allow_unsafe_ruby_regexp is disabled' do
before do
stub_feature_flags(allow_unsafe_ruby_regexp: false)
end
it 'is not valid' do
expect(entry).not_to be_valid
end
end
context 'when allow_unsafe_ruby_regexp is enabled' do
before do
stub_feature_flags(allow_unsafe_ruby_regexp: true)
end
it 'is valid' do
expect(entry).to be_valid
end
end
end
context 'when specifying kubernetes policy' do
let(:config) { { kubernetes: 'active' } }
 
@@ -9,55 +9,88 @@ describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
let(:current_user) { double(:current_user) }
let(:abilities) { [double(:first_ability), double(:last_ability)] }
 
let(:checker) do
service = described_class.new(double(resolve_proc: proc {}))
allow(service).to receive(:authorizations).and_return(abilities)
service.__send__(:build_checker, current_user)
end
context 'when authorizing against the object' do
let(:checker) do
service = described_class.new(double(resolve_proc: proc {}))
allow(service).to receive(:authorizations).and_return(abilities)
service.__send__(:build_checker, current_user, nil)
end
 
it 'returns a checker which checks for a single object' do
object = double(:object)
it 'returns a checker which checks for a single object' do
object = double(:object)
 
abilities.each do |ability|
spy_ability_check_for(ability, object, passed: true)
end
abilities.each do |ability|
spy_ability_check_for(ability, object, passed: true)
end
 
expect(checker.call(object)).to eq(object)
end
expect(checker.call(object)).to eq(object)
end
 
it 'returns a checker which checks for all objects' do
objects = [double(:first), double(:last)]
it 'returns a checker which checks for all objects' do
objects = [double(:first), double(:last)]
 
abilities.each do |ability|
objects.each do |object|
spy_ability_check_for(ability, object, passed: true)
abilities.each do |ability|
objects.each do |object|
spy_ability_check_for(ability, object, passed: true)
end
end
expect(checker.call(objects)).to eq(objects)
end
 
expect(checker.call(objects)).to eq(objects)
end
context 'when some objects would not pass the check' do
it 'returns nil when it is single object' do
disallowed = double(:object)
spy_ability_check_for(abilities.first, disallowed, passed: false)
 
context 'when some objects would not pass the check' do
it 'returns nil when it is single object' do
disallowed = double(:object)
expect(checker.call(disallowed)).to be_nil
end
it 'returns only objects which passed when there are more than one' do
allowed = double(:allowed)
disallowed = double(:disallowed)
 
spy_ability_check_for(abilities.first, disallowed, passed: false)
spy_ability_check_for(abilities.first, disallowed, passed: false)
 
expect(checker.call(disallowed)).to be_nil
abilities.each do |ability|
spy_ability_check_for(ability, allowed, passed: true)
end
expect(checker.call([disallowed, allowed])).to contain_exactly(allowed)
end
end
end
context 'when authorizing against another object' do
let(:authorizing_obj) { double(:object) }
 
it 'returns only objects which passed when there are more than one' do
allowed = double(:allowed)
disallowed = double(:disallowed)
let(:checker) do
service = described_class.new(double(resolve_proc: proc {}))
allow(service).to receive(:authorizations).and_return(abilities)
service.__send__(:build_checker, current_user, authorizing_obj)
end
it 'returns a checker which checks for a single object' do
object = double(:object)
abilities.each do |ability|
spy_ability_check_for(ability, authorizing_obj, passed: true)
end
expect(checker.call(object)).to eq(object)
end
 
spy_ability_check_for(abilities.first, disallowed, passed: false)
it 'returns a checker which checks for all objects' do
objects = [double(:first), double(:last)]
 
abilities.each do |ability|
spy_ability_check_for(ability, allowed, passed: true)
objects.each do |object|
spy_ability_check_for(ability, authorizing_obj, passed: true)
end
end
 
expect(checker.call([disallowed, allowed]))
.to contain_exactly(allowed)
expect(checker.call(objects)).to eq(objects)
end
end
end
@@ -230,4 +230,32 @@ describe Gitlab::PrometheusClient do
let(:execute_query) { subject.query_range(prometheus_query) }
end
end
describe '.compute_step' do
using RSpec::Parameterized::TableSyntax
let(:now) { Time.now.utc }
subject { described_class.compute_step(start, stop) }
where(:time_interval_in_seconds, :step) do
0 | 60
10.hours | 60
10.hours + 1 | 61
# frontend options
30.minutes | 60
3.hours | 60
8.hours | 60
1.day | 144
3.days | 432
1.week | 1008
end
with_them do
let(:start) { now - time_interval_in_seconds }
let(:stop) { now }
it { is_expected.to eq(step) }
end
end
end
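
A plausible sketch of the step computation these rows pin down (an assumption, not the confirmed implementation): target roughly 600 data points over the queried range and never drop below one-minute resolution.

    # Assumed sketch of Gitlab::PrometheusClient.compute_step.
    def self.compute_step(start, stop)
      diff = stop.to_f - start.to_f
      step = (diff / 600.0).ceil  # at most ~600 points per range
      [step, 60].max              # 60-second floor, hence 60 for ranges up to 10 hours
    end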
require 'fast_spec_helper'
require 'support/shared_examples/malicious_regexp_shared_examples'
require 'support/helpers/stub_feature_flags'
 
describe Gitlab::UntrustedRegexp::RubySyntax do
describe '.matches_syntax?' do
@@ -33,6 +34,12 @@ describe Gitlab::UntrustedRegexp::RubySyntax do
end
end
 
context 'when regexp is empty' do
it 'fabricates regexp correctly' do
expect(described_class.fabricate('//')).not_to be_nil
end
end
context 'when regexp is a raw pattern' do
it 'returns error' do
expect(described_class.fabricate('some .* thing')).to be_nil
@@ -41,24 +48,63 @@ describe Gitlab::UntrustedRegexp::RubySyntax do
end
 
describe '.fabricate!' do
context 'when regexp is using /regexp/ scheme with flags' do
it 'fabricates regexp with a single flag' do
regexp = described_class.fabricate!('/something/i')
context 'safe regexp is used' do
context 'when regexp is using /regexp/ scheme with flags' do
it 'fabricates regexp with a single flag' do
regexp = described_class.fabricate!('/something/i')
expect(regexp).to eq Gitlab::UntrustedRegexp.new('(?i)something')
expect(regexp.scan('SOMETHING')).to be_one
end
 
expect(regexp).to eq Gitlab::UntrustedRegexp.new('(?i)something')
expect(regexp.scan('SOMETHING')).to be_one
it 'fabricates regexp with multiple flags' do
regexp = described_class.fabricate!('/something/im')
expect(regexp).to eq Gitlab::UntrustedRegexp.new('(?im)something')
end
it 'fabricates regexp without flags' do
regexp = described_class.fabricate!('/something/')
expect(regexp).to eq Gitlab::UntrustedRegexp.new('something')
end
end
end
 
it 'fabricates regexp with multiple flags' do
regexp = described_class.fabricate!('/something/im')
context 'when unsafe regexp is used' do
include StubFeatureFlags
 
expect(regexp).to eq Gitlab::UntrustedRegexp.new('(?im)something')
before do
stub_feature_flags(allow_unsafe_ruby_regexp: true)
allow(Gitlab::UntrustedRegexp).to receive(:new).and_raise(RegexpError)
end
 
it 'fabricates regexp without flags' do
regexp = described_class.fabricate!('/something/')
context 'when no fallback is enabled' do
it 'raises an exception' do
expect { described_class.fabricate!('/something/') }
.to raise_error(RegexpError)
end
end
context 'when fallback is used' do
it 'fabricates regexp with a single flag' do
regexp = described_class.fabricate!('/something/i', fallback: true)
expect(regexp).to eq Regexp.new('something', Regexp::IGNORECASE)
end
it 'fabricates regexp with multiple flags' do
regexp = described_class.fabricate!('/something/im', fallback: true)
expect(regexp).to eq Regexp.new('something', Regexp::IGNORECASE | Regexp::MULTILINE)
end
it 'fabricates regexp without flags' do
regexp = described_class.fabricate!('/something/', fallback: true)
 
expect(regexp).to eq Gitlab::UntrustedRegexp.new('something')
expect(regexp).to eq Regexp.new('something')
end
end
end
 
@@ -94,7 +94,7 @@ describe Gitlab::Workhorse do
end
end
 
describe '.terminal_websocket' do
describe '.channel_websocket' do
def terminal(ca_pem: nil)
out = {
subprotocols: ['foo'],
@@ -108,25 +108,25 @@ describe Gitlab::Workhorse do
 
def workhorse(ca_pem: nil)
out = {
'Terminal' => {
'Channel' => {
'Subprotocols' => ['foo'],
'Url' => 'wss://example.com/terminal.ws',
'Header' => { 'Authorization' => ['Token x'] },
'MaxSessionTime' => 600
}
}
out['Terminal']['CAPem'] = ca_pem if ca_pem
out['Channel']['CAPem'] = ca_pem if ca_pem
out
end
 
context 'without ca_pem' do
subject { described_class.terminal_websocket(terminal) }
subject { described_class.channel_websocket(terminal) }
 
it { is_expected.to eq(workhorse) }
end
 
context 'with ca_pem' do
subject { described_class.terminal_websocket(terminal(ca_pem: "foo")) }
subject { described_class.channel_websocket(terminal(ca_pem: "foo")) }
 
it { is_expected.to eq(workhorse(ca_pem: "foo")) }
end
@@ -13,25 +13,33 @@ describe Ci::BuildRunnerSession, model: true do
it { is_expected.to validate_presence_of(:url).with_message('must be a valid URL') }
 
describe '#terminal_specification' do
let(:terminal_specification) { subject.terminal_specification }
let(:specification) { subject.terminal_specification }
it 'returns terminal.gitlab.com protocol' do
expect(specification[:subprotocols]).to eq ['terminal.gitlab.com']
end
it 'returns a wss url' do
expect(specification[:url]).to start_with('wss://')
end
 
it 'returns empty hash if no url' do
subject.url = ''
 
expect(terminal_specification).to be_empty
expect(specification).to be_empty
end
 
context 'when url is present' do
it 'returns ca_pem nil if empty certificate' do
subject.certificate = ''
 
expect(terminal_specification[:ca_pem]).to be_nil
expect(specification[:ca_pem]).to be_nil
end
 
it 'adds Authorization header if authorization is present' do
subject.authorization = 'whatever'
 
expect(terminal_specification[:headers]).to include(Authorization: ['whatever'])
expect(specification[:headers]).to include(Authorization: ['whatever'])
end
end
end
@@ -16,6 +16,10 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
 
attr_reader :id
 
def self.primary_key
:id
end
def initialize(id, &blk)
@id = id
@calculator = blk
@@ -106,6 +110,46 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
end
end
 
describe '.reactive_cache_worker_finder' do
context 'with default reactive_cache_worker_finder' do
let(:args) { %w(other args) }
before do
allow(instance.class).to receive(:find_by).with(id: instance.id)
.and_return(instance)
end
it 'calls the activerecord find_by method' do
result = instance.class.reactive_cache_worker_finder.call(instance.id, *args)
expect(result).to eq(instance)
expect(instance.class).to have_received(:find_by).with(id: instance.id)
end
end
context 'with custom reactive_cache_worker_finder' do
let(:args) { %w(arg1 arg2) }
let(:instance) { CustomFinderCacheTest.new(666, &calculation) }
class CustomFinderCacheTest < CacheTest
self.reactive_cache_worker_finder = ->(_id, *args) { from_cache(*args) }
def self.from_cache(*args); end
end
before do
allow(instance.class).to receive(:from_cache).with(*args).and_return(instance)
end
it 'overrides the default reactive_cache_worker_finder' do
result = instance.class.reactive_cache_worker_finder.call(instance.id, *args)
expect(result).to eq(instance)
expect(instance.class).to have_received(:from_cache).with(*args)
end
end
end
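
For reference, a one-line sketch of the default finder the first context exercises — an assumption about how it is defined (inside the concern's included block), not code copied from ReactiveCaching:

    # Assumed default: look the record up by its primary key; extra args are ignored.
    self.reactive_cache_worker_finder = ->(id, *_args) { find_by(primary_key => id) }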
describe '#clear_reactive_cache!' do
before do
stub_reactive_cache(instance, 4)
@@ -20,12 +20,16 @@ describe WikiPage do
context 'when there are pages' do
before do
create_page('dir_1/dir_1_1/page_3', 'content')
create_page('page_1', 'content')
create_page('dir_1/page_2', 'content')
create_page('dir_2/page_5', 'content')
create_page('page_6', 'content')
create_page('dir_2/page_4', 'content')
create_page('page_1', 'content')
end
let(:page_1) { wiki.find_page('page_1') }
let(:page_6) { wiki.find_page('page_6') }
let(:dir_1) do
WikiDirectory.new('dir_1', [wiki.find_page('dir_1/page_2')])
end
@@ -38,25 +42,38 @@ describe WikiPage do
WikiDirectory.new('dir_2', pages)
end
 
it 'returns an array with pages and directories' do
expected_grouped_entries = [page_1, dir_1, dir_1_1, dir_2]
context 'sort by title' do
let(:grouped_entries) { described_class.group_by_directory(wiki.pages) }
let(:expected_grouped_entries) { [dir_1_1, dir_1, dir_2, page_1, page_6] }
 
grouped_entries = described_class.group_by_directory(wiki.pages)
it 'returns an array with pages and directories' do
grouped_entries.each_with_index do |page_or_dir, i|
expected_page_or_dir = expected_grouped_entries[i]
expected_slugs = get_slugs(expected_page_or_dir)
slugs = get_slugs(page_or_dir)
 
grouped_entries.each_with_index do |page_or_dir, i|
expected_page_or_dir = expected_grouped_entries[i]
expected_slugs = get_slugs(expected_page_or_dir)
slugs = get_slugs(page_or_dir)
expect(slugs).to match_array(expected_slugs)
end
end
end
context 'sort by created_at' do
let(:grouped_entries) { described_class.group_by_directory(wiki.pages(sort: 'created_at')) }
let(:expected_grouped_entries) { [dir_1_1, page_1, dir_1, dir_2, page_6] }
 
expect(slugs).to match_array(expected_slugs)
it 'returns an array with pages and directories' do
grouped_entries.each_with_index do |page_or_dir, i|
expected_page_or_dir = expected_grouped_entries[i]
expected_slugs = get_slugs(expected_page_or_dir)
slugs = get_slugs(page_or_dir)
expect(slugs).to match_array(expected_slugs)
end
end
end
 
it 'returns an array sorted by alphabetical position' do
# Directories and pages within directories are sorted alphabetically.
# Pages at root come before everything.
expected_order = ['page_1', 'dir_1/page_2', 'dir_1/dir_1_1/page_3',
'dir_2/page_4', 'dir_2/page_5']
it 'returns an array with retained order with directories at the top' do
expected_order = ['dir_1/dir_1_1/page_3', 'dir_1/page_2', 'dir_2/page_4', 'dir_2/page_5', 'page_1', 'page_6']
 
grouped_entries = described_class.group_by_directory(wiki.pages)
 
require 'spec_helper'
describe 'GitlabSchema configurations' do
include GraphqlHelpers
let(:project) { create(:project, :repository) }
let!(:query) { graphql_query_for('project', 'fullPath' => project.full_path) }
it 'shows an error if complexity is too high' do
allow(GitlabSchema).to receive(:max_query_complexity).and_return 1
post_graphql(query, current_user: nil)
expect(graphql_errors.first['message']).to include('which exceeds max complexity of 1')
end
end
@@ -93,6 +93,8 @@ module GraphqlHelpers
end
 
def all_graphql_fields_for(class_name, parent_types = Set.new)
allow_unlimited_graphql_complexity
type = GitlabSchema.types[class_name.to_s]
return "" unless type
 
@@ -170,4 +172,10 @@ module GraphqlHelpers
 
field_type
end
# for most tests, we want to allow unlimited complexity
def allow_unlimited_graphql_complexity
allow_any_instance_of(GitlabSchema).to receive(:max_complexity).and_return nil
allow(GitlabSchema).to receive(:max_query_complexity).with(any_args).and_return nil
end
end
@@ -25,12 +25,16 @@ module PrometheusHelpers
"https://prometheus.example.com/api/v1/query?#{query}"
end
 
def prometheus_query_range_url(prometheus_query, start: 8.hours.ago, stop: Time.now.to_f)
def prometheus_query_range_url(prometheus_query, start: 8.hours.ago, stop: Time.now, step: nil)
start = start.to_f
stop = stop.to_f
step ||= Gitlab::PrometheusClient.compute_step(start, stop)
query = {
query: prometheus_query,
start: start.to_f,
start: start,
end: stop,
step: 1.minute.to_i
step: step
}.to_query
 
"https://prometheus.example.com/api/v1/query_range?#{query}"
# frozen_string_literal: true
 
shared_examples 'confidential quick action' do
context 'when the current user can update issues' do
it 'does not create a note, and marks the issue as confidential' do
add_note('/confidential')
expect(page).not_to have_content '/confidential'
expect(page).to have_content 'Commands applied'
expect(page).to have_content 'made the issue confidential'
expect(issue.reload).to be_confidential
end
end
context 'when the current user cannot update the issue' do
let(:guest) { create(:user) }
before do
project.add_guest(guest)
gitlab_sign_out
sign_in(guest)
visit project_issue_path(project, issue)
end
it 'does not create a note, and does not mark the issue as confidential' do
add_note('/confidential')
expect(page).not_to have_content 'Commands applied'
expect(page).not_to have_content 'made the issue confidential'
expect(issue.reload).not_to be_confidential
end
end
end