Commit ae78b85a authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 38149afc
Showing 807 additions and 230 deletions
@@ -3,7 +3,7 @@
module Gitlab
module Kubernetes
module Helm
HELM_VERSION = '2.16.1'
HELM_VERSION = '2.16.3'
KUBECTL_VERSION = '1.13.12'
NAMESPACE = 'gitlab-managed-apps'
NAMESPACE_LABELS = { 'app.gitlab.com/managed_by' => :gitlab }.freeze
# frozen_string_literal: true
 
module Gitlab
# Reference Counter
#
# A reference counter is used as a mechanism to identify when
# a repository is being accessed by a writable operation.
#
# Maintenance operations can use this as a signal for when it is safe to
# execute significant changes without disrupting running traffic
class ReferenceCounter
REFERENCE_EXPIRE_TIME = 600
 
attr_reader :gl_repository, :key
 
# Reference Counter instance
#
# @example
# Gitlab::ReferenceCounter.new('project-1')
#
# @see Gitlab::GlRepository::RepoType.identifier_for_repositorable
# @param [String] gl_repository repository identifier
def initialize(gl_repository)
@gl_repository = gl_repository
@key = "git-receive-pack-reference-counter:#{gl_repository}"
end
 
# Return the actual counter value
#
# @return [Integer] value
def value
Gitlab::Redis::SharedState.with { |redis| (redis.get(key) || 0).to_i }
Gitlab::Redis::SharedState.with do |redis|
(redis.get(key) || 0).to_i
end
end
 
# Increase the counter
#
# @return [Boolean] whether operation was a success
def increase
redis_cmd do |redis|
redis.incr(key)
@@ -22,26 +44,51 @@ module Gitlab
end
end
 
# rubocop:disable Gitlab/RailsLogger
# Decrease the counter
#
# @return [Boolean] whether operation was a success
def decrease
redis_cmd do |redis|
current_value = redis.decr(key)
if current_value < 0
# rubocop:disable Gitlab/RailsLogger
Rails.logger.warn("Reference counter for #{gl_repository} decreased" \
" when its value was less than 1. Reseting the counter.")
" when its value was less than 1. Resetting the counter.")
# rubocop:enable Gitlab/RailsLogger
redis.del(key)
end
end
end
# rubocop:enable Gitlab/RailsLogger
# Reset the reference counter
#
# @private Used internally by SREs and for debugging purposes
# @return [Boolean] whether reset was a success
def reset!
redis_cmd do |redis|
redis.del(key)
end
end
# When the reference counter expires
#
# @api private Used internally by SREs and for debugging purposes
# @return [Integer] number of seconds until expiration, or a negative value if no expiry is set
def expires_in
Gitlab::Redis::SharedState.with do |redis|
redis.ttl(key)
end
end
 
private
 
def redis_cmd
Gitlab::Redis::SharedState.with { |redis| yield(redis) }
true
rescue => e
Rails.logger.warn("GitLab: An unexpected error occurred in writing to Redis: #{e}") # rubocop:disable Gitlab/RailsLogger
false
end
end
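The counter itself only tracks a value in Redis; callers are expected to bracket each writable operation with increase/decrease and to consult the value before heavy maintenance. A minimal usage sketch (hypothetical caller code, not part of this commit):

counter = Gitlab::ReferenceCounter.new('project-1')

counter.increase              # a writable operation (e.g. git-receive-pack) begins
begin
  # ... perform the write ...
ensure
  counter.decrease            # always release the reference when the write finishes
end

# A maintenance task could postpone disruptive work while writes are in flight:
puts 'repository busy, postponing maintenance' if counter.value > 0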
@@ -1728,6 +1728,9 @@ msgstr ""
msgid "An error occurred when updating the issue weight"
msgstr ""
 
msgid "An error occurred while adding formatted title for epic"
msgstr ""
msgid "An error occurred while checking group path"
msgstr ""
 
@@ -16372,9 +16375,6 @@ msgstr ""
msgid "Resolve conflicts on source branch"
msgstr ""
 
msgid "Resolve discussion"
msgstr ""
msgid "Resolve thread"
msgstr ""
 
@@ -20780,9 +20780,6 @@ msgstr ""
msgid "Unresolve"
msgstr ""
 
msgid "Unresolve discussion"
msgstr ""
msgid "Unresolve thread"
msgstr ""
 
@@ -5,12 +5,34 @@ require 'spec_helper'
describe ServerlessDomainFinder do
let(:function_name) { 'test-function' }
let(:pages_domain_name) { 'serverless.gitlab.io' }
let(:pages_domain) { create(:pages_domain, :instance_serverless, domain: pages_domain_name) }
let!(:serverless_domain_cluster) { create(:serverless_domain_cluster, uuid: 'abcdef12345678', pages_domain: pages_domain) }
let(:valid_cluster_uuid) { 'aba1cdef123456f278' }
let(:invalid_cluster_uuid) { 'aba1cdef123456f178' }
let!(:environment) { create(:environment, name: 'test') }
 
let(:pages_domain) do
create(
:pages_domain,
:instance_serverless,
domain: pages_domain_name
)
end
let(:knative_with_ingress) do
create(
:clusters_applications_knative,
external_ip: '10.0.0.1'
)
end
let!(:serverless_domain_cluster) do
create(
:serverless_domain_cluster,
uuid: 'abcdef12345678',
pages_domain: pages_domain,
knative: knative_with_ingress
)
end
let(:valid_uri) { "https://#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
let(:valid_fqdn) { "#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
let(:invalid_uri) { "https://#{function_name}-#{invalid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
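For reference, the valid_uri and valid_fqdn lets above assemble the serverless hostname from the function name, the cluster UUID, the environment id rendered as hex, the environment slug, and the instance pages domain. A small sketch of that composition (the environment id is an assumed illustrative value):

function_name      = 'test-function'
cluster_uuid       = 'aba1cdef123456f278'
environment_id_hex = '%x' % 10           # assuming environment.id == 10 -> "a"
environment_slug   = 'test'
pages_domain_name  = 'serverless.gitlab.io'

fqdn = "#{function_name}-#{cluster_uuid}#{environment_id_hex}-#{environment_slug}.#{pages_domain_name}"
# => "test-function-aba1cdef123456f278a-test.serverless.gitlab.io"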
{
"type": "object",
"required": [
"source"
],
"properties": {
"source": { "type": "object",
"required": ["type", "service", "cluster"],
"properties" : {
"type": { "type": "string", "enum": ["serverless"] },
"service": { "type": "string" },
"cluster": { "type": "object",
"required": ["hostname", "address", "port", "cert", "key"],
"properties": {
"hostname": { "type": "string" },
"address": { "type": "string" },
"port": { "type": "integer" },
"cert": { "type": "string" },
"key": { "type": "string" }
},
"additionalProperties": false
}
},
"additionalProperties": false
}
},
"additionalProperties": false
}
{
"type": "object",
"required": [
"lookup_paths",
"certificate",
"key"
],
"properties": {
"certificate": { "type": ["string", "null"] },
"key": { "type": ["string", "null"] },
"lookup_paths": { "type": "array", "items": { "$ref": "lookup_path.json" } }
},
"additionalProperties": false
}
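Read together, the two schemas above describe the virtual-domain payload returned for a serverless domain: an optional certificate and key plus a list of lookup paths whose source points at a Knative cluster. A hypothetical Ruby hash that should satisfy them (the lookup path item shape follows the expectations in the API spec further down; all values are illustrative):

virtual_domain = {
  'certificate' => nil,
  'key' => nil,
  'lookup_paths' => [
    {
      'source' => {
        'type' => 'serverless',
        'service' => 'test-function.gitlab-ce-1-test.example.knative.dev',
        'cluster' => {
          'hostname' => 'example.knative.dev',
          'address' => '10.0.0.1',
          'port' => 443,
          'cert' => '-----BEGIN CERTIFICATE-----...',
          'key' => '-----BEGIN PRIVATE KEY-----...'
        }
      }
    }
  ]
}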
import {
gapiProjectsResponseMock,
gapiZonesResponseMock,
gapiMachineTypesResponseMock,
} from './mock_data';
const cloudbilling = {
projects: {
getBillingInfo: jest.fn(
() =>
new Promise(resolve => {
resolve({
result: { billingEnabled: true },
});
}),
),
},
};
const cloudresourcemanager = {
projects: {
list: jest.fn(
() =>
new Promise(resolve => {
resolve({
result: { ...gapiProjectsResponseMock },
});
}),
),
},
};
const compute = {
zones: {
list: jest.fn(
() =>
new Promise(resolve => {
resolve({
result: { ...gapiZonesResponseMock },
});
}),
),
},
machineTypes: {
list: jest.fn(
() =>
new Promise(resolve => {
resolve({
result: { ...gapiMachineTypesResponseMock },
});
}),
),
},
};
const gapi = {
client: {
cloudbilling,
cloudresourcemanager,
compute,
},
};
export { gapi as default };
import testAction from 'spec/helpers/vuex_action_helper';
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/create_cluster/gke_cluster/store/actions';
import { createStore } from '~/create_cluster/gke_cluster/store';
import { gapi } from '../helpers';
import gapi from '../helpers';
import { selectedProjectMock, selectedZoneMock, selectedMachineTypeMock } from '../mock_data';
 
describe('GCP Cluster Dropdown Store Actions', () => {
@@ -65,9 +65,10 @@ describe('GCP Cluster Dropdown Store Actions', () => {
 
describe('async fetch methods', () => {
let originalGapi;
beforeAll(() => {
originalGapi = window.gapi;
window.gapi = gapi();
window.gapi = gapi;
});
 
afterAll(() => {
@@ -72,11 +72,17 @@ describe('self monitor component', () => {
selfMonitoringProjectExists: true,
createSelfMonitoringProjectPath: '/create',
deleteSelfMonitoringProjectPath: '/delete',
selfMonitoringProjectFullPath: 'instance-administrators-random/gitlab-self-monitoring',
});
 
wrapper = shallowMount(SelfMonitor, { store });
 
expect(wrapper.vm.selfMonitoringFormText).toContain('<a href="http://localhost/">');
expect(
wrapper
.find({ ref: 'selfMonitoringFormText' })
.find('a')
.attributes('href'),
).toEqual('http://localhost/instance-administrators-random/gitlab-self-monitoring');
});
});
});
import {
gapiProjectsResponseMock,
gapiZonesResponseMock,
gapiMachineTypesResponseMock,
} from './mock_data';
// eslint-disable-next-line import/prefer-default-export
export const gapi = () => ({
client: {
cloudbilling: {
projects: {
getBillingInfo: () =>
new Promise(resolve => {
resolve({
result: { billingEnabled: true },
});
}),
},
},
cloudresourcemanager: {
projects: {
list: () =>
new Promise(resolve => {
resolve({
result: { ...gapiProjectsResponseMock },
});
}),
},
},
compute: {
zones: {
list: () =>
new Promise(resolve => {
resolve({
result: { ...gapiZonesResponseMock },
});
}),
},
machineTypes: {
list: () =>
new Promise(resolve => {
resolve({
result: { ...gapiMachineTypesResponseMock },
});
}),
},
},
},
});
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Processable do
let(:node_class) do
Class.new(::Gitlab::Config::Entry::Node) do
include Gitlab::Ci::Config::Entry::Processable
def self.name
'job'
end
end
end
let(:entry) { node_class.new(config, name: :rspec) }
describe 'validations' do
before do
entry.compose!
end
context 'when entry config value is correct' do
let(:config) { { stage: 'test' } }
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
context 'when job name is empty' do
let(:entry) { node_class.new(config, name: ''.to_sym) }
it 'reports error' do
expect(entry.errors).to include "job name can't be blank"
end
end
end
context 'when entry value is not correct' do
context 'incorrect config value type' do
let(:config) { ['incorrect'] }
describe '#errors' do
it 'reports error about a config type' do
expect(entry.errors)
.to include 'job config should be a hash'
end
end
end
context 'when config is empty' do
let(:config) { {} }
describe '#valid' do
it 'is invalid' do
expect(entry).not_to be_valid
end
end
end
context 'when extends key is not a string' do
let(:config) { { extends: 123 } }
it 'returns error about wrong value type' do
expect(entry).not_to be_valid
expect(entry.errors).to include "job extends should be an array of strings or a string"
end
end
context 'when it uses both "when:" and "rules:"' do
let(:config) do
{
script: 'echo',
when: 'on_failure',
rules: [{ if: '$VARIABLE', when: 'on_success' }]
}
end
it 'returns an error about when: being combined with rules' do
expect(entry).not_to be_valid
expect(entry.errors).to include 'job config key may not be used with `rules`: when'
end
end
context 'when only: is used with rules:' do
let(:config) { { only: ['merge_requests'], rules: [{ if: '$THIS' }] } }
it 'returns error about mixing only: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
end
context 'and only: is blank' do
let(:config) { { only: nil, rules: [{ if: '$THIS' }] } }
it 'returns error about mixing only: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
end
end
context 'and rules: is blank' do
let(:config) { { only: ['merge_requests'], rules: nil } }
it 'returns error about mixing only: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
end
end
end
context 'when except: is used with rules:' do
let(:config) { { except: { refs: %w[master] }, rules: [{ if: '$THIS' }] } }
it 'returns error about mixing except: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
end
context 'and except: is blank' do
let(:config) { { except: nil, rules: [{ if: '$THIS' }] } }
it 'returns error about mixing except: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
end
end
context 'and rules: is blank' do
let(:config) { { except: { refs: %w[master] }, rules: nil } }
it 'returns error about mixing except: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
end
end
end
context 'when only: and except: are both used with rules:' do
let(:config) do
{
only: %w[merge_requests],
except: { refs: %w[master] },
rules: [{ if: '$THIS' }]
}
end
it 'returns errors about mixing both only: and except: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
expect(entry.errors).to include /may not be used with `rules`/
end
context 'when only: and except: as both blank' do
let(:config) do
{ only: nil, except: nil, rules: [{ if: '$THIS' }] }
end
it 'returns errors about mixing both only: and except: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
expect(entry.errors).to include /may not be used with `rules`/
end
end
context 'when rules: is blank' do
let(:config) do
{ only: %w[merge_requests], except: { refs: %w[master] }, rules: nil }
end
it 'returns errors about mixing both only: and except: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
expect(entry.errors).to include /may not be used with `rules`/
end
end
end
end
end
describe '#relevant?' do
it 'is a relevant entry' do
entry = node_class.new({ stage: 'test' }, name: :rspec)
expect(entry).to be_relevant
end
end
describe '#compose!' do
let(:specified) do
double('specified', 'specified?' => true, value: 'specified')
end
let(:unspecified) { double('unspecified', 'specified?' => false) }
let(:default) { double('default', '[]' => unspecified) }
let(:workflow) { double('workflow', 'has_rules?' => false) }
let(:deps) { double('deps', 'default' => default, '[]' => unspecified, 'workflow' => workflow) }
context 'with workflow rules' do
using RSpec::Parameterized::TableSyntax
where(:name, :has_workflow_rules?, :only, :rules, :result) do
"uses default only" | false | nil | nil | { refs: %w[branches tags] }
"uses user only" | false | %w[branches] | nil | { refs: %w[branches] }
"does not define only" | false | nil | [] | nil
"does not define only" | true | nil | nil | nil
"uses user only" | true | %w[branches] | nil | { refs: %w[branches] }
"does not define only" | true | nil | [] | nil
end
with_them do
let(:config) { { script: 'ls', rules: rules, only: only }.compact }
it "#{name}" do
expect(workflow).to receive(:has_rules?) { has_workflow_rules? }
entry.compose!(deps)
expect(entry.only_value).to eq(result)
end
end
end
context 'when workflow rules is used' do
context 'when rules are used' do
let(:config) { { script: 'ls', cache: { key: 'test' }, rules: [] } }
it 'does not define only' do
expect(entry).not_to be_only_defined
end
end
context 'when rules are not used' do
let(:config) { { script: 'ls', cache: { key: 'test' }, only: [] } }
it 'does not define only' do
expect(entry).not_to be_only_defined
end
end
end
end
context 'when composed' do
before do
entry.compose!
end
describe '#value' do
context 'when entry is correct' do
let(:config) do
{ stage: 'test' }
end
it 'returns correct value' do
expect(entry.value)
.to eq(name: :rspec,
stage: 'test',
only: { refs: %w[branches tags] })
end
end
end
end
end
@@ -2419,7 +2419,9 @@ module Gitlab
 
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['jobs:rspec config contains unknown keys: bad_tags', 'jobs:rspec rules should be an array of hashes'])
expect(subject.errors).to contain_exactly(
'jobs:rspec config contains unknown keys: bad_tags',
'jobs:rspec rules should be an array of hashes')
expect(subject.content).to be_blank
end
end
@@ -218,6 +218,8 @@ describe Gitlab::Danger::Helper do
'scripts/foo' | :engineering_productivity
'lib/gitlab/danger/foo' | :engineering_productivity
'ee/lib/gitlab/danger/foo' | :engineering_productivity
'.overcommit.yml.example' | :engineering_productivity
'tooling/overcommit/foo' | :engineering_productivity
 
'lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml' | :backend
 
@@ -4,167 +4,191 @@ require 'spec_helper'
 
describe Gitlab::ImportExport::MembersMapper do
describe 'map members' do
let(:user) { create(:admin) }
let(:project) { create(:project, :public, name: 'searchable_project') }
let(:user2) { create(:user) }
let(:exported_user_id) { 99 }
let(:exported_members) do
[{
"id" => 2,
"access_level" => 40,
"source_id" => 14,
"source_type" => "Project",
"notification_level" => 3,
"created_at" => "2016-03-11T10:21:44.822Z",
"updated_at" => "2016-03-11T10:21:44.822Z",
"created_by_id" => nil,
"invite_email" => nil,
"invite_token" => nil,
"invite_accepted_at" => nil,
"user" =>
{
"id" => exported_user_id,
"email" => user2.email,
"username" => 'test'
},
"user_id" => 19
},
{
"id" => 3,
"access_level" => 40,
"source_id" => 14,
"source_type" => "Project",
"user_id" => nil,
"notification_level" => 3,
"created_at" => "2016-03-11T10:21:44.822Z",
"updated_at" => "2016-03-11T10:21:44.822Z",
"created_by_id" => 1,
"invite_email" => 'invite@test.com',
"invite_token" => 'token',
"invite_accepted_at" => nil
}]
end
let(:members_mapper) do
described_class.new(
exported_members: exported_members, user: user, importable: project)
end
it 'includes the exported user ID in the map' do
expect(members_mapper.map.keys).to include(exported_user_id)
end
it 'maps a project member' do
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
end
it 'defaults to importer project member if it does not exist' do
expect(members_mapper.map[-1]).to eq(user.id)
end
it 'has invited members with no user' do
members_mapper.map
expect(ProjectMember.find_by_invite_email('invite@test.com')).not_to be_nil
end
it 'authorizes the users to the project' do
members_mapper.map
expect(user.authorized_project?(project)).to be true
expect(user2.authorized_project?(project)).to be true
end
it 'maps an owner as a maintainer' do
exported_members.first['access_level'] = ProjectMember::OWNER
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
expect(ProjectMember.find_by_user_id(user2.id).access_level).to eq(ProjectMember::MAINTAINER)
end
it 'removes old user_id from member_hash to avoid conflict with user key' do
expect(ProjectMember)
.to receive(:create)
.twice
.with(hash_excluding('user_id'))
.and_call_original
members_mapper.map
end
context 'user is not an admin' do
let(:user) { create(:user) }
it 'does not map a project member' do
expect(members_mapper.map[exported_user_id]).to eq(user.id)
shared_examples 'imports exported members' do
let(:user) { create(:admin) }
let(:user2) { create(:user) }
let(:exported_user_id) { 99 }
let(:exported_members) do
[{
"id" => 2,
"access_level" => 40,
"source_id" => 14,
"source_type" => source_type,
"notification_level" => 3,
"created_at" => "2016-03-11T10:21:44.822Z",
"updated_at" => "2016-03-11T10:21:44.822Z",
"created_by_id" => nil,
"invite_email" => nil,
"invite_token" => nil,
"invite_accepted_at" => nil,
"user" =>
{
"id" => exported_user_id,
"email" => user2.email,
"username" => 'test'
},
"user_id" => 19
},
{
"id" => 3,
"access_level" => 40,
"source_id" => 14,
"source_type" => source_type,
"user_id" => nil,
"notification_level" => 3,
"created_at" => "2016-03-11T10:21:44.822Z",
"updated_at" => "2016-03-11T10:21:44.822Z",
"created_by_id" => 1,
"invite_email" => 'invite@test.com',
"invite_token" => 'token',
"invite_accepted_at" => nil
}]
end
 
it 'defaults to importer project member if it does not exist' do
expect(members_mapper.map[-1]).to eq(user.id)
let(:members_mapper) do
described_class.new(
exported_members: exported_members, user: user, importable: importable)
end
end
 
context 'chooses the one with an email first' do
let(:user3) { create(:user, username: 'test') }
it 'includes the exported user ID in the map' do
expect(members_mapper.map.keys).to include(exported_user_id)
end
 
it 'maps the project member that has a matching email first' do
it 'maps a member' do
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
end
end
 
context 'importer same as group member' do
let(:user2) { create(:admin) }
let(:group) { create(:group) }
let(:project) { create(:project, :public, name: 'searchable_project', namespace: group) }
let(:members_mapper) do
described_class.new(
exported_members: exported_members, user: user2, importable: project)
it 'defaults to importer member if it does not exist' do
expect(members_mapper.map[-1]).to eq(user.id)
end
 
before do
group.add_users([user, user2], GroupMember::DEVELOPER)
end
it 'has invited members with no user' do
members_mapper.map
 
it 'maps the project member' do
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
expect(member_class.find_by_invite_email('invite@test.com')).not_to be_nil
end
 
it 'maps the project member if it already exists' do
project.add_maintainer(user2)
it 'removes old user_id from member_hash to avoid conflict with user key' do
expect(member_class)
.to receive(:create)
.twice
.with(hash_excluding('user_id'))
.and_call_original
 
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
members_mapper.map
end
end
 
context 'importing group members' do
let(:group) { create(:group) }
let(:project) { create(:project, namespace: group) }
let(:members_mapper) do
described_class.new(
exported_members: exported_members, user: user, importable: project)
end
context 'user is not an admin' do
let(:user) { create(:user) }
 
before do
group.add_users([user, user2], GroupMember::DEVELOPER)
user.update(email: 'invite@test.com')
it 'does not map a member' do
expect(members_mapper.map[exported_user_id]).to eq(user.id)
end
it 'defaults to importer member if it does not exist' do
expect(members_mapper.map[-1]).to eq(user.id)
end
end
 
it 'maps the importer' do
expect(members_mapper.map[-1]).to eq(user.id)
context 'chooses the one with an email' do
let(:user3) { create(:user, username: 'test') }
it 'maps the member that has a matching email' do
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
end
end
end
 
it 'maps the group member' do
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
context 'when importable is Project' do
include_examples 'imports exported members' do
let(:source_type) { 'Project' }
let(:member_class) { ProjectMember }
let(:importable) { create(:project, :public, name: 'searchable_project') }
it 'authorizes the users to the project' do
members_mapper.map
expect(user.authorized_project?(importable)).to be true
expect(user2.authorized_project?(importable)).to be true
end
it 'maps an owner as a maintainer' do
exported_members.first['access_level'] = ProjectMember::OWNER
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
expect(member_class.find_by_user_id(user2.id).access_level).to eq(ProjectMember::MAINTAINER)
end
context 'importer same as group member' do
let(:user2) { create(:admin) }
let(:group) { create(:group) }
let(:importable) { create(:project, :public, name: 'searchable_project', namespace: group) }
let(:members_mapper) do
described_class.new(
exported_members: exported_members, user: user2, importable: importable)
end
before do
group.add_users([user, user2], GroupMember::DEVELOPER)
end
it 'maps the project member' do
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
end
it 'maps the project member if it already exists' do
importable.add_maintainer(user2)
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
end
end
context 'importing group members' do
let(:group) { create(:group) }
let(:importable) { create(:project, namespace: group) }
let(:members_mapper) do
described_class.new(
exported_members: exported_members, user: user, importable: importable)
end
before do
group.add_users([user, user2], GroupMember::DEVELOPER)
user.update(email: 'invite@test.com')
end
it 'maps the importer' do
expect(members_mapper.map[-1]).to eq(user.id)
end
it 'maps the group member' do
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
end
end
context 'when importer mapping fails' do
let(:exception_message) { 'Something went wrong' }
it 'includes importer specific error message' do
expect(member_class).to receive(:create!).and_raise(StandardError.new(exception_message))
expect { members_mapper.map }.to raise_error(StandardError, "Error adding importer user to Project members. #{exception_message}")
end
end
end
end
 
context 'when importer mapping fails' do
let(:exception_message) { 'Something went wrong' }
context 'when importable is Group' do
include_examples 'imports exported members' do
let(:source_type) { 'Namespace' }
let(:member_class) { GroupMember }
let(:importable) { create(:group) }
 
it 'includes importer specific error message' do
expect(ProjectMember).to receive(:create!).and_raise(StandardError.new(exception_message))
it 'does not lower owner access level' do
exported_members.first['access_level'] = member_class::OWNER
 
expect { members_mapper.map }.to raise_error(StandardError, "Error adding importer user to Project members. #{exception_message}")
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
expect(member_class.find_by_user_id(user2.id).access_level).to eq(member_class::OWNER)
end
end
end
end
@@ -32,7 +32,7 @@ describe Gitlab::Kubernetes::Helm::Pod do
it 'generates the appropriate specifications for the container' do
container = subject.generate.spec.containers.first
expect(container.name).to eq('helm')
expect(container.image).to eq('registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/2.16.1-kube-1.13.12')
expect(container.image).to eq('registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/2.16.3-kube-1.13.12')
expect(container.env.count).to eq(3)
expect(container.env.map(&:name)).to match_array([:HELM_VERSION, :TILLER_NAMESPACE, :COMMAND_SCRIPT])
expect(container.command).to match_array(["/bin/sh"])
@@ -2,38 +2,54 @@
 
require 'spec_helper'
 
describe Gitlab::ReferenceCounter do
let(:redis) { double('redis') }
let(:reference_counter_key) { "git-receive-pack-reference-counter:project-1" }
describe Gitlab::ReferenceCounter, :clean_gitlab_redis_shared_state do
let(:reference_counter) { described_class.new('project-1') }
 
before do
allow(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
describe '#increase' do
it 'increases and sets the expire time of a reference count for a path' do
expect { reference_counter.increase }.to change { reference_counter.value }.by(1)
expect(reference_counter.expires_in).to be_positive
expect(reference_counter.increase).to be(true)
end
end
 
it 'increases and set the expire time of a reference count for a path' do
expect(redis).to receive(:incr).with(reference_counter_key)
expect(redis).to receive(:expire).with(reference_counter_key,
described_class::REFERENCE_EXPIRE_TIME)
expect(reference_counter.increase).to be(true)
describe '#decrease' do
it 'decreases the reference count for a path' do
reference_counter.increase
expect { reference_counter.decrease }.to change { reference_counter.value }.by(-1)
end
it 'warns if attempting to decrease a counter with a value of zero or less, and resets the counter' do
expect(Rails.logger).to receive(:warn).with("Reference counter for project-1" \
" decreased when its value was less than 1. Resetting the counter.")
expect { reference_counter.decrease }.not_to change { reference_counter.value }
end
end
 
it 'decreases the reference count for a path' do
allow(redis).to receive(:decr).and_return(0)
expect(redis).to receive(:decr).with(reference_counter_key)
expect(reference_counter.decrease).to be(true)
describe '#value' do
it 'get the reference count for a path' do
expect(reference_counter.value).to eq(0)
reference_counter.increase
expect(reference_counter.value).to eq(1)
end
end
 
it 'warns if attempting to decrease a counter with a value of one or less, and resets the counter' do
expect(redis).to receive(:decr).and_return(-1)
expect(redis).to receive(:del)
expect(Rails.logger).to receive(:warn).with("Reference counter for project-1" \
" decreased when its value was less than 1. Reseting the counter.")
expect(reference_counter.decrease).to be(true)
describe '#reset!' do
it 'resets reference count down to zero' do
3.times { reference_counter.increase }
expect { reference_counter.reset! }.to change { reference_counter.value}.from(3).to(0)
end
end
 
it 'get the reference count for a path' do
allow(redis).to receive(:get).and_return(1)
expect(reference_counter.value).to be(1)
describe '#expires_in' do
it 'displays the expiration time in seconds' do
reference_counter.increase
expect(reference_counter.expires_in).to be_between(500, 600)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20200221105436_update_application_setting_npm_package_requests_forwarding_default.rb')
describe UpdateApplicationSettingNpmPackageRequestsForwardingDefault, :migration do
# Create test data - an application_settings row with the old default (false).
let(:application_settings) { table(:application_settings) }
before do
application_settings.create!(npm_package_requests_forwarding: false)
end
# Test just the up migration.
it 'correctly migrates the application setting' do
expect { migrate! }.to change { current_application_setting }.from(false).to(true)
end
# Test a reversible migration.
it 'correctly migrates up and down the application setting' do
reversible_migration do |migration|
# Expectations will run before the up migration,
# and then again after the down migration
migration.before -> {
expect(current_application_setting).to eq false
}
# Expectations will run after the up migration.
migration.after -> {
expect(current_application_setting).to eq true
}
end
end
def current_application_setting
ApplicationSetting.current_without_cache.npm_package_requests_forwarding
end
end
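The spec above expects the setting to move from false to true for an existing row, so the migration it covers has to flip existing rows as well as the column default. A hedged sketch of what such a reversible migration could look like (an assumption based on the spec, not the actual migration file):

class UpdateApplicationSettingNpmPackageRequestsForwardingDefault < ActiveRecord::Migration[6.0]
  def up
    change_column_default :application_settings, :npm_package_requests_forwarding, true
    execute 'UPDATE application_settings SET npm_package_requests_forwarding = TRUE'
  end

  def down
    change_column_default :application_settings, :npm_package_requests_forwarding, false
    execute 'UPDATE application_settings SET npm_package_requests_forwarding = FALSE'
  end
end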
@@ -56,6 +56,88 @@ describe API::Internal::Pages do
end
end
 
context 'serverless domain' do
let(:namespace) { create(:namespace, name: 'gitlab-org') }
let(:project) { create(:project, namespace: namespace, name: 'gitlab-ce') }
let(:environment) { create(:environment, project: project) }
let(:pages_domain) { create(:pages_domain, domain: 'serverless.gitlab.io') }
let(:knative_without_ingress) { create(:clusters_applications_knative) }
let(:knative_with_ingress) { create(:clusters_applications_knative, external_ip: '10.0.0.1') }
context 'without a knative ingress gateway IP' do
let!(:serverless_domain_cluster) do
create(
:serverless_domain_cluster,
uuid: 'abcdef12345678',
pages_domain: pages_domain,
knative: knative_without_ingress
)
end
let(:serverless_domain) do
create(
:serverless_domain,
serverless_domain_cluster: serverless_domain_cluster,
environment: environment
)
end
it 'responds with 204 no content' do
query_host(serverless_domain.uri.host)
expect(response).to have_gitlab_http_status(:no_content)
expect(response.body).to be_empty
end
end
context 'with a knative ingress gateway IP' do
let!(:serverless_domain_cluster) do
create(
:serverless_domain_cluster,
uuid: 'abcdef12345678',
pages_domain: pages_domain,
knative: knative_with_ingress
)
end
let(:serverless_domain) do
create(
:serverless_domain,
serverless_domain_cluster: serverless_domain_cluster,
environment: environment
)
end
it 'responds with proxy configuration' do
query_host(serverless_domain.uri.host)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('internal/serverless/virtual_domain')
expect(json_response['certificate']).to eq(pages_domain.certificate)
expect(json_response['key']).to eq(pages_domain.key)
expect(json_response['lookup_paths']).to eq(
[
{
'source' => {
'type' => 'serverless',
'service' => "test-function.#{project.name}-#{project.id}-#{environment.slug}.#{serverless_domain_cluster.knative.hostname}",
'cluster' => {
'hostname' => serverless_domain_cluster.knative.hostname,
'address' => serverless_domain_cluster.knative.external_ip,
'port' => 443,
'cert' => serverless_domain_cluster.certificate,
'key' => serverless_domain_cluster.key
}
}
}
]
)
end
end
end
context 'custom domain' do
let(:namespace) { create(:namespace, name: 'gitlab-org') }
let(:project) { create(:project, namespace: namespace, name: 'gitlab-ce') }
@@ -612,7 +612,8 @@ module Ci
allow(attempt_counter).to receive(:increment)
expect(job_queue_duration_seconds).to receive(:observe)
.with({ shared_runner: expected_shared_runner,
jobs_running_for_project: expected_jobs_running_for_project_first_job }, 1800)
jobs_running_for_project: expected_jobs_running_for_project_first_job,
shard: expected_shard }, 1800)
 
execute(runner)
end
@@ -625,7 +626,8 @@ module Ci
allow(attempt_counter).to receive(:increment)
expect(job_queue_duration_seconds).to receive(:observe)
.with({ shared_runner: expected_shared_runner,
jobs_running_for_project: expected_jobs_running_for_project_third_job }, 1800)
jobs_running_for_project: expected_jobs_running_for_project_third_job,
shard: expected_shard }, 1800)
 
execute(runner)
end
@@ -638,13 +640,28 @@ module Ci
end
 
context 'when shared runner is used' do
let(:runner) { shared_runner }
let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 tag2)) }
let(:expected_shared_runner) { true }
let(:expected_shard) { Ci::RegisterJobService::DEFAULT_METRICS_SHARD }
let(:expected_jobs_running_for_project_first_job) { 0 }
let(:expected_jobs_running_for_project_third_job) { 2 }
 
it_behaves_like 'metrics collector'
 
context 'when metrics_shard tag is defined' do
let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 metrics_shard::shard_tag tag2)) }
let(:expected_shard) { 'shard_tag' }
it_behaves_like 'metrics collector'
end
context 'when multiple metrics_shard tag is defined' do
let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 metrics_shard::shard_tag metrics_shard::shard_tag_2 tag2)) }
let(:expected_shard) { 'shard_tag' }
it_behaves_like 'metrics collector'
end
context 'when pending job with queued_at=nil is used' do
before do
pending_job.update(queued_at: nil)
@@ -662,8 +679,9 @@ module Ci
end
 
context 'when specific runner is used' do
let(:runner) { specific_runner }
let(:runner) { create(:ci_runner, :project, projects: [project], tag_list: %w(tag1 metrics_shard::shard_tag tag2)) }
let(:expected_shared_runner) { false }
let(:expected_shard) { Ci::RegisterJobService::DEFAULT_METRICS_SHARD }
let(:expected_jobs_running_for_project_first_job) { '+Inf' }
let(:expected_jobs_running_for_project_third_job) { '+Inf' }
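The metrics_shard contexts above exercise how a shard label is derived from runner tags of the form metrics_shard::<name>: the default shard when no such tag is present, and the first match when several are. A hypothetical sketch of that extraction (the constant value and helper name are assumptions, not the service's actual code):

DEFAULT_METRICS_SHARD = 'default'
METRICS_SHARD_TAG_PREFIX = 'metrics_shard::'

def metrics_shard_for(tag_list)
  # Pick the first tag carrying the shard prefix, or fall back to the default shard.
  tag = tag_list.find { |t| t.start_with?(METRICS_SHARD_TAG_PREFIX) }
  tag ? tag.sub(METRICS_SHARD_TAG_PREFIX, '') : DEFAULT_METRICS_SHARD
end

metrics_shard_for(%w[tag1 tag2])                                            # => "default"
metrics_shard_for(%w[tag1 metrics_shard::shard_tag tag2])                   # => "shard_tag"
metrics_shard_for(%w[metrics_shard::shard_tag metrics_shard::shard_tag_2])  # => "shard_tag"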
 