Commit 8b1228b0 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 96b0c124
Showing 481 additions and 143 deletions

# frozen_string_literal: true

module Gitlab
  module Ci
    class Config
      module Entry
        ##
        # Entry that represents a configuration of release assets.
        #
        class Release
          class Assets < ::Gitlab::Config::Entry::Node
            include ::Gitlab::Config::Entry::Configurable
            include ::Gitlab::Config::Entry::Validatable
            include ::Gitlab::Config::Entry::Attributable

            ALLOWED_KEYS = %i[links].freeze

            attributes ALLOWED_KEYS

            entry :links, Entry::Release::Assets::Links, description: 'Release assets:links.'

            validations do
              validates :config, allowed_keys: ALLOWED_KEYS
              validates :links, array_of_hashes: true, presence: true
            end

            helpers :links

            def value
              @config[:links] = links_value if @config.key?(:links)
              @config
            end
          end
        end
      end
    end
  end
end

# frozen_string_literal: true

module Gitlab
  module Ci
    class Config
      module Entry
        ##
        # Entry that represents a single link in release:assets:links.
        #
        class Release
          class Assets
            class Link < ::Gitlab::Config::Entry::Node
              include ::Gitlab::Config::Entry::Validatable
              include ::Gitlab::Config::Entry::Attributable

              ALLOWED_KEYS = %i[name url].freeze

              attributes ALLOWED_KEYS

              validations do
                validates :config, allowed_keys: ALLOWED_KEYS
                validates :name, type: String, presence: true
                validates :url, presence: true, addressable_url: true
              end
            end
          end
        end
      end
    end
  end
end

# frozen_string_literal: true

module Gitlab
  module Ci
    class Config
      module Entry
        ##
        # Entry that represents a configuration of release:assets:links.
        #
        class Release
          class Assets
            class Links < ::Gitlab::Config::Entry::Node
              include ::Gitlab::Config::Entry::Configurable
              include ::Gitlab::Config::Entry::Validatable

              entry :link, Entry::Release::Assets::Link, description: 'Release assets:links:link.'

              validations do
                validates :config, type: Array, presence: true
              end

              def skip_config_hash_validation?
                true
              end
            end
          end
        end
      end
    end
  end
end
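
Taken together, these entries validate the nested release:assets:links section of a job definition. As a rough illustration of what they accept (the file name and URL below are made up, and the calls simply mirror the new Link spec at the bottom of this commit):

# Illustrative sketch only; name and URL are placeholders.
link_config = {
  name: 'cool-app.zip',
  url: 'https://example.com/downloads/cool-app-1.0.zip'
}

entry = Gitlab::Ci::Config::Entry::Release::Assets::Link.new(link_config)

entry.valid?  # => true
entry.value   # => { name: 'cool-app.zip', url: 'https://example.com/downloads/cool-app-1.0.zip' }
entry.errors  # => [] when both keys are present and the URL scheme is http or https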
@@ -81,10 +81,15 @@ module Gitlab
          instance: job[:instance],
          start_in: job[:start_in],
          trigger: job[:trigger],
-         bridge_needs: job.dig(:needs, :bridge)&.first
+         bridge_needs: job.dig(:needs, :bridge)&.first,
+         release: release(job)
        }.compact }.compact
      end

+     def release(job)
+       job[:release] if Feature.enabled?(:ci_release_generation, default_enabled: false)
+     end
+
      def stage_builds_attributes(stage)
        @jobs.values
          .select { |job| job[:stage] == stage }
@@ -133,7 +138,6 @@ module Gitlab

        @jobs.each do |name, job|
          # logical validation for job
          validate_job_stage!(name, job)
          validate_job_dependencies!(name, job)
          validate_job_needs!(name, job)
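
The new release attribute only reaches a job's build attributes while the ci_release_generation feature flag is enabled, and the flag defaults to off. A hedged sketch of that gate from a Rails console (Feature.enable and Feature.enabled? are the standard GitLab feature-flag calls; the flag name comes from the diff above):

Feature.enabled?(:ci_release_generation, default_enabled: false) # => false until opted in
Feature.enable(:ci_release_generation)                           # opt an instance in
Feature.enabled?(:ci_release_generation, default_enabled: false) # => true afterwards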
@@ -10,7 +10,7 @@ module Gitlab
        def attributes(*attributes)
          attributes.flatten.each do |attribute|
            if method_defined?(attribute)
-             raise ArgumentError, 'Method already defined!'
+             raise ArgumentError, "Method already defined: #{attribute}"
            end

            define_method(attribute) do
@@ -5,7 +5,7 @@ module Gitlab
    module Entry
      ##
      # This mixin is responsible for adding DSL, which purpose is to
-     # simplifly process of adding child nodes.
+     # simplify the process of adding child nodes.
      #
      # This can be used only if parent node is a configuration entry that
      # holds a hash as a configuration value, for example:
@@ -8,9 +8,17 @@ module Gitlab
        feature = Feature.get(feature_key)
        return feature.enabled? if Feature.persisted?(feature)

+       # Disable Rugged auto-detect(can_use_disk?) when Puma threads>1
+       # https://gitlab.com/gitlab-org/gitlab/issues/119326
+       return false if running_puma_with_multiple_threads?
+
        Gitlab::GitalyClient.can_use_disk?(repo.storage)
      end

+     def running_puma_with_multiple_threads?
+       Gitlab::Runtime.puma? && ::Puma.cli_config.options[:max_threads] > 1
+     end
+
      def execute_rugged_call(method_name, *args)
        Gitlab::GitalyClient::StorageSettings.allow_disk_access do
          start = Gitlab::Metrics::System.monotonic_time
@@ -3,6 +3,14 @@
module Gitlab
  module Pagination
    class Base
+     def paginate(relation)
+       raise NotImplementedError
+     end
+
+     def finalize(records)
+       # Optional: Called with the actual set of records
+     end
+
      private

      def per_page
@@ -3,10 +3,6 @@
module Gitlab
  module Pagination
    module Keyset
-     def self.paginate(request_context, relation)
-       Gitlab::Pagination::Keyset::Pager.new(request_context).paginate(relation)
-     end
-
      def self.available?(request_context, relation)
        order_by = request_context.page.order_by

@@ -11,14 +11,13 @@ module Gitlab
        # Maximum number of records for a page
        MAXIMUM_PAGE_SIZE = 100

-       attr_accessor :lower_bounds, :end_reached
+       attr_accessor :lower_bounds
        attr_reader :order_by

-       def initialize(order_by: {}, lower_bounds: nil, per_page: DEFAULT_PAGE_SIZE, end_reached: false)
+       def initialize(order_by: {}, lower_bounds: nil, per_page: DEFAULT_PAGE_SIZE)
          @order_by = order_by.symbolize_keys
          @lower_bounds = lower_bounds&.symbolize_keys
          @per_page = per_page
-         @end_reached = end_reached
        end

        # Number of records to return per page
@@ -28,17 +27,11 @@ module Gitlab
          [@per_page, MAXIMUM_PAGE_SIZE].min
        end

-       # Determine whether this page indicates the end of the collection
-       def end_reached?
-         @end_reached
-       end
-
        # Construct a Page for the next page
        # Uses identical order_by/per_page information for the next page
-       def next(lower_bounds, end_reached)
+       def next(lower_bounds)
          dup.tap do |next_page|
            next_page.lower_bounds = lower_bounds&.symbolize_keys
-           next_page.end_reached = end_reached
          end
        end
      end
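
With end_reached gone, the next Page is derived purely from the lower bounds of the last record that was served. A small sketch of the resulting API (the order_by column and the values are illustrative):

page      = Gitlab::Pagination::Keyset::Page.new(order_by: { id: :asc }, per_page: 50)
next_page = page.next(id: 42)

next_page.lower_bounds # => { id: 42 }
next_page.per_page     # => 50, same ordering and page size as the current page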
@@ -14,27 +14,20 @@ module Gitlab
        # Validate assumption: The last two columns must match the page order_by
        validate_order!(relation)

-       # This performs the database query and retrieves records
-       # We retrieve one record more to check if we have data beyond this page
-       all_records = relation.limit(page.per_page + 1).to_a # rubocop: disable CodeReuse/ActiveRecord
-       records_for_page = all_records.first(page.per_page)
-
-       # If we retrieved more records than belong on this page,
-       # we know there's a next page
-       there_is_more = all_records.size > records_for_page.size
-       apply_headers(records_for_page.last, there_is_more)
-
-       records_for_page
+       relation.limit(page.per_page) # rubocop: disable CodeReuse/ActiveRecord
      end

+     def finalize(records)
+       apply_headers(records.last)
+     end
+
      private

-     def apply_headers(last_record_in_page, there_is_more)
-       end_reached = last_record_in_page.nil? || !there_is_more
-       lower_bounds = last_record_in_page&.slice(page.order_by.keys)
+     def apply_headers(last_record_in_page)
+       return unless last_record_in_page

-       next_page = page.next(lower_bounds, end_reached)
+       lower_bounds = last_record_in_page&.slice(page.order_by.keys)
+       next_page = page.next(lower_bounds)

        request.apply_headers(next_page)
      end
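
The Pager now separates querying from header generation: #paginate only restricts the relation, while #finalize receives the records that were actually loaded so the rel="next" Link header can be built from the last one. A sketch of how the two calls are driven together (request_context stands in for the Gitlab::Pagination::Keyset::RequestContext built by the API helper):

pager   = Gitlab::Pagination::Keyset::Pager.new(request_context)
records = pager.paginate(relation).to_a # run the limited query
pager.finalize(records)                 # applies the next-page Link header from records.last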
@@ -68,8 +68,6 @@ module Gitlab
      end

      def pagination_links(next_page)
-       return if next_page.end_reached?
-
        %(<#{page_href(next_page)}>; rel="next")
      end

@@ -21,6 +21,8 @@ module QA
        delete delete_project_request.url

        expect_status(202)
+
+       Page::Main::Menu.perform(&:sign_out_if_signed_in)
      end

      it 'user imports a GitHub repo' do
@@ -202,6 +202,53 @@ describe('IDE store file actions', () => {
      };
    });

+   describe('call to service', () => {
+     const callExpectation = serviceCalled => {
+       store.dispatch('getFileData', { path: localFile.path });
+
+       if (serviceCalled) {
+         expect(service.getFileData).toHaveBeenCalled();
+       } else {
+         expect(service.getFileData).not.toHaveBeenCalled();
+       }
+     };
+
+     beforeEach(() => {
+       service.getFileData.mockImplementation(() => new Promise(() => {}));
+     });
+
+     it("isn't called if file.raw exists", () => {
+       localFile.raw = 'raw data';
+
+       callExpectation(false);
+     });
+
+     it("isn't called if file is a tempFile", () => {
+       localFile.raw = '';
+       localFile.tempFile = true;
+
+       callExpectation(false);
+     });
+
+     it('is called if file is a tempFile but also renamed', () => {
+       localFile.raw = '';
+       localFile.tempFile = true;
+       localFile.prevPath = 'old_path';
+
+       callExpectation(true);
+     });
+
+     it('is called if tempFile but file was deleted and readded', () => {
+       localFile.raw = '';
+       localFile.tempFile = true;
+       localFile.prevPath = 'old_path';
+
+       store.state.stagedFiles = [{ ...localFile, deleted: true }];
+
+       callExpectation(true);
+     });
+   });
+
    describe('success', () => {
      beforeEach(() => {
        mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).replyOnce(
@@ -332,10 +379,10 @@ describe('IDE store file actions', () => {
        mock.onGet(`${RELATIVE_URL_ROOT}/test/test/7297abc/${localFile.path}`).networkError();
      });

-     it('dispatches error action', done => {
+     it('dispatches error action', () => {
        const dispatch = jest.fn();

-       actions
+       return actions
          .getFileData(
            { state: store.state, commit() {}, dispatch, getters: store.getters },
            { path: localFile.path },
@@ -350,10 +397,7 @@ describe('IDE store file actions', () => {
              makeFileActive: true,
            },
          });
-         done();
-       })
-       .catch(done.fail);
        });
      });
    });
  });
@@ -446,12 +490,14 @@ describe('IDE store file actions', () => {
        mock.onGet(/(.*)/).networkError();
      });

-     it('dispatches error action', done => {
+     it('dispatches error action', () => {
        const dispatch = jest.fn();

-       actions
-         .getRawFileData({ state: store.state, commit() {}, dispatch }, { path: tmpFile.path })
-         .then(done.fail)
+       return actions
+         .getRawFileData(
+           { state: store.state, commit() {}, dispatch, getters: store.getters },
+           { path: tmpFile.path },
+         )
          .catch(() => {
            expect(dispatch).toHaveBeenCalledWith('setErrorMessage', {
              text: 'An error occurred whilst loading the file content.',
@@ -461,8 +507,6 @@ describe('IDE store file actions', () => {
              path: tmpFile.path,
            },
          });
-
-         done();
        });
      });
    });
@@ -11,7 +11,7 @@ describe('IDE store file mutations', () => {
  beforeEach(() => {
    localStore = createStore();
    localState = localStore.state;
-   localFile = { ...file(), type: 'blob' };
+   localFile = { ...file('file'), type: 'blob', content: 'original' };

    localState.entries[localFile.path] = localFile;
  });
@@ -139,35 +139,68 @@ describe('IDE store file mutations', () => {
  });

  describe('SET_FILE_RAW_DATA', () => {
-   it('sets raw data', () => {
+   const callMutationForFile = f => {
      mutations.SET_FILE_RAW_DATA(localState, {
-       file: localFile,
+       file: f,
        raw: 'testing',
+       fileDeletedAndReadded: localStore.getters.isFileDeletedAndReadded(localFile.path),
      });
+   };
+
+   it('sets raw data', () => {
+     callMutationForFile(localFile);

      expect(localFile.raw).toBe('testing');
    });

+   it('sets raw data to stagedFile if file was deleted and readded', () => {
+     localState.stagedFiles = [{ ...localFile, deleted: true }];
+     localFile.tempFile = true;
+
+     callMutationForFile(localFile);
+
+     expect(localFile.raw).toBeFalsy();
+     expect(localState.stagedFiles[0].raw).toBe('testing');
+   });
+
+   it("sets raw data to a file's content if tempFile is empty", () => {
+     localFile.tempFile = true;
+     localFile.content = '';
+
+     callMutationForFile(localFile);
+
+     expect(localFile.raw).toBeFalsy();
+     expect(localFile.content).toBe('testing');
+   });
+
    it('adds raw data to open pending file', () => {
      localState.openFiles.push({ ...localFile, pending: true });

-     mutations.SET_FILE_RAW_DATA(localState, {
-       file: localFile,
-       raw: 'testing',
-     });
+     callMutationForFile(localFile);

      expect(localState.openFiles[0].raw).toBe('testing');
    });

-   it('does not add raw data to open pending tempFile file', () => {
-     localState.openFiles.push({ ...localFile, pending: true, tempFile: true });
+   it('sets raw to content of a renamed tempFile', () => {
+     localFile.tempFile = true;
+     localFile.prevPath = 'old_path';
+     localState.openFiles.push({ ...localFile, pending: true });

-     mutations.SET_FILE_RAW_DATA(localState, {
-       file: localFile,
-       raw: 'testing',
-     });
+     callMutationForFile(localFile);

      expect(localState.openFiles[0].raw).not.toBe('testing');
+     expect(localState.openFiles[0].content).toBe('testing');
    });
+
+   it('adds raw data to a staged deleted file if unstaged change has a tempFile of the same name', () => {
+     localFile.tempFile = true;
+     localState.openFiles.push({ ...localFile, pending: true });
+     localState.stagedFiles = [{ ...localFile, deleted: true }];
+
+     callMutationForFile(localFile);
+
+     expect(localFile.raw).toBeFalsy();
+     expect(localState.stagedFiles[0].raw).toBe('testing');
+   });
  });
 
import { readmeFile } from '~/repository/utils/readme';

describe('readmeFile', () => {
- describe('markdown files', () => {
-   it('returns markdown file', () => {
-     expect(readmeFile([{ name: 'README' }, { name: 'README.md' }])).toEqual({
-       name: 'README.md',
-     });
+ it('prefers README with markup over plain text README', () => {
+   expect(readmeFile([{ name: 'README' }, { name: 'README.md' }])).toEqual({
+     name: 'README.md',
+   });
  });

  expect(readmeFile([{ name: 'README' }, { name: 'index.md' }])).toEqual({
    name: 'index.md',
  });

+ it('is case insensitive', () => {
+   expect(readmeFile([{ name: 'README' }, { name: 'readme.rdoc' }])).toEqual({
+     name: 'readme.rdoc',
+   });
  });

- describe('plain files', () => {
-   it('returns plain file', () => {
-     expect(readmeFile([{ name: 'README' }, { name: 'TEST.md' }])).toEqual({
-       name: 'README',
-     });
+ it('returns the first README found', () => {
+   expect(readmeFile([{ name: 'INDEX.adoc' }, { name: 'README.md' }])).toEqual({
+     name: 'INDEX.adoc',
+   });
  });

-   expect(readmeFile([{ name: 'readme' }, { name: 'TEST.md' }])).toEqual({
-     name: 'readme',
-   });
+ it('expects extension to be separated by dot', () => {
+   expect(readmeFile([{ name: 'readmeXorg' }, { name: 'index.org' }])).toEqual({
+     name: 'index.org',
+   });
  });

- describe('non-previewable file', () => {
-   it('returns undefined', () => {
-     expect(readmeFile([{ name: 'index.js' }, { name: 'TEST.md' }])).toBe(undefined);
+ it('returns plain text README when there is no README with markup', () => {
+   expect(readmeFile([{ name: 'README' }, { name: 'NOT_README.md' }])).toEqual({
+     name: 'README',
+   });
  });

+ it('returns undefined when there are no appropriate files', () => {
+   expect(readmeFile([{ name: 'index.js' }, { name: 'md.README' }])).toBe(undefined);
+   expect(readmeFile([])).toBe(undefined);
  });
});
@@ -5,70 +5,14 @@
describe API::Helpers::Pagination do
  subject { Class.new.include(described_class).new }

- let(:expected_result) { double("result", to_a: double) }
- let(:relation) { double("relation") }
- let(:params) { {} }
+ let(:paginator) { double('paginator') }
+ let(:relation) { double('relation') }
+ let(:expected_result) { double('expected result') }

- before do
-   allow(subject).to receive(:params).and_return(params)
- end
-
- describe '#paginate' do
-   let(:offset_pagination) { double("offset pagination") }
-
-   it 'delegates to OffsetPagination' do
-     expect(::Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(offset_pagination)
-     expect(offset_pagination).to receive(:paginate).with(relation).and_return(expected_result)
-
-     result = subject.paginate(relation)
-
-     expect(result).to eq(expected_result)
-   end
- end
-
- describe '#paginate_and_retrieve!' do
-   context 'for offset pagination' do
-     before do
-       allow(Gitlab::Pagination::Keyset).to receive(:available?).and_return(false)
-     end
-
-     it 'delegates to paginate' do
-       expect(subject).to receive(:paginate).with(relation).and_return(expected_result)
-
-       result = subject.paginate_and_retrieve!(relation)
-
-       expect(result).to eq(expected_result.to_a)
-     end
-   end
-
-   context 'for keyset pagination' do
-     let(:params) { { pagination: 'keyset' } }
-     let(:request_context) { double('request context') }
-
-     before do
-       allow(Gitlab::Pagination::Keyset::RequestContext).to receive(:new).with(subject).and_return(request_context)
-     end
-
-     context 'when keyset pagination is available' do
-       it 'delegates to KeysetPagination' do
-         expect(Gitlab::Pagination::Keyset).to receive(:available?).and_return(true)
-         expect(Gitlab::Pagination::Keyset).to receive(:paginate).with(request_context, relation).and_return(expected_result)
-
-         result = subject.paginate_and_retrieve!(relation)
-
-         expect(result).to eq(expected_result.to_a)
-       end
-     end
-
-     context 'when keyset pagination is not available' do
-       it 'renders a 501 error if keyset pagination isnt available yet' do
-         expect(Gitlab::Pagination::Keyset).to receive(:available?).with(request_context, relation).and_return(false)
-         expect(Gitlab::Pagination::Keyset).not_to receive(:paginate)
-         expect(subject).to receive(:error!).with(/not yet available/, 405)
+ it 'delegates to OffsetPagination' do
+   expect(Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(paginator)
+   expect(paginator).to receive(:paginate).with(relation).and_return(expected_result)

-         subject.paginate_and_retrieve!(relation)
-       end
-     end
-   end
+   expect(subject.paginate(relation)).to eq(expected_result)
  end
end

# frozen_string_literal: true

require 'spec_helper'

describe API::Helpers::PaginationStrategies do
  subject { Class.new.include(described_class).new }

  let(:expected_result) { double("result") }
  let(:relation) { double("relation") }
  let(:params) { {} }

  before do
    allow(subject).to receive(:params).and_return(params)
  end

  describe '#paginate_with_strategies' do
    let(:paginator) { double("paginator", paginate: expected_result, finalize: nil) }

    before do
      allow(subject).to receive(:paginator).with(relation).and_return(paginator)
    end

    it 'yields paginated relation' do
      expect { |b| subject.paginate_with_strategies(relation, &b) }.to yield_with_args(expected_result)
    end

    it 'calls #finalize with first value returned from block' do
      return_value = double
      expect(paginator).to receive(:finalize).with(return_value)

      subject.paginate_with_strategies(relation) do |records|
        some_options = {}

        [return_value, some_options]
      end
    end

    it 'returns whatever the block returns' do
      return_value = [double, double]

      result = subject.paginate_with_strategies(relation) do |records|
        return_value
      end

      expect(result).to eq(return_value)
    end
  end

  describe '#paginator' do
    context 'offset pagination' do
      let(:paginator) { double("paginator") }

      before do
        allow(subject).to receive(:keyset_pagination_enabled?).and_return(false)
      end

      it 'delegates to OffsetPagination' do
        expect(Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(paginator)

        expect(subject.paginator(relation)).to eq(paginator)
      end
    end

    context 'for keyset pagination' do
      let(:params) { { pagination: 'keyset' } }
      let(:request_context) { double('request context') }
      let(:pager) { double('pager') }

      before do
        allow(subject).to receive(:keyset_pagination_enabled?).and_return(true)
        allow(Gitlab::Pagination::Keyset::RequestContext).to receive(:new).with(subject).and_return(request_context)
      end

      context 'when keyset pagination is available' do
        before do
          allow(Gitlab::Pagination::Keyset).to receive(:available?).and_return(true)
          allow(Gitlab::Pagination::Keyset::Pager).to receive(:new).with(request_context).and_return(pager)
        end

        it 'delegates to Pager' do
          expect(subject.paginator(relation)).to eq(pager)
        end
      end

      context 'when keyset pagination is not available' do
        before do
          allow(Gitlab::Pagination::Keyset).to receive(:available?).with(request_context, relation).and_return(false)
        end

        it 'renders a 501 error' do
          expect(subject).to receive(:error!).with(/not yet available/, 405)

          subject.paginator(relation)
        end
      end
    end
  end
end
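
Read together, these examples pin down the helper's contract: the block receives the paginated relation, the first value it returns is handed to the paginator's #finalize, and the helper returns whatever the block returned. A plausible shape for #paginate_with_strategies, inferred from the spec above rather than copied from the commit:

def paginate_with_strategies(relation)
  paginator = paginator(relation)

  # tap returns its receiver unchanged, so the caller gets back exactly what its
  # block returned; destructuring the block argument picks out the records when
  # an [records, options] pair is returned.
  yield(paginator.paginate(relation)).tap do |records, _|
    paginator.finalize(records)
  end
end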
@@ -24,7 +24,7 @@ describe Gitlab::Ci::Config::Entry::Job do
    let(:result) do
      %i[before_script script stage type after_script cache
         image services only except rules needs variables artifacts
-        environment coverage retry interruptible timeout tags]
+        environment coverage retry interruptible timeout release tags]
    end

    it { is_expected.to match_array result }
@@ -122,6 +122,21 @@ describe Gitlab::Ci::Config::Entry::Job do

        it { expect(entry).to be_valid }
      end
+
+     context 'when it is a release' do
+       let(:config) do
+         {
+           script: ["make changelog | tee release_changelog.txt"],
+           release: {
+             tag_name: "v0.06",
+             name: "Release $CI_TAG_NAME",
+             description: "./release_changelog.txt"
+           }
+         }
+       end
+
+       it { expect(entry).to be_valid }
+     end
    end
  end

@@ -443,6 +458,25 @@ describe Gitlab::Ci::Config::Entry::Job do
          expect(entry.timeout).to eq('1m 1s')
        end
      end
+
+     context 'when it is a release' do
+       context 'when `release:description` is missing' do
+         let(:config) do
+           {
+             script: ["make changelog | tee release_changelog.txt"],
+             release: {
+               tag_name: "v0.06",
+               name: "Release $CI_TAG_NAME"
+             }
+           }
+         end
+
+         it "returns error" do
+           expect(entry).not_to be_valid
+           expect(entry.errors).to include "release description can't be blank"
+         end
+       end
+     end
    end
  end


# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::Ci::Config::Entry::Release::Assets::Link do
  let(:entry) { described_class.new(config) }

  describe 'validation' do
    context 'when entry config value is correct' do
      let(:config) do
        {
          name: "cool-app.zip",
          url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip"
        }
      end

      describe '#value' do
        it 'returns link configuration' do
          expect(entry.value).to eq config
        end
      end

      describe '#valid?' do
        it 'is valid' do
          expect(entry).to be_valid
        end
      end
    end

    context 'when entry value is not correct' do
      describe '#errors' do
        context 'when name is not a string' do
          let(:config) { { name: 123, url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip" } }

          it 'reports error' do
            expect(entry.errors)
              .to include 'link name should be a string'
          end
        end

        context 'when name is not present' do
          let(:config) { { url: "http://my.awesome.download.site/1.0-$CI_COMMIT_SHORT_SHA.zip" } }

          it 'reports error' do
            expect(entry.errors)
              .to include "link name can't be blank"
          end
        end

        context 'when url is not addressable' do
          let(:config) { { name: "cool-app.zip", url: "xyz" } }

          it 'reports error' do
            expect(entry.errors)
              .to include "link url is blocked: only allowed schemes are http, https"
          end
        end

        context 'when url is not present' do
          let(:config) { { name: "cool-app.zip" } }

          it 'reports error' do
            expect(entry.errors)
              .to include "link url can't be blank"
          end
        end

        context 'when there is an unknown key present' do
          let(:config) { { test: 100 } }

          it 'reports error' do
            expect(entry.errors)
              .to include 'link config contains unknown keys: test'
          end
        end
      end
    end
  end
end