Commit a19a376b authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 556c79d6
Showing changed files with 205 additions and 499 deletions
@@ -35,7 +35,7 @@ module Gitlab
query.length >= min_chars_for_partial_matching
end
 
# column - The column name to search in.
# column - The column name / Arel column to search in.
# query - The text to search for.
# lower_exact_match - When set to `true` we'll fall back to using
# `LOWER(column) = query` instead of using `ILIKE`.
@@ -43,19 +43,21 @@ module Gitlab
query = query.squish
return unless query.present?
 
arel_column = column.is_a?(Arel::Attributes::Attribute) ? column : arel_table[column]
words = select_fuzzy_words(query, use_minimum_char_limit: use_minimum_char_limit)
 
if words.any?
words.map { |word| arel_table[column].matches(to_pattern(word, use_minimum_char_limit: use_minimum_char_limit)) }.reduce(:and)
words.map { |word| arel_column.matches(to_pattern(word, use_minimum_char_limit: use_minimum_char_limit)) }.reduce(:and)
else
# No words of at least 3 chars, but we can search for an exact
# case insensitive match with the query as a whole
if lower_exact_match
Arel::Nodes::NamedFunction
.new('LOWER', [arel_table[column]])
.new('LOWER', [arel_column])
.eq(query)
else
arel_table[column].matches(sanitize_sql_like(query))
arel_column.matches(sanitize_sql_like(query))
end
end
end
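In short, fuzzy_arel_match now accepts either a column name on the model's own table or an Arel attribute from another table. A minimal usage sketch, assuming the keyword arguments documented above (the Route.arel_table[:path] call mirrors the new spec further down; the Project/:name example and the SQL shown are illustrative approximations):

# Column name on the model's own table (existing behaviour):
Project.fuzzy_arel_match(:name, 'kitten mittens')
# builds roughly: "projects"."name" ILIKE '%kitten%' AND "projects"."name" ILIKE '%mittens%'

# Arel attribute from another table (new behaviour):
Project.fuzzy_arel_match(Route.arel_table[:path], 'foo')
# builds roughly: "routes"."path" ILIKE '%foo%'

# With no word of at least 3 characters, the whole query is matched as a single
# pattern, or compared with LOWER(column) = query when lower_exact_match is true
# (assuming it is passed as a keyword argument, as the comment above suggests):
Project.fuzzy_arel_match(Route.arel_table[:path], 'ab', lower_exact_match: true)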
@@ -36,6 +36,10 @@ module Quality
workers
elastic_integration
],
migration: %w[
migrations
lib/gitlab/background_migration
],
integration: %w[
controllers
mailers
@@ -62,6 +66,10 @@ module Quality
 
def level_for(file_path)
case file_path
# Detect migration first since some background migration tests are under
# spec/lib/gitlab/background_migration and tests under spec/lib are unit by default
when regexp(:migration)
:migration
when regexp(:unit)
:unit
when regexp(:integration)
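As the comment above explains, the :migration pattern has to be checked before :unit, otherwise background-migration specs under spec/lib would be classified as unit tests. A small sketch of the resulting classification, assuming the default constructor with no path prefix (the paths come from the specs added further down):

level = Quality::TestLevel.new

level.level_for('spec/migrations/add_default_and_free_plans_spec.rb')
# => :migration
level.level_for('spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb')
# => :migration (caught by the :migration regexp before the :unit one)
level.level_for('spec/models/abuse_report_spec.rb')
# => :unit
level.level_for('spec/mailers/abuse_report_mailer_spec.rb')
# => :integration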
@@ -1702,6 +1702,9 @@ msgstr ""
msgid "An error occurred while saving the approval settings"
msgstr ""
 
msgid "An error occurred while saving the template. Please check if the template exists."
msgstr ""
msgid "An error occurred while subscribing to notifications."
msgstr ""
 
@@ -15054,6 +15057,9 @@ msgstr ""
msgid "Save pipeline schedule"
msgstr ""
 
msgid "Save template"
msgstr ""
msgid "Save variables"
msgstr ""
 
@@ -15715,6 +15721,12 @@ msgstr ""
msgid "Service Desk is enabled but not yet active"
msgstr ""
 
msgid "Service Desk is off"
msgstr ""
msgid "Service Desk is on"
msgstr ""
msgid "Service Templates"
msgstr ""
 
@@ -17133,6 +17145,12 @@ msgstr ""
msgid "Template"
msgstr ""
 
msgid "Template to append to all Service Desk issues"
msgstr ""
msgid "Template was successfully saved."
msgstr ""
msgid "Templates"
msgstr ""
 
@@ -4,11 +4,6 @@ module QA
module Page
module Mattermost
class Main < Page::Base
##
# TODO, define all selectors required by this page object
#
# See gitlab-org/gitlab-qa#154
#
view 'app/views/projects/mattermosts/new.html.haml'
 
def initialize
@@ -12,7 +12,7 @@ module QA
 
view 'app/assets/javascripts/deploy_keys/components/app.vue' do
element :deploy_keys_section, /class=".*deploy\-keys.*"/ # rubocop:disable QA/ElementWithPattern
element :project_deploy_keys, 'class="qa-project-deploy-keys"' # rubocop:disable QA/ElementWithPattern
element :project_deploy_keys
end
 
view 'app/assets/javascripts/deploy_keys/components/key.vue' do
@@ -52,10 +52,10 @@ gitlab:
resources:
requests:
cpu: 650m
memory: 880M
memory: 970M
limits:
cpu: 975m
memory: 1320M
memory: 1450M
task-runner:
resources:
requests:
@@ -68,10 +68,10 @@ gitlab:
resources:
requests:
cpu: 500m
memory: 1540M
memory: 1630M
limits:
cpu: 750m
memory: 2310M
memory: 2450M
deployment:
readinessProbe:
initialDelaySeconds: 5 # Default is 0
@@ -92,18 +92,18 @@ gitlab:
gitlab-runner:
resources:
requests:
cpu: 450m
cpu: 675m
memory: 100M
limits:
cpu: 675m
cpu: 1015m
memory: 150M
minio:
resources:
requests:
cpu: 5m
cpu: 9m
memory: 128M
limits:
cpu: 10m
cpu: 15m
memory: 280M
nginx-ingress:
controller:
@@ -140,43 +140,33 @@ describe('collapsible registry container', () => {
});
 
describe('tracking', () => {
const category = 'mock_page';
const testTrackingCall = action => {
expect(Tracking.event).toHaveBeenCalledWith(undefined, action, {
label: 'registry_repository_delete',
});
};
beforeEach(() => {
jest.spyOn(Tracking, 'event');
wrapper.vm.deleteItem = jest.fn().mockResolvedValue();
wrapper.vm.fetchRepos = jest.fn();
wrapper.setData({
tracking: {
...wrapper.vm.tracking,
category,
},
});
});
 
it('send an event when delete button is clicked', () => {
const deleteBtn = findDeleteBtn();
deleteBtn.trigger('click');
expect(Tracking.event).toHaveBeenCalledWith(category, 'click_button', {
label: 'registry_repository_delete',
category,
});
testTrackingCall('click_button');
});
it('send an event when cancel is pressed on modal', () => {
const deleteModal = findDeleteModal();
deleteModal.vm.$emit('cancel');
expect(Tracking.event).toHaveBeenCalledWith(category, 'cancel_delete', {
label: 'registry_repository_delete',
category,
});
testTrackingCall('cancel_delete');
});
it('send an event when confirm is clicked on modal', () => {
const deleteModal = findDeleteModal();
deleteModal.vm.$emit('ok');
 
expect(Tracking.event).toHaveBeenCalledWith(category, 'confirm_delete', {
label: 'registry_repository_delete',
category,
});
testTrackingCall('confirm_delete');
});
});
});
@@ -304,17 +304,14 @@ describe('table registry', () => {
});
 
describe('event tracking', () => {
const mockPageName = 'mock_page';
const testTrackingCall = (action, label = 'registry_tag_delete') => {
expect(Tracking.event).toHaveBeenCalledWith(undefined, action, { label, property: 'foo' });
};
 
beforeEach(() => {
jest.spyOn(Tracking, 'event');
wrapper.vm.handleSingleDelete = jest.fn();
wrapper.vm.handleMultipleDelete = jest.fn();
document.body.dataset.page = mockPageName;
});
afterEach(() => {
document.body.dataset.page = null;
});
 
describe('single tag delete', () => {
@@ -325,29 +322,25 @@ describe('table registry', () => {
it('send an event when delete button is clicked', () => {
const deleteBtn = findDeleteButtonsRow();
deleteBtn.at(0).trigger('click');
expect(Tracking.event).toHaveBeenCalledWith(mockPageName, 'click_button', {
label: 'registry_tag_delete',
property: 'foo',
});
testTrackingCall('click_button');
});
it('send an event when cancel is pressed on modal', () => {
const deleteModal = findDeleteModal();
deleteModal.vm.$emit('cancel');
expect(Tracking.event).toHaveBeenCalledWith(mockPageName, 'cancel_delete', {
label: 'registry_tag_delete',
property: 'foo',
});
testTrackingCall('cancel_delete');
});
it('send an event when confirm is clicked on modal', () => {
const deleteModal = findDeleteModal();
deleteModal.vm.$emit('ok');
 
expect(Tracking.event).toHaveBeenCalledWith(mockPageName, 'confirm_delete', {
label: 'registry_tag_delete',
property: 'foo',
});
testTrackingCall('confirm_delete');
});
});
describe('bulk tag delete', () => {
beforeEach(() => {
const items = [0, 1, 2];
@@ -357,27 +350,22 @@ describe('table registry', () => {
it('send an event when delete button is clicked', () => {
const deleteBtn = findDeleteButton();
deleteBtn.vm.$emit('click');
expect(Tracking.event).toHaveBeenCalledWith(mockPageName, 'click_button', {
label: 'bulk_registry_tag_delete',
property: 'foo',
});
testTrackingCall('click_button', 'bulk_registry_tag_delete');
});
it('send an event when cancel is pressed on modal', () => {
const deleteModal = findDeleteModal();
deleteModal.vm.$emit('cancel');
expect(Tracking.event).toHaveBeenCalledWith(mockPageName, 'cancel_delete', {
label: 'bulk_registry_tag_delete',
property: 'foo',
});
testTrackingCall('cancel_delete', 'bulk_registry_tag_delete');
});
it('send an event when confirm is clicked on modal', () => {
const deleteModal = findDeleteModal();
deleteModal.vm.$emit('ok');
 
expect(Tracking.event).toHaveBeenCalledWith(mockPageName, 'confirm_delete', {
label: 'bulk_registry_tag_delete',
property: 'foo',
});
testTrackingCall('confirm_delete', 'bulk_registry_tag_delete');
});
});
});
@@ -10,47 +10,13 @@ describe API::Helpers::Pagination do
let(:offset_pagination) { double("offset pagination") }
let(:expected_result) { double("result") }
 
before do
allow(subject).to receive(:params).and_return(params)
end
context 'for offset pagination' do
let(:params) { {} }
it 'delegates to OffsetPagination' do
expect(::Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(offset_pagination)
expect(offset_pagination).to receive(:paginate).with(relation).and_return(expected_result)
result = subject.paginate(relation)
expect(result).to eq(expected_result)
end
end
context 'for keyset pagination' do
let(:params) { { pagination: 'keyset' } }
let(:request_context) { double('request context') }
before do
allow(Gitlab::Pagination::Keyset::RequestContext).to receive(:new).with(subject).and_return(request_context)
allow(Gitlab::Pagination::Keyset).to receive(:available?).and_return(true)
end
it 'delegates to KeysetPagination' do
expect(Gitlab::Pagination::Keyset).to receive(:paginate).with(request_context, relation).and_return(expected_result)
result = subject.paginate(relation)
expect(result).to eq(expected_result)
end
it 'delegates to OffsetPagination' do
expect(::Gitlab::Pagination::OffsetPagination).to receive(:new).with(subject).and_return(offset_pagination)
expect(offset_pagination).to receive(:paginate).with(relation).and_return(expected_result)
 
it 'renders a 501 error if keyset pagination isnt available yet' do
expect(Gitlab::Pagination::Keyset).to receive(:available?).with(request_context, relation).and_return(false)
expect(Gitlab::Pagination::Keyset).not_to receive(:paginate)
expect(subject).to receive(:error!).with(/not yet available/, 501)
result = subject.paginate(relation)
 
subject.paginate(relation)
end
expect(result).to eq(expected_result)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Pagination::Keyset::Page do
describe '#per_page' do
it 'limits to a maximum of 20 records per page' do
per_page = described_class.new(per_page: 21).per_page
expect(per_page).to eq(described_class::DEFAULT_PAGE_SIZE)
end
it 'uses default value when given 0' do
per_page = described_class.new(per_page: 0).per_page
expect(per_page).to eq(described_class::DEFAULT_PAGE_SIZE)
end
it 'uses default value when given negative values' do
per_page = described_class.new(per_page: -1).per_page
expect(per_page).to eq(described_class::DEFAULT_PAGE_SIZE)
end
it 'uses the given value if it is within range' do
per_page = described_class.new(per_page: 10).per_page
expect(per_page).to eq(10)
end
end
describe '#next' do
let(:page) { described_class.new(order_by: order_by, lower_bounds: lower_bounds, per_page: per_page, end_reached: end_reached) }
subject { page.next(new_lower_bounds, new_end_reached) }
let(:order_by) { { id: :desc } }
let(:lower_bounds) { { id: 42 } }
let(:per_page) { 10 }
let(:end_reached) { false }
let(:new_lower_bounds) { { id: 21 } }
let(:new_end_reached) { true }
it 'copies over order_by' do
expect(subject.order_by).to eq(page.order_by)
end
it 'copies over per_page' do
expect(subject.per_page).to eq(page.per_page)
end
it 'dups the instance' do
expect(subject).not_to eq(page)
end
it 'sets lower_bounds only on new instance' do
expect(subject.lower_bounds).to eq(new_lower_bounds)
expect(page.lower_bounds).to eq(lower_bounds)
end
it 'sets end_reached only on new instance' do
expect(subject.end_reached?).to eq(new_end_reached)
expect(page.end_reached?).to eq(end_reached)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Pagination::Keyset::Pager do
let(:relation) { Project.all.order(id: :asc) }
let(:request) { double('request', page: page, apply_headers: nil) }
let(:page) { Gitlab::Pagination::Keyset::Page.new(order_by: { id: :asc }, per_page: 3) }
let(:next_page) { double('next page') }
before_all do
create_list(:project, 7)
end
describe '#paginate' do
subject { described_class.new(request).paginate(relation) }
it 'loads the result relation only once' do
expect do
subject
end.not_to exceed_query_limit(1)
end
it 'passes information about next page to request' do
lower_bounds = relation.limit(page.per_page).last.slice(:id)
expect(page).to receive(:next).with(lower_bounds, false).and_return(next_page)
expect(request).to receive(:apply_headers).with(next_page)
subject
end
context 'when retrieving the last page' do
let(:relation) { Project.where('id > ?', Project.maximum(:id) - page.per_page).order(id: :asc) }
it 'indicates this is the last page' do
expect(request).to receive(:apply_headers) do |next_page|
expect(next_page.end_reached?).to be_truthy
end
subject
end
end
context 'when retrieving an empty page' do
let(:relation) { Project.where('id > ?', Project.maximum(:id) + 1).order(id: :asc) }
it 'indicates this is the last page' do
expect(request).to receive(:apply_headers) do |next_page|
expect(next_page.end_reached?).to be_truthy
end
subject
end
end
it 'returns an array with the loaded records' do
expect(subject).to eq(relation.limit(page.per_page).to_a)
end
context 'validating the order clause' do
let(:page) { Gitlab::Pagination::Keyset::Page.new(order_by: { created_at: :asc }, per_page: 3) }
it 'raises an error if has a different order clause than the page' do
expect { subject }.to raise_error(ArgumentError, /order_by does not match/)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Pagination::Keyset::RequestContext do
let(:request) { double('request', params: params) }
describe '#page' do
subject { described_class.new(request).page }
context 'with only order_by given' do
let(:params) { { order_by: :id } }
it 'extracts order_by/sorting information' do
page = subject
expect(page.order_by).to eq(id: :desc)
end
end
context 'with order_by and sort given' do
let(:params) { { order_by: :created_at, sort: :desc } }
it 'extracts order_by/sorting information and adds tie breaker' do
page = subject
expect(page.order_by).to eq(created_at: :desc, id: :desc)
end
end
context 'with no order_by information given' do
let(:params) { {} }
it 'defaults to tie breaker' do
page = subject
expect(page.order_by).to eq({ id: :desc })
end
end
context 'with per_page params given' do
let(:params) { { per_page: 10 } }
it 'extracts per_page information' do
page = subject
expect(page.per_page).to eq(params[:per_page])
end
end
end
describe '#apply_headers' do
let(:request) { double('request', url: "http://#{Gitlab.config.gitlab.host}/api/v4/projects?foo=bar") }
let(:params) { { foo: 'bar' } }
let(:request_context) { double('request context', params: params, request: request) }
let(:next_page) { double('next page', order_by: { id: :asc }, lower_bounds: { id: 42 }, end_reached?: false) }
subject { described_class.new(request_context).apply_headers(next_page) }
it 'sets Links header with same host/path as the original request' do
orig_uri = URI.parse(request_context.request.url)
expect(request_context).to receive(:header) do |name, header|
expect(name).to eq('Links')
first_link, _ = /<([^>]+)>; rel="next"/.match(header).captures
uri = URI.parse(first_link)
expect(uri.host).to eq(orig_uri.host)
expect(uri.path).to eq(orig_uri.path)
end
subject
end
it 'sets Links header with a link to the next page' do
orig_uri = URI.parse(request_context.request.url)
expect(request_context).to receive(:header) do |name, header|
expect(name).to eq('Links')
first_link, _ = /<([^>]+)>; rel="next"/.match(header).captures
query = CGI.parse(URI.parse(first_link).query)
expect(query.except('id_after')).to eq(CGI.parse(orig_uri.query).except('id_after'))
expect(query['id_after']).to eq(['42'])
end
subject
end
context 'with descending order' do
let(:next_page) { double('next page', order_by: { id: :desc }, lower_bounds: { id: 42 }, end_reached?: false) }
it 'sets Links header with a link to the next page' do
orig_uri = URI.parse(request_context.request.url)
expect(request_context).to receive(:header) do |name, header|
expect(name).to eq('Links')
first_link, _ = /<([^>]+)>; rel="next"/.match(header).captures
query = CGI.parse(URI.parse(first_link).query)
expect(query.except('id_before')).to eq(CGI.parse(orig_uri.query).except('id_before'))
expect(query['id_before']).to eq(['42'])
end
subject
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Pagination::Keyset do
describe '.paginate' do
subject { described_class.paginate(request_context, relation) }
let(:request_context) { double }
let(:relation) { double }
let(:pager) { double }
let(:result) { double }
it 'uses Pager to paginate the relation' do
expect(Gitlab::Pagination::Keyset::Pager).to receive(:new).with(request_context).and_return(pager)
expect(pager).to receive(:paginate).with(relation).and_return(result)
expect(subject).to eq(result)
end
end
describe '.available?' do
subject { described_class }
let(:request_context) { double("request context", page: page)}
let(:page) { double("page", order_by: order_by) }
shared_examples_for 'keyset pagination is available' do
it 'returns true for Project' do
expect(subject.available?(request_context, Project.all)).to be_truthy
end
it 'return false for other types of relations' do
expect(subject.available?(request_context, User.all)).to be_falsey
end
end
context 'with order-by id asc' do
let(:order_by) { { id: :asc } }
it_behaves_like 'keyset pagination is available'
end
context 'with order-by id desc' do
let(:order_by) { { id: :desc } }
it_behaves_like 'keyset pagination is available'
end
context 'with other order-by columns' do
let(:order_by) { { created_at: :desc, id: :desc } }
it 'returns false for Project' do
expect(subject.available?(request_context, Project.all)).to be_falsey
end
it 'return false for other types of relations' do
expect(subject.available?(request_context, User.all)).to be_falsey
end
end
end
end
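Taken together, these new specs describe the keyset pagination flow end to end. A rough sketch of how the pieces compose, based only on the expectations above (api_request is a hypothetical endpoint object responding to params, url and header):

request_context = Gitlab::Pagination::Keyset::RequestContext.new(api_request)

# Keyset pagination is currently only available for Project relations
# ordered by id asc/desc; anything else gets a 501 from the API helper.
if Gitlab::Pagination::Keyset.available?(request_context, Project.all)
  relation = Project.all.order(id: :asc) # must match the page's order_by
  records = Gitlab::Pagination::Keyset.paginate(request_context, relation)
  # RequestContext#apply_headers then emits a header such as:
  #   Links: <https://gitlab.example.com/api/v4/projects?...&id_after=42>; rel="next"
end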
@@ -207,5 +207,15 @@ describe Gitlab::SQL::Pattern do
expect(fuzzy_arel_match.to_sql).to match(/title.+I?LIKE '\%foo\%' AND .*title.*I?LIKE '\%baz\%' AND .*title.*I?LIKE '\%really bar\%'/)
end
end
context 'when passing an Arel column' do
let(:query) { 'foo' }
subject(:fuzzy_arel_match) { Project.fuzzy_arel_match(Route.arel_table[:path], query) }
it 'returns a condition with the table and column name' do
expect(fuzzy_arel_match.to_sql).to match(/"routes"."path".*ILIKE '\%foo\%'/)
end
end
end
end
@@ -25,6 +25,13 @@ RSpec.describe Quality::TestLevel do
end
end
 
context 'when level is migration' do
it 'returns a pattern' do
expect(subject.pattern(:migration))
.to eq("spec/{migrations,lib/gitlab/background_migration}{,/**/}*_spec.rb")
end
end
context 'when level is integration' do
it 'returns a pattern' do
expect(subject.pattern(:integration))
@@ -79,6 +86,13 @@ RSpec.describe Quality::TestLevel do
end
end
 
context 'when level is migration' do
it 'returns a regexp' do
expect(subject.regexp(:migration))
.to eq(%r{spec/(migrations|lib/gitlab/background_migration)})
end
end
context 'when level is integration' do
it 'returns a regexp' do
expect(subject.regexp(:integration))
@@ -116,6 +130,18 @@ RSpec.describe Quality::TestLevel do
expect(subject.level_for('spec/models/abuse_report_spec.rb')).to eq(:unit)
end
 
it 'returns the correct level for a migration test' do
expect(subject.level_for('spec/migrations/add_default_and_free_plans_spec.rb')).to eq(:migration)
end
it 'returns the correct level for a background_migration test' do
expect(subject.level_for('spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb')).to eq(:migration)
end
it 'returns the correct level for a geo migration test' do
expect(described_class.new('ee/').level_for('ee/spec/migrations/geo/migrate_ci_job_artifacts_to_separate_registry_spec.rb')).to eq(:migration)
end
it 'returns the correct level for an integration test' do
expect(subject.level_for('spec/mailers/abuse_report_mailer_spec.rb')).to eq(:integration)
end
@@ -307,6 +307,28 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
expect(lookup_entries).not_to include(max_number_of_sessions_plus_one.to_s, max_number_of_sessions_plus_two.to_s)
end
end
it 'removes obsolete lookup entries even without active session' do
Gitlab::Redis::SharedState.with do |redis|
redis.sadd(
"session:lookup:user:gitlab:#{user.id}",
"#{max_number_of_sessions_plus_two + 1}"
)
end
ActiveSession.cleanup(user)
Gitlab::Redis::SharedState.with do |redis|
lookup_entries = redis.smembers("session:lookup:user:gitlab:#{user.id}")
expect(lookup_entries.count).to eq(ActiveSession::ALLOWED_NUMBER_OF_ACTIVE_SESSIONS)
expect(lookup_entries).not_to include(
max_number_of_sessions_plus_one.to_s,
max_number_of_sessions_plus_two.to_s,
(max_number_of_sessions_plus_two + 1).to_s
)
end
end
end
end
end
@@ -1662,7 +1662,7 @@ describe Project do
end
 
describe '.search' do
let(:project) { create(:project, description: 'kitten mittens') }
let_it_be(:project) { create(:project, description: 'kitten mittens') }
 
it 'returns projects with a matching name' do
expect(described_class.search(project.name)).to eq([project])
@@ -1700,6 +1700,39 @@ describe Project do
expect(described_class.search(project.path.upcase)).to eq([project])
end
 
context 'by full path' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
context 'when feature is enabled' do
before do
stub_feature_flags(project_search_by_full_path: true)
end
it 'returns projects that match the group path' do
expect(described_class.search(group.path)).to eq([project])
end
it 'returns projects that match the full path' do
expect(described_class.search(project.full_path)).to eq([project])
end
end
context 'when feature is disabled' do
before do
stub_feature_flags(project_search_by_full_path: false)
end
it 'returns no results when searching by group path' do
expect(described_class.search(group.path)).to be_empty
end
it 'returns no results when searching by full path' do
expect(described_class.search(project.full_path)).to be_empty
end
end
end
describe 'with pending_delete project' do
let(:pending_delete_project) { create(:project, pending_delete: true) }
 
@@ -570,87 +570,6 @@ describe API::Projects do
let(:projects) { Project.all }
end
end
context 'with keyset pagination' do
let(:current_user) { user }
let(:projects) { [public_project, project, project2, project3] }
context 'headers and records' do
let(:params) { { pagination: 'keyset', order_by: :id, sort: :asc, per_page: 1 } }
it 'includes a pagination header with link to the next page' do
get api('/projects', current_user), params: params
expect(response.header).to include('Links')
expect(response.header['Links']).to include('pagination=keyset')
expect(response.header['Links']).to include("id_after=#{public_project.id}")
end
it 'contains only the first project with per_page = 1' do
get api('/projects', current_user), params: params
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(public_project.id)
end
it 'does not include a link if the end has reached and there is no more data' do
get api('/projects', current_user), params: params.merge(id_after: project2.id)
expect(response.header).not_to include('Links')
end
it 'responds with 501 if order_by is different from id' do
get api('/projects', current_user), params: params.merge(order_by: :created_at)
expect(response).to have_gitlab_http_status(501)
end
end
context 'with descending sorting' do
let(:params) { { pagination: 'keyset', order_by: :id, sort: :desc, per_page: 1 } }
it 'includes a pagination header with link to the next page' do
get api('/projects', current_user), params: params
expect(response.header).to include('Links')
expect(response.header['Links']).to include('pagination=keyset')
expect(response.header['Links']).to include("id_before=#{project3.id}")
end
it 'contains only the last project with per_page = 1' do
get api('/projects', current_user), params: params
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).to contain_exactly(project3.id)
end
end
context 'retrieving the full relation' do
let(:params) { { pagination: 'keyset', order_by: :id, sort: :desc, per_page: 2 } }
it 'returns all projects' do
url = '/projects'
requests = 0
ids = []
while url && requests <= 5 # circuit breaker
requests += 1
get api(url, current_user), params: params
links = response.header['Links']
url = links&.match(/<[^>]+(\/projects\?[^>]+)>; rel="next"/) do |match|
match[1]
end
ids += JSON.parse(response.body).map { |p| p['id'] }
end
expect(ids).to contain_exactly(*projects.map(&:id))
end
end
end
end
 
describe 'POST /projects' do
@@ -15,6 +15,39 @@ RSpec.configure do |config|
delete_from_all_tables!
end
 
config.append_after(:context, :migration) do
delete_from_all_tables!
# Postgres maximum number of columns in a table is 1600 (https://github.com/postgres/postgres/blob/de41869b64d57160f58852eab20a27f248188135/src/include/access/htup_details.h#L23-L47).
# And since:
# "The DROP COLUMN form does not physically remove the column, but simply makes
# it invisible to SQL operations. Subsequent insert and update operations in the
# table will store a null value for the column. Thus, dropping a column is quick
# but it will not immediately reduce the on-disk size of your table, as the space
# occupied by the dropped column is not reclaimed.
# The space will be reclaimed over time as existing rows are updated."
# according to https://www.postgresql.org/docs/current/sql-altertable.html.
# We drop and recreate the database if any table has more than 1200 columns, just to be safe.
max_allowed_columns = 1200
tables_with_more_than_allowed_columns =
ApplicationRecord.connection.execute("SELECT attrelid::regclass::text AS table, COUNT(*) AS column_count FROM pg_attribute GROUP BY attrelid HAVING COUNT(*) > #{max_allowed_columns}")
if tables_with_more_than_allowed_columns.any?
tables_with_more_than_allowed_columns.each do |result|
puts "The #{result['table']} table has #{result['column_count']} columns."
end
puts "Recreating the database"
start = Gitlab::Metrics::System.monotonic_time
ActiveRecord::Tasks::DatabaseTasks.drop_current
ActiveRecord::Tasks::DatabaseTasks.create_current
ActiveRecord::Tasks::DatabaseTasks.load_schema_current
ActiveRecord::Tasks::DatabaseTasks.migrate
puts "Database re-creation done in #{Gitlab::Metrics::System.monotonic_time - start}"
end
end
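The comment above explains why dropped columns still count toward PostgreSQL's 1600-column limit even after a migration spec removes them. A hedged sketch (not part of this change) of how one might inspect which tables carry the most dropped-but-unreclaimed columns, using pg_attribute.attisdropped:

# Sketch only: run in a Rails console against the test database.
ApplicationRecord.connection.execute(<<~SQL).to_a
  SELECT attrelid::regclass::text AS table,
         COUNT(*) FILTER (WHERE attisdropped) AS dropped_columns,
         COUNT(*) AS total_attributes
  FROM pg_attribute
  GROUP BY attrelid
  ORDER BY dropped_columns DESC
  LIMIT 10
SQL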
config.around(:each, :delete) do |example|
self.class.use_transactional_tests = false
 
@@ -2,7 +2,7 @@
 
require 'rake_helper'
 
describe 'gitlab:import_export:import rake task' do
describe 'gitlab:import_export:import rake task', :sidekiq do
let(:username) { 'root' }
let(:namespace_path) { username }
let!(:user) { create(:user, username: username) }
@@ -12,6 +12,8 @@ describe 'gitlab:import_export:import rake task' do
before do
Rake.application.rake_require('tasks/gitlab/import_export/import')
allow(Settings.uploads.object_store).to receive(:[]=).and_call_original
allow_any_instance_of(GitlabProjectImport).to receive(:exit)
.and_raise(RuntimeError, 'exit not handled')
end
 
around do |example|
@@ -95,6 +97,10 @@ describe 'gitlab:import_export:import rake task' do
end
 
it 'fails project import with an error' do
# Catch exit call, and raise exception instead
expect_any_instance_of(GitlabProjectImport).to receive(:exit)
.with(1).and_raise(SystemExit)
expect { subject }.to raise_error(SystemExit).and output(error).to_stdout
 
expect(project.merge_requests).to be_empty