Skip to content
Snippets Groups Projects
Commit 946771d0 authored by GitLab Bot's avatar GitLab Bot
Browse files

Add latest changes from gitlab-org/gitlab@master

parent f1e2fca1
No related branches found
No related tags found
No related merge requests found
Showing
with 642 additions and 95 deletions
Loading
Loading
@@ -3,6 +3,7 @@
module Gitlab
module Checks
class SnippetCheck < BaseChecker
DEFAULT_BRANCH = 'master'.freeze
ERROR_MESSAGES = {
create_delete_branch: 'You can not create or delete branches.'
}.freeze
Loading
Loading
@@ -29,6 +30,12 @@ module Gitlab
 
true
end
private
def creation?
@branch_name != DEFAULT_BRANCH && super
end
end
end
end
Loading
Loading
@@ -65,6 +65,7 @@ tree:
- resource_label_events:
- label:
- :priorities
- :external_pull_requests
- ci_pipelines:
- notes:
- :author
Loading
Loading
@@ -74,7 +75,6 @@ tree:
- :statuses
- :external_pull_request
- :merge_request
- :external_pull_requests
- :auto_devops
- :triggers
- :pipeline_schedules
Loading
Loading
Loading
Loading
@@ -51,6 +51,8 @@ module Gitlab
epic
ProjectCiCdSetting
container_expiration_policy
external_pull_request
external_pull_requests
].freeze
 
def create
Loading
Loading
# frozen_string_literal: true

module Gitlab
  # Wraps a single named Sidekiq queue and supports bulk-deleting the
  # jobs in it whose Labkit context metadata matches a given filter.
  class SidekiqQueue
    include Gitlab::Utils::StrongMemoize

    # Raised when the caller supplies no recognised metadata keys.
    NoMetadataError = Class.new(StandardError)
    # Raised when no queue with the given name currently exists.
    InvalidQueueError = Class.new(StandardError)

    attr_reader :queue_name

    def initialize(queue_name)
      @queue_name = queue_name
    end

    # Deletes every job in the queue whose `meta.*` attributes equal the
    # given +search_metadata+ values, stopping once +timeout+ seconds
    # have elapsed.
    #
    # @param search_metadata [Hash] metadata filter; only keys listed in
    #   Labkit::Context::KNOWN_KEYS are honoured, the rest are dropped.
    # @param timeout [Numeric] wall-clock budget in seconds.
    # @return [Hash] :completed (false when the timeout fired first),
    #   :deleted_jobs, and the remaining :queue_size.
    # @raise [NoMetadataError] when no usable metadata keys remain.
    # @raise [InvalidQueueError] when the named queue does not exist.
    def drop_jobs!(search_metadata, timeout:)
      finished = false
      removed_count = 0

      criteria = normalize_metadata(search_metadata)

      raise NoMetadataError if criteria.empty?
      raise InvalidQueueError unless queue

      begin
        Timeout.timeout(timeout) do
          queue.each do |job|
            next unless job_matches?(job, criteria)

            job.delete
            removed_count += 1
          end

          finished = true
        end
      rescue Timeout::Error
        # Budget exhausted: report partial progress with completed: false.
      end

      {
        completed: finished,
        deleted_jobs: removed_count,
        queue_size: queue.size
      }
    end

    # The underlying Sidekiq queue, or nil when it does not exist.
    def queue
      strong_memoize(:queue) do
        # Sidekiq::Queue.new always returns a queue, even if it doesn't
        # exist, so look it up among the queues that actually exist.
        Sidekiq::Queue.all.find { |q| q.name == queue_name }
      end
    end

    # True when every filter key/value pair matches the job's attributes.
    def job_matches?(job, job_search_metadata)
      job_search_metadata.all? { |key, value| job[key] == value }
    end

    private

    # Keeps only known Labkit context keys and rewrites them to the
    # `meta.<key>` form Sidekiq stores them under.
    def normalize_metadata(search_metadata)
      search_metadata
        .stringify_keys
        .slice(*Labkit::Context::KNOWN_KEYS)
        .transform_keys { |key| "meta.#{key}" }
        .compact
    end
  end
end
Loading
Loading
@@ -4993,9 +4993,6 @@ msgstr ""
msgid "Commits|An error occurred while fetching merge requests data."
msgstr ""
 
msgid "Commits|Commit: %{commitText}"
msgstr ""
msgid "Commits|History"
msgstr ""
 
Loading
Loading
@@ -8774,9 +8771,6 @@ msgstr ""
msgid "Forking in progress"
msgstr ""
 
msgid "Forking repository"
msgstr ""
msgid "Forks"
msgstr ""
 
Loading
Loading
Loading
Loading
@@ -16,7 +16,7 @@ FactoryBot.define do
 
page { OpenStruct.new(url_path: 'some-name') }
association :wiki, factory: :project_wiki, strategy: :build
initialize_with { new(wiki, page, true) }
initialize_with { new(wiki, page) }
 
before(:create) do |page, evaluator|
page.attributes = evaluator.attrs
Loading
Loading
Loading
Loading
@@ -139,11 +139,6 @@ describe 'Project active tab' do
it_behaves_like 'page has active sub tab', _('Repository Analytics')
end
 
context 'on project Analytics/Repository Analytics' do
it_behaves_like 'page has active tab', _('Analytics')
it_behaves_like 'page has active sub tab', _('Repository Analytics')
end
context 'on project Analytics/Cycle Analytics' do
before do
click_tab(_('CI / CD Analytics'))
Loading
Loading
Loading
Loading
@@ -14,7 +14,7 @@ describe 'Projects > Show > User sees last commit CI status' do
 
page.within '.commit-detail' do
expect(page).to have_content(project.commit.sha[0..6])
expect(page).to have_selector('[aria-label="Commit: skipped"]')
expect(page).to have_selector('[aria-label="Pipeline: skipped"]')
end
end
end
Loading
Loading
@@ -33,6 +33,8 @@ describe 'User views a wiki page' do
fill_in(:wiki_content, with: 'wiki content')
click_on('Create page')
end
expect(page).to have_content('Wiki was successfully updated.')
end
 
it 'shows the history of a page that has a path' do
Loading
Loading
@@ -62,8 +64,10 @@ describe 'User views a wiki page' do
expect(page).to have_content('Edit Page')
 
fill_in('Content', with: 'Updated Wiki Content')
click_on('Save changes')
expect(page).to have_content('Wiki was successfully updated.')
click_on('Page history')
 
page.within(:css, '.nav-text') do
Loading
Loading
@@ -132,6 +136,36 @@ describe 'User views a wiki page' do
end
end
 
context 'when a page has special characters in its title' do
let(:title) { '<foo> !@#$%^&*()[]{}=_+\'"\\|<>? <bar>' }
before do
wiki_page.update(title: title )
end
it 'preserves the special characters' do
visit(project_wiki_path(project, wiki_page))
expect(page).to have_css('.wiki-page-title', text: title)
expect(page).to have_css('.wiki-pages li', text: title)
end
end
context 'when a page has XSS in its title or content' do
let(:title) { '<script>alert("title")<script>' }
before do
wiki_page.update(title: title, content: 'foo <script>alert("content")</script> bar')
end
it 'safely displays the page' do
visit(project_wiki_path(project, wiki_page))
expect(page).to have_css('.wiki-page-title', text: title)
expect(page).to have_content('foo bar')
end
end
context 'when a page has XSS in its message' do
before do
wiki_page.update(message: '<script>alert(true)<script>', content: 'XSS update')
Loading
Loading
{
"approvals_before_merge": 0,
"archived": false,
"auto_cancel_pending_pipelines": "enabled",
"autoclose_referenced_issues": true,
"boards": [],
"build_allow_git_fetch": true,
"build_coverage_regex": null,
"build_timeout": 3600,
"ci_cd_settings": {
"group_runners_enabled": true
},
"ci_config_path": null,
"ci_pipelines": [
{
"before_sha": "0000000000000000000000000000000000000000",
"committed_at": null,
"config_source": "repository_source",
"created_at": "2020-02-25T12:08:40.615Z",
"duration": 61,
"external_pull_request": {
"created_at": "2020-02-25T12:08:40.478Z",
"id": 59023,
"project_id": 17121868,
"pull_request_iid": 4,
"source_branch": "new-branch",
"source_repository": "liptonshmidt/dotfiles",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"status": "open",
"target_branch": "master",
"target_repository": "liptonshmidt/dotfiles",
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:08:40.478Z"
},
"failure_reason": null,
"finished_at": "2020-02-25T12:09:44.464Z",
"id": 120842687,
"iid": 8,
"lock_version": 3,
"notes": [],
"project_id": 17121868,
"protected": false,
"ref": "new-branch",
"sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"source": "external_pull_request_event",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"stages": [],
"started_at": "2020-02-25T12:08:42.511Z",
"status": "success",
"tag": false,
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:09:44.473Z",
"user_id": 4087087,
"yaml_errors": null
},
{
"before_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"committed_at": null,
"config_source": "repository_source",
"created_at": "2020-02-25T12:08:37.434Z",
"duration": 57,
"external_pull_request": {
"created_at": "2020-02-25T12:08:40.478Z",
"id": 59023,
"project_id": 17121868,
"pull_request_iid": 4,
"source_branch": "new-branch",
"source_repository": "liptonshmidt/dotfiles",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"status": "open",
"target_branch": "master",
"target_repository": "liptonshmidt/dotfiles",
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:08:40.478Z"
},
"failure_reason": null,
"finished_at": "2020-02-25T12:09:36.557Z",
"id": 120842675,
"iid": 7,
"lock_version": 3,
"notes": [],
"project_id": 17121868,
"protected": false,
"ref": "new-branch",
"sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"source": "external_pull_request_event",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"stages": [],
"started_at": "2020-02-25T12:08:38.682Z",
"status": "success",
"tag": false,
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:09:36.565Z",
"user_id": 4087087,
"yaml_errors": null
}
],
"custom_attributes": [],
"delete_error": null,
"description": "Vim, Tmux and others",
"disable_overriding_approvers_per_merge_request": null,
"external_authorization_classification_label": "",
"external_pull_requests": [
{
"created_at": "2020-02-25T12:08:40.478Z",
"id": 59023,
"project_id": 17121868,
"pull_request_iid": 4,
"source_branch": "new-branch",
"source_repository": "liptonshmidt/dotfiles",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"status": "open",
"target_branch": "master",
"target_repository": "liptonshmidt/dotfiles",
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:08:40.478Z"
}
],
"external_webhook_token": "D3mVYFzZkgZ5kMfcW_wx",
"issues": [],
"labels": [],
"milestones": [],
"pipeline_schedules": [],
"project_feature": {
"builds_access_level": 20,
"created_at": "2020-02-25T11:20:09.925Z",
"forking_access_level": 20,
"id": 17494715,
"issues_access_level": 0,
"merge_requests_access_level": 0,
"pages_access_level": 20,
"project_id": 17121868,
"repository_access_level": 20,
"snippets_access_level": 0,
"updated_at": "2020-02-25T11:20:10.376Z",
"wiki_access_level": 0
},
"public_builds": true,
"releases": [],
"shared_runners_enabled": true,
"snippets": [],
"triggers": [],
"visibility_level": 20
}
Loading
Loading
@@ -68,10 +68,10 @@ exports[`Repository last commit component renders commit widget 1`] = `
<gl-link-stub
class="js-commit-pipeline"
href="https://test.com/pipeline"
title="Commit: failed"
title="Pipeline: failed"
>
<ci-icon-stub
aria-label="Commit: failed"
aria-label="Pipeline: failed"
cssclasses=""
size="24"
status="[object Object]"
Loading
Loading
@@ -174,10 +174,10 @@ exports[`Repository last commit component renders the signature HTML as returned
<gl-link-stub
class="js-commit-pipeline"
href="https://test.com/pipeline"
title="Commit: failed"
title="Pipeline: failed"
>
<ci-icon-stub
aria-label="Commit: failed"
aria-label="Pipeline: failed"
cssclasses=""
size="24"
status="[object Object]"
Loading
Loading
Loading
Loading
@@ -2,6 +2,8 @@ import $ from 'jquery';
import Api from '~/api';
import Search from '~/pages/search/show/search';
 
jest.mock('~/api');
describe('Search', () => {
const fixturePath = 'search/show.html';
const searchTerm = 'some search';
Loading
Loading
@@ -19,20 +21,19 @@ describe('Search', () => {
new Search(); // eslint-disable-line no-new
});
 
it('requests groups from backend when filtering', done => {
spyOn(Api, 'groups').and.callFake(term => {
it('requests groups from backend when filtering', () => {
jest.spyOn(Api, 'groups').mockImplementation(term => {
expect(term).toBe(searchTerm);
done();
});
const inputElement = fillDropdownInput('.js-search-group-dropdown');
 
$(inputElement).trigger('input');
});
 
it('requests projects from backend when filtering', done => {
spyOn(Api, 'projects').and.callFake(term => {
it('requests projects from backend when filtering', () => {
jest.spyOn(Api, 'projects').mockImplementation(term => {
expect(term).toBe(searchTerm);
done();
});
const inputElement = fillDropdownInput('.js-search-project-dropdown');
 
Loading
Loading
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import { compileToFunctions } from 'vue-template-compiler';
import { mount } from '@vue/test-utils';
import { GREEN_BOX_IMAGE_URL, RED_BOX_IMAGE_URL } from 'spec/test_constants';
import imageDiffViewer from '~/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue';
 
Loading
Loading
@@ -9,50 +11,52 @@ describe('ImageDiffViewer', () => {
newPath: GREEN_BOX_IMAGE_URL,
oldPath: RED_BOX_IMAGE_URL,
};
const allProps = {
...requiredProps,
oldSize: 2048,
newSize: 1024,
};
let wrapper;
let vm;
 
function createComponent(props) {
const ImageDiffViewer = Vue.extend(imageDiffViewer);
vm = mountComponent(ImageDiffViewer, props);
wrapper = mount(ImageDiffViewer, { propsData: props });
vm = wrapper.vm;
}
 
const triggerEvent = (eventName, el = vm.$el, clientX = 0) => {
const event = document.createEvent('MouseEvents');
event.initMouseEvent(
eventName,
true,
true,
window,
1,
const event = new MouseEvent(eventName, {
bubbles: true,
cancelable: true,
view: window,
detail: 1,
screenX: clientX,
clientX,
0,
clientX,
0,
false,
false,
false,
false,
0,
null,
);
});
// JSDOM does not implement experimental APIs
event.pageX = clientX;
 
el.dispatchEvent(event);
};
 
const dragSlider = (sliderElement, dragPixel = 20) => {
const dragSlider = (sliderElement, doc, dragPixel) => {
triggerEvent('mousedown', sliderElement);
triggerEvent('mousemove', document.body, dragPixel);
triggerEvent('mouseup', document.body);
triggerEvent('mousemove', doc.body, dragPixel);
triggerEvent('mouseup', doc.body);
};
 
afterEach(() => {
vm.$destroy();
wrapper.destroy();
});
 
it('renders image diff for replaced', done => {
createComponent(requiredProps);
createComponent({ ...allProps });
vm.$nextTick(() => {
const metaInfoElements = vm.$el.querySelectorAll('.image-info');
 
setTimeout(() => {
expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
 
expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL);
Loading
Loading
@@ -66,35 +70,35 @@ describe('ImageDiffViewer', () => {
'Onion skin',
);
 
expect(metaInfoElements.length).toBe(2);
expect(metaInfoElements[0]).toHaveText('2.00 KiB');
expect(metaInfoElements[1]).toHaveText('1.00 KiB');
done();
});
});
 
it('renders image diff for new', done => {
createComponent(
Object.assign({}, requiredProps, {
diffMode: 'new',
oldPath: '',
}),
);
setTimeout(() => {
createComponent({ ...allProps, diffMode: 'new', oldPath: '' });
setImmediate(() => {
const metaInfoElement = vm.$el.querySelector('.image-info');
expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
expect(metaInfoElement).toHaveText('1.00 KiB');
 
done();
});
});
 
it('renders image diff for deleted', done => {
createComponent(
Object.assign({}, requiredProps, {
diffMode: 'deleted',
newPath: '',
}),
);
setTimeout(() => {
createComponent({ ...allProps, diffMode: 'deleted', newPath: '' });
setImmediate(() => {
const metaInfoElement = vm.$el.querySelector('.image-info');
expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL);
expect(metaInfoElement).toHaveText('2.00 KiB');
 
done();
});
Loading
Loading
@@ -105,26 +109,40 @@ describe('ImageDiffViewer', () => {
components: {
imageDiffViewer,
},
template: `
<image-diff-viewer diff-mode="renamed" new-path="${GREEN_BOX_IMAGE_URL}" old-path="">
data: {
...allProps,
diffMode: 'renamed',
},
...compileToFunctions(`
<image-diff-viewer
:diff-mode="diffMode"
:new-path="newPath"
:old-path="oldPath"
:new-size="newSize"
:old-size="oldSize"
>
<span slot="image-overlay" class="overlay">test</span>
</image-diff-viewer>
`,
`),
}).$mount();
 
setTimeout(() => {
setImmediate(() => {
const metaInfoElement = vm.$el.querySelector('.image-info');
expect(vm.$el.querySelector('img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
expect(vm.$el.querySelector('.overlay')).not.toBe(null);
 
expect(metaInfoElement).toHaveText('2.00 KiB');
done();
});
});
 
describe('swipeMode', () => {
beforeEach(done => {
createComponent(requiredProps);
createComponent({ ...requiredProps });
 
setTimeout(() => {
setImmediate(() => {
done();
});
});
Loading
Loading
@@ -141,9 +159,9 @@ describe('ImageDiffViewer', () => {
 
describe('onionSkin', () => {
beforeEach(done => {
createComponent(requiredProps);
createComponent({ ...requiredProps });
 
setTimeout(() => {
setImmediate(() => {
done();
});
});
Loading
Loading
@@ -163,7 +181,7 @@ describe('ImageDiffViewer', () => {
vm.$el.querySelector('.view-modes-menu li:nth-child(3)').click();
 
vm.$nextTick(() => {
dragSlider(vm.$el.querySelector('.dragger'));
dragSlider(vm.$el.querySelector('.dragger'), document, 20);
 
vm.$nextTick(() => {
expect(vm.$el.querySelector('.dragger').style.left).toBe('20px');
Loading
Loading
Loading
Loading
@@ -25,10 +25,19 @@ describe Gitlab::Checks::SnippetCheck do
 
context 'trying to create the branch' do
let(:oldrev) { '0000000000000000000000000000000000000000' }
let(:ref) { 'refs/heads/feature' }
 
it 'raises an error' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You can not create or delete branches.')
end
context "when branch is 'master'" do
let(:ref) { 'refs/heads/master' }
it "allows the operation" do
expect { subject.exec }.not_to raise_error
end
end
end
end
end
Loading
Loading
@@ -426,6 +426,10 @@ describe Gitlab::ImportExport::Project::TreeRestorer do
 
expect(pipeline_with_external_pr.external_pull_request).to be_persisted
end
it 'has no import failures' do
expect(@project.import_failures.size).to eq 0
end
end
end
end
Loading
Loading
@@ -499,6 +503,30 @@ describe Gitlab::ImportExport::Project::TreeRestorer do
end
end
 
context 'multiple pipelines reference the same external pull request' do
before do
setup_import_export_config('multi_pipeline_ref_one_external_pr')
expect(restored_project_json).to eq(true)
end
it_behaves_like 'restores project successfully',
issues: 0,
labels: 0,
milestones: 0,
ci_pipelines: 2,
external_pull_requests: 1,
import_failures: 0
it 'restores external pull request for the restored pipelines' do
external_pr = project.external_pull_requests.first
project.ci_pipelines.each do |pipeline_with_external_pr|
expect(pipeline_with_external_pr.external_pull_request).to be_persisted
expect(pipeline_with_external_pr.external_pull_request).to eq(external_pr)
end
end
end
context 'when post import action throw non-retriable exception' do
let(:exception) { StandardError.new('post_import_error') }
 
Loading
Loading
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::SidekiqQueue do
# Disable Sidekiq's testing mode so pushed jobs land in a real
# (redis-backed) queue, and clear that queue before and after each
# example so examples cannot leak jobs into each other.
around do |example|
Sidekiq::Queue.new('authorized_projects').clear
Sidekiq::Testing.disable!(&example)
Sidekiq::Queue.new('authorized_projects').clear
end
# Enqueues an AuthorizedProjectsWorker job tagged with the user's
# username under the 'meta.user' context key, which is what
# #drop_jobs! matches against.
def add_job(user)
Sidekiq::Client.push(
'class' => 'AuthorizedProjectsWorker',
'queue' => 'authorized_projects',
'args' => [user.id],
'meta.user' => user.username
)
end
describe '#drop_jobs!' do
shared_examples 'queue processing' do
let(:sidekiq_queue) { described_class.new('authorized_projects') }
let_it_be(:sidekiq_queue_user) { create(:user) }
# Three jobs total: one for a throwaway user and two for
# sidekiq_queue_user, so metadata filters can match 0 or 2 jobs.
before do
add_job(create(:user))
add_job(sidekiq_queue_user)
add_job(sidekiq_queue_user)
end
context 'when the queue is not processed in time' do
# Simulate the timeout firing after the first job has been
# inspected, by raising Timeout::Error on every call to
# job_matches? after the first one.
before do
calls = 0
allow(sidekiq_queue).to receive(:job_matches?).and_wrap_original do |m, *args|
raise Timeout::Error if calls > 0
calls += 1
m.call(*args)
end
end
it 'returns a non-completion flag, the number of jobs deleted, and the remaining queue size' do
expect(sidekiq_queue.drop_jobs!(search_metadata, timeout: 10))
.to eq(completed: false,
deleted_jobs: timeout_deleted,
queue_size: 3 - timeout_deleted)
end
end
context 'when the queue is processed in time' do
it 'returns a completion flag, the number of jobs deleted, and the remaining queue size' do
expect(sidekiq_queue.drop_jobs!(search_metadata, timeout: 10))
.to eq(completed: true,
deleted_jobs: no_timeout_deleted,
queue_size: 3 - no_timeout_deleted)
end
end
end
context 'when there are no matching jobs' do
include_examples 'queue processing' do
# :project is a known context key, but no job carries it.
let(:search_metadata) { { project: 1 } }
let(:timeout_deleted) { 0 }
let(:no_timeout_deleted) { 0 }
end
end
context 'when there are matching jobs' do
include_examples 'queue processing' do
let(:search_metadata) { { user: sidekiq_queue_user.username } }
# Under the simulated timeout only the first matching job is
# deleted; without it both matching jobs are.
let(:timeout_deleted) { 1 }
let(:no_timeout_deleted) { 2 }
end
end
context 'when there are no valid metadata keys passed' do
it 'raises NoMetadataError' do
add_job(create(:user))
# :username is not a Labkit known key, so the filter is empty.
expect { described_class.new('authorized_projects').drop_jobs!({ username: 'sidekiq_queue_user' }, timeout: 1) }
.to raise_error(described_class::NoMetadataError)
end
end
context 'when the queue does not exist' do
it 'raises InvalidQueueError' do
expect { described_class.new('foo').drop_jobs!({ user: 'sidekiq_queue_user' }, timeout: 1) }
.to raise_error(described_class::InvalidQueueError)
end
end
end
end
Loading
Loading
@@ -380,6 +380,12 @@ describe ApplicationSetting do
 
expect(subject).to be_invalid
end
it 'does not prevent from saving when gitaly timeouts were previously invalid' do
subject.update_column(:gitaly_timeout_default, Settings.gitlab.max_request_duration_seconds + 1)
expect(subject.reload).to be_valid
end
end
 
describe 'enforcing terms' do
Loading
Loading
Loading
Loading
@@ -475,43 +475,59 @@ describe WikiPage do
end
end
 
describe "#title" do
it "replaces a hyphen to a space" do
subject.title = "Import-existing-repositories-into-GitLab"
describe '#title_changed?' do
using RSpec::Parameterized::TableSyntax
 
expect(subject.title).to eq("Import existing repositories into GitLab")
let(:untitled_page) { described_class.new(wiki) }
let(:directory_page) do
create_page('parent/child', 'test content')
wiki.find_page('parent/child')
end
 
it 'unescapes html' do
subject.title = 'foo &amp; bar'
where(:page, :title, :changed) do
:untitled_page | nil | false
:untitled_page | 'new title' | true
 
expect(subject.title).to eq('foo & bar')
:new_page | nil | true
:new_page | 'test page' | true
:new_page | 'new title' | true
:existing_page | nil | false
:existing_page | 'test page' | false
:existing_page | '/test page' | false
:existing_page | 'new title' | true
:directory_page | nil | false
:directory_page | 'parent/child' | false
:directory_page | 'child' | false
:directory_page | '/child' | true
:directory_page | 'parent/other' | true
:directory_page | 'other/child' | true
end
with_them do
it 'returns the expected value' do
subject = public_send(page)
subject.title = title if title
expect(subject.title_changed?).to be(changed)
end
end
end
 
describe '#path' do
let(:path) { 'mypath.md' }
let(:git_page) { instance_double('Gitlab::Git::WikiPage', path: path).as_null_object }
it 'returns the path when persisted' do
page = described_class.new(wiki, git_page, true)
expect(page.path).to eq(path)
expect(existing_page.path).to eq('test-page.md')
end
 
it 'returns nil when not persisted' do
page = described_class.new(wiki, git_page, false)
expect(page.path).to be_nil
expect(new_page.path).to be_nil
end
end
 
describe '#directory' do
context 'when the page is at the root directory' do
subject do
create_page('file', 'content')
wiki.find_page('file')
end
subject { existing_page }
 
it 'returns an empty string' do
expect(subject.directory).to eq('')
Loading
Loading
# frozen_string_literal: true
require 'spec_helper'
describe API::Admin::Sidekiq do
let_it_be(:admin) { create(:admin) }
describe 'DELETE /admin/sidekiq/queues/:queue_name' do
context 'when the user is not an admin' do
it 'returns a 403' do
delete api("/admin/sidekiq/queues/authorized_projects?user=#{admin.username}", create(:user))
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when the user is an admin' do
# Disable Sidekiq's testing mode so jobs go to a real queue, and
# clear it before and after each example to avoid leakage.
around do |example|
Sidekiq::Queue.new('authorized_projects').clear
Sidekiq::Testing.disable!(&example)
Sidekiq::Queue.new('authorized_projects').clear
end
# Enqueues a job tagged with the user's username in 'meta.user',
# the key the endpoint's ?user= filter matches on.
def add_job(user)
Sidekiq::Client.push(
'class' => 'AuthorizedProjectsWorker',
'queue' => 'authorized_projects',
'args' => [user.id],
'meta.user' => user.username
)
end
context 'valid request' do
it 'returns info about the deleted jobs' do
# Two jobs match the admin's username, one belongs to another user.
add_job(admin)
add_job(admin)
add_job(create(:user))
delete api("/admin/sidekiq/queues/authorized_projects?user=#{admin.username}", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq('completed' => true,
'deleted_jobs' => 2,
'queue_size' => 1)
end
end
context 'when no required params are provided' do
it 'returns a 400' do
# 'user_2' is not a recognised metadata param.
delete api("/admin/sidekiq/queues/authorized_projects?user_2=#{admin.username}", admin)
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'when the queue does not exist' do
it 'returns a 404' do
delete api("/admin/sidekiq/queues/authorized_projects_2?user=#{admin.username}", admin)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe 'Deleting Sidekiq jobs' do
include GraphqlHelpers
let_it_be(:admin) { create(:admin) }
let(:variables) { { user: admin.username, queue_name: 'authorized_projects' } }
let(:mutation) { graphql_mutation(:admin_sidekiq_queues_delete_jobs, variables) }
def mutation_response
graphql_mutation_response(:admin_sidekiq_queues_delete_jobs)
end
context 'when the user is not an admin' do
let(:current_user) { create(:user) }
it_behaves_like 'a mutation that returns top-level errors',
errors: ['You must be an admin to use this mutation']
end
context 'when the user is an admin' do
let(:current_user) { admin }
context 'valid request' do
# Disable Sidekiq's testing mode so jobs go to a real queue, and
# clear it before and after each example to avoid leakage.
around do |example|
Sidekiq::Queue.new('authorized_projects').clear
Sidekiq::Testing.disable!(&example)
Sidekiq::Queue.new('authorized_projects').clear
end
# Enqueues a job tagged with the user's username in 'meta.user',
# the key the mutation's `user` argument filters on.
def add_job(user)
Sidekiq::Client.push(
'class' => 'AuthorizedProjectsWorker',
'queue' => 'authorized_projects',
'args' => [user.id],
'meta.user' => user.username
)
end
it 'returns info about the deleted jobs' do
# Two jobs match the admin's username, one belongs to another user.
add_job(admin)
add_job(admin)
add_job(create(:user))
post_graphql_mutation(mutation, current_user: admin)
expect(mutation_response['errors']).to be_empty
expect(mutation_response['result']).to eq('completed' => true,
'deletedJobs' => 2,
'queueSize' => 1)
end
end
context 'when no required params are provided' do
let(:variables) { { queue_name: 'authorized_projects' } }
it_behaves_like 'a mutation that returns errors in the response',
errors: ['No metadata provided']
end
context 'when the queue does not exist' do
let(:variables) { { user: admin.username, queue_name: 'authorized_projects_2' } }
it_behaves_like 'a mutation that returns top-level errors',
errors: ['Queue authorized_projects_2 not found']
end
end
end
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment