Commit 11e5d1b9 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 7351a484
Showing changed files with 812 additions and 22 deletions
# frozen_string_literal: true

module API
  class LsifData < Grape::API
    MAX_FILE_SIZE = 10.megabytes

    before do
      not_found! if Feature.disabled?(:code_navigation, user_project)
    end

    params do
      requires :id, type: String, desc: 'The ID of a project'
      requires :commit_id, type: String, desc: 'The ID of a commit'
    end

    resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
      segment ':id/commits/:commit_id' do
        params do
          requires :path, type: String, desc: 'The path of a file'
        end

        get 'lsif/info' do
          authorize! :download_code, user_project

          artifact =
            @project.job_artifacts
              .with_file_types(['lsif'])
              .for_sha(params[:commit_id])
              .last

          not_found! unless artifact
          authorize! :read_pipeline, artifact.job.pipeline
          file_too_large! if artifact.file.cached_size > MAX_FILE_SIZE

          ::Projects::LsifDataService.new(artifact.file, @project, params).execute
        end
      end
    end
  end
end
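
For context, once a pipeline has produced an 'lsif' artifact for a commit, the endpoint above serves code navigation data for a single file. A minimal client sketch using Ruby's standard library (host, project ID, commit SHA, and token are placeholders; the expected response shape is taken from the request spec near the end of this diff):

# Sketch only -- not part of this commit. Placeholder values throughout.
require 'net/http'
require 'json'

project_id = '42'       # placeholder project ID
commit_sha = 'abc123'   # placeholder commit SHA
token      = 'REDACTED' # placeholder personal access token

uri = URI("https://gitlab.example.com/api/v4/projects/#{project_id}" \
          "/commits/#{commit_sha}/lsif/info?path=main.go")
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = token

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
  http.request(request)
end

# Expected on success: an array of ranges such as
# { "start_line" => 8, "start_char" => 13, "end_line" => 8, "end_char" => 18 }
puts JSON.parse(response.body) if response.code == '200'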
@@ -10169,6 +10169,12 @@ msgstr ""
msgid "Image %{imageName} was scheduled for deletion from the registry."
msgstr ""
 
msgid "Image ID"
msgstr ""
msgid "Image deleted successfully"
msgstr ""
msgid "Image: %{image}"
msgstr ""
 
@@ -11019,6 +11025,9 @@ msgstr ""
msgid "Last Seen"
msgstr ""
 
msgid "Last Updated"
msgstr ""
msgid "Last accessed on"
msgstr ""
 
@@ -17642,12 +17651,21 @@ msgstr ""
msgid "Something went wrong while closing the %{issuable}. Please try again later"
msgstr ""
 
msgid "Something went wrong while deleting the image."
msgstr ""
msgid "Something went wrong while deleting the package."
msgstr ""
 
msgid "Something went wrong while deleting the source branch. Please try again."
msgstr ""
 
msgid "Something went wrong while deleting the tag."
msgstr ""
msgid "Something went wrong while deleting the tags."
msgstr ""
msgid "Something went wrong while deleting your note. Please try again."
msgstr ""
 
@@ -17690,6 +17708,9 @@ msgstr ""
msgid "Something went wrong while fetching the registry list."
msgstr ""
 
msgid "Something went wrong while fetching the tags list."
msgstr ""
msgid "Something went wrong while initializing the OpenAPI viewer"
msgstr ""
 
@@ -18503,6 +18524,9 @@ msgstr ""
msgid "Tag"
msgstr ""
 
msgid "Tag deleted successfully"
msgstr ""
msgid "Tag list:"
msgstr ""
 
@@ -18521,6 +18545,9 @@ msgstr ""
msgid "Tags"
msgstr ""
 
msgid "Tags deleted successfully"
msgstr ""
msgid "Tags feed"
msgstr ""
 
# frozen_string_literal: true
 
module QA
-  context 'Plan', :orchestrated, :smtp, :reliable do
+  context 'Plan', :orchestrated, :smtp do
    describe 'Email Notification' do
      let(:user) do
        Resource::User.fabricate_or_use(Runtime::Env.gitlab_qa_username_1, Runtime::Env.gitlab_qa_password_1)
@@ -139,6 +139,16 @@ FactoryBot.define do
      end
    end

    trait :lsif do
      file_type { :lsif }
      file_format { :raw }

      after(:build) do |artifact, evaluator|
        artifact.file = fixture_file_upload(
          Rails.root.join('spec/fixtures/lsif.json.gz'), 'application/octet-stream')
      end
    end

    trait :correct_checksum do
      after(:build) do |artifact, evaluator|
        artifact.file_sha256 = Digest::SHA256.file(artifact.file.path).hexdigest
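
For reference, the new trait is consumed by the API::LsifData request spec further down in this diff; building an LSIF artifact in a spec looks like:

# Usage of the new :lsif trait (as in the request spec below):
artifact = create(:ci_job_artifact, :lsif, job: create(:ci_build, pipeline: pipeline))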
@@ -15,6 +15,7 @@ describe 'Container Registry', :js do
    project.add_developer(user)
    stub_container_registry_config(enabled: true)
    stub_container_registry_tags(repository: :any, tags: [])
    stub_feature_flags(vue_container_registry_explorer: false)
  end

  it 'has a page title set' do
@@ -16,6 +16,8 @@ describe 'Projects > Files > User creates a directory', :js do
    project.add_developer(user)
    sign_in(user)
    visit project_tree_path(project, 'master')

    wait_for_requests
  end

  context 'with default target branch' do
@@ -43,6 +45,25 @@ describe 'Projects > Files > User creates a directory', :js do
    end
  end

  context 'inside sub-folder' do
    it 'creates new directory' do
      click_link 'files'

      page.within('.repo-breadcrumb') do
        expect(page).to have_link('files')
      end

      first('.add-to-tree').click
      click_link('New directory')

      fill_in(:dir_name, with: 'new_directory')
      click_button('Create directory')

      expect(page).to have_content('files')
      expect(page).to have_content('new_directory')
    end
  end

  context 'with a new target branch' do
    before do
      first('.add-to-tree').click
File added
export const reposServerResponse = [
  {
    destroy_path: 'path',
    id: '123',
    location: 'location',
    path: 'foo',
    tags_path: 'tags_path',
  },
  {
    destroy_path: 'path_',
    id: '456',
    location: 'location_',
    path: 'bar',
    tags_path: 'tags_path_',
  },
];

export const registryServerResponse = [
  {
    name: 'centos7',
    short_revision: 'b118ab5b0',
    revision: 'b118ab5b0e90b7cb5127db31d5321ac14961d097516a8e0e72084b6cdc783b43',
    total_size: 679,
    layers: 19,
    location: 'location',
    created_at: 1505828744434,
    destroy_path: 'path_',
  },
  {
    name: 'centos6',
    short_revision: 'b118ab5b0',
    revision: 'b118ab5b0e90b7cb5127db31d5321ac14961d097516a8e0e72084b6cdc783b43',
    total_size: 679,
    layers: 19,
    location: 'location',
    created_at: 1505828744434,
  },
];
import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
import * as actions from '~/registry/explorer/stores/actions';
import * as types from '~/registry/explorer/stores/mutation_types';
import testAction from 'helpers/vuex_action_helper';
import createFlash from '~/flash';
import { TEST_HOST } from 'helpers/test_constants';
import { reposServerResponse, registryServerResponse } from '../mock_data';

jest.mock('~/flash.js');

describe('Actions RegistryExplorer Store', () => {
  let mock;
  const endpoint = `${TEST_HOST}/endpoint.json`;

  beforeEach(() => {
    mock = new MockAdapter(axios);
  });

  afterEach(() => {
    mock.restore();
  });
  it('sets initial state', done => {
    const initialState = {
      config: {
        endpoint,
      },
    };

    testAction(
      actions.setInitialState,
      initialState,
      null,
      [{ type: types.SET_INITIAL_STATE, payload: initialState }],
      [],
      done,
    );
  });
  describe('receives api responses', () => {
    const response = {
      data: [1, 2, 3],
      headers: {
        page: 1,
        perPage: 10,
      },
    };

    it('images list response', done => {
      testAction(
        actions.receiveImagesListSuccess,
        response,
        null,
        [
          { type: types.SET_IMAGES_LIST_SUCCESS, payload: response.data },
          { type: types.SET_PAGINATION, payload: response.headers },
        ],
        [],
        done,
      );
    });

    it('tags list response', done => {
      testAction(
        actions.receiveTagsListSuccess,
        response,
        null,
        [
          { type: types.SET_TAGS_LIST_SUCCESS, payload: response.data },
          { type: types.SET_TAGS_PAGINATION, payload: response.headers },
        ],
        [],
        done,
      );
    });
  });
  describe('fetch images list', () => {
    it('sets the imagesList and pagination', done => {
      mock.onGet(endpoint).replyOnce(200, reposServerResponse, {});

      testAction(
        actions.requestImagesList,
        {},
        {
          config: {
            endpoint,
          },
        },
        [
          { type: types.SET_MAIN_LOADING, payload: true },
          { type: types.SET_MAIN_LOADING, payload: false },
        ],
        [{ type: 'receiveImagesListSuccess', payload: { data: reposServerResponse, headers: {} } }],
        done,
      );
    });

    it('should create flash on error', done => {
      testAction(
        actions.requestImagesList,
        {},
        {
          config: {
            endpoint: null,
          },
        },
        [
          { type: types.SET_MAIN_LOADING, payload: true },
          { type: types.SET_MAIN_LOADING, payload: false },
        ],
        [],
        () => {
          expect(createFlash).toHaveBeenCalled();
          done();
        },
      );
    });
  });
  describe('fetch tags list', () => {
    const url = window.btoa(`${endpoint}/1}`);

    it('sets the tagsList', done => {
      mock.onGet(window.atob(url)).replyOnce(200, registryServerResponse, {});

      testAction(
        actions.requestTagsList,
        { id: url },
        {},
        [
          { type: types.SET_MAIN_LOADING, payload: true },
          { type: types.SET_MAIN_LOADING, payload: false },
        ],
        [
          {
            type: 'receiveTagsListSuccess',
            payload: { data: registryServerResponse, headers: {} },
          },
        ],
        done,
      );
    });

    it('should create flash on error', done => {
      testAction(
        actions.requestTagsList,
        { id: url },
        {},
        [
          { type: types.SET_MAIN_LOADING, payload: true },
          { type: types.SET_MAIN_LOADING, payload: false },
        ],
        [],
        () => {
          expect(createFlash).toHaveBeenCalled();
          done();
        },
      );
    });
  });
  describe('request delete single tag', () => {
    it('successfully performs the delete request', done => {
      const deletePath = 'delete/path';
      const url = window.btoa(`${endpoint}/1}`);

      mock.onDelete(deletePath).replyOnce(200);

      testAction(
        actions.requestDeleteTag,
        {
          tag: {
            destroy_path: deletePath,
          },
          imageId: url,
        },
        {
          tagsPagination: {},
        },
        [
          { type: types.SET_MAIN_LOADING, payload: true },
          { type: types.SET_MAIN_LOADING, payload: false },
        ],
        [
          {
            type: 'requestTagsList',
            payload: { pagination: {}, id: url },
          },
        ],
        () => {
          expect(createFlash).toHaveBeenCalled();
          done();
        },
      );
    });

    it('should show flash message on error', done => {
      testAction(
        actions.requestDeleteTag,
        {
          tag: {
            destroy_path: null,
          },
        },
        {},
        [
          { type: types.SET_MAIN_LOADING, payload: true },
          { type: types.SET_MAIN_LOADING, payload: false },
        ],
        [],
        () => {
          expect(createFlash).toHaveBeenCalled();
          done();
        },
      );
    });
  });
  describe('request delete multiple tags', () => {
    const imageId = 1;
    const projectPath = 'project-path';
    const url = `${projectPath}/registry/repository/${imageId}/tags/bulk_destroy`;

    it('successfully performs the delete request', done => {
      mock.onDelete(url).replyOnce(200);

      testAction(
        actions.requestDeleteTags,
        {
          ids: [1, 2],
          imageId,
        },
        {
          config: {
            projectPath,
          },
          tagsPagination: {},
        },
        [
          { type: types.SET_MAIN_LOADING, payload: true },
          { type: types.SET_MAIN_LOADING, payload: false },
        ],
        [
          {
            type: 'requestTagsList',
            payload: { pagination: {}, id: 1 },
          },
        ],
        () => {
          expect(createFlash).toHaveBeenCalled();
          done();
        },
      );
    });

    it('should show flash message on error', done => {
      mock.onDelete(url).replyOnce(500);

      testAction(
        actions.requestDeleteTags,
        {
          ids: [1, 2],
          imageId,
        },
        {
          config: {
            projectPath,
          },
          tagsPagination: {},
        },
        [
          { type: types.SET_MAIN_LOADING, payload: true },
          { type: types.SET_MAIN_LOADING, payload: false },
        ],
        [],
        () => {
          expect(createFlash).toHaveBeenCalled();
          done();
        },
      );
    });
  });
  describe('request delete single image', () => {
    it('successfully performs the delete request', done => {
      const deletePath = 'delete/path';
      mock.onDelete(deletePath).replyOnce(200);

      testAction(
        actions.requestDeleteImage,
        deletePath,
        {
          pagination: {},
        },
        [
          { type: types.SET_MAIN_LOADING, payload: true },
          { type: types.SET_MAIN_LOADING, payload: false },
        ],
        [
          {
            type: 'requestImagesList',
            payload: { pagination: {} },
          },
        ],
        () => {
          expect(createFlash).toHaveBeenCalled();
          done();
        },
      );
    });

    it('should show flash message on error', done => {
      testAction(
        actions.requestDeleteImage,
        null,
        {},
        [
          { type: types.SET_MAIN_LOADING, payload: true },
          { type: types.SET_MAIN_LOADING, payload: false },
        ],
        [],
        () => {
          expect(createFlash).toHaveBeenCalled();
          done();
        },
      );
    });
  });
});
import mutations from '~/registry/explorer/stores/mutations';
import * as types from '~/registry/explorer/stores/mutation_types';

describe('Mutations Registry Explorer Store', () => {
  let mockState;

  beforeEach(() => {
    mockState = {};
  });

  describe('SET_INITIAL_STATE', () => {
    it('should set the initial state', () => {
      const expectedState = { ...mockState, config: { endpoint: 'foo' } };
      mutations[types.SET_INITIAL_STATE](mockState, { endpoint: 'foo' });

      expect(mockState).toEqual(expectedState);
    });
  });

  describe('SET_IMAGES_LIST_SUCCESS', () => {
    it('should set the images list', () => {
      const images = [1, 2, 3];
      const expectedState = { ...mockState, images };
      mutations[types.SET_IMAGES_LIST_SUCCESS](mockState, images);

      expect(mockState).toEqual(expectedState);
    });
  });

  describe('SET_TAGS_LIST_SUCCESS', () => {
    it('should set the tags list', () => {
      const tags = [1, 2, 3];
      const expectedState = { ...mockState, tags };
      mutations[types.SET_TAGS_LIST_SUCCESS](mockState, tags);

      expect(mockState).toEqual(expectedState);
    });
  });

  describe('SET_MAIN_LOADING', () => {
    it('should set the isLoading', () => {
      const expectedState = { ...mockState, isLoading: true };
      mutations[types.SET_MAIN_LOADING](mockState, true);

      expect(mockState).toEqual(expectedState);
    });
  });

  describe('SET_PAGINATION', () => {
    const generatePagination = () => [
      {
        'X-PAGE': '1',
        'X-PER-PAGE': '20',
        'X-TOTAL': '100',
        'X-TOTAL-PAGES': '5',
        'X-NEXT-PAGE': '2',
        'X-PREV-PAGE': '0',
      },
      {
        page: 1,
        perPage: 20,
        total: 100,
        totalPages: 5,
        nextPage: 2,
        previousPage: 0,
      },
    ];

    it('should set the images pagination', () => {
      const [headers, expectedResult] = generatePagination();
      const expectedState = { ...mockState, pagination: expectedResult };
      mutations[types.SET_PAGINATION](mockState, headers);

      expect(mockState).toEqual(expectedState);
    });

    it('should set the tags pagination', () => {
      const [headers, expectedResult] = generatePagination();
      const expectedState = { ...mockState, tagsPagination: expectedResult };
      mutations[types.SET_TAGS_PAGINATION](mockState, headers);

      expect(mockState).toEqual(expectedState);
    });
  });
});
@@ -20,11 +20,18 @@ describe('updateElementsVisibility', () => {
});
 
describe('updateFormAction', () => {
-  it('updates form action', () => {
+  it.each`
+    path
+    ${'/test'}
+    ${'test'}
+    ${'/'}
+  `('updates form action for $path', ({ path }) => {
    setHTMLFixture('<form class="js-test" action="/"></form>');

-    updateFormAction('.js-test', '/gitlab/create', '/test');
+    updateFormAction('.js-test', '/gitlab/create', path);

-    expect(document.querySelector('.js-test').action).toBe('http://localhost/gitlab/create/test');
+    expect(document.querySelector('.js-test').action).toBe(
+      `http://localhost/gitlab/create/${path.replace(/^\//, '')}`,
+    );
  });
});
@@ -43,7 +43,7 @@ describe Gitlab::Ci::Config::Entry::Reports do
      :license_management | 'gl-license-management-report.json'
      :license_scanning   | 'gl-license-scanning-report.json'
      :performance        | 'performance.json'
-     :lsif               | 'lsif.sqlite3'
+     :lsif               | 'lsif.json'
    end
 
with_them do
# frozen_string_literal: true

require 'spec_helper'

# Verifies that given an exported project meta-data tree, when importing this
# tree and then exporting it again, we should obtain the initial tree.
#
# This equivalence only works up to a certain extent, for instance we need
# to ignore:
#
# - row IDs and foreign key IDs
# - some timestamps
# - randomly generated fields like tokens
#
# as these are expected to change between import/export cycles.
describe Gitlab::ImportExport do
  include ImportExport::CommonUtil
  include ConfigurationHelper
  include ImportExport::ProjectTreeExpectations

  let(:json_fixture) { 'complex' }

  it 'yields the initial tree when importing and exporting it again' do
    project = create(:project, creator: create(:user, :admin))

    # We first generate a test fixture dynamically from a seed-fixture, so as to
    # account for any fields in the initial fixture that are missing and set to
    # defaults during import (ideally we should have realistic test fixtures
    # that "honestly" represent exports)
    expect(
      restore_then_save_project(
        project,
        import_path: seed_fixture_path,
        export_path: test_fixture_path)
    ).to be true

    # Import, then export again from the generated fixture. Any residual changes
    # in the JSON will count towards comparison i.e. test failures.
    expect(
      restore_then_save_project(
        project,
        import_path: test_fixture_path,
        export_path: test_tmp_path)
    ).to be true

    imported_json = JSON.parse(File.read("#{test_fixture_path}/project.json"))
    exported_json = JSON.parse(File.read("#{test_tmp_path}/project.json"))

    assert_relations_match(imported_json, exported_json)
  end

  private

  def seed_fixture_path
    "#{fixtures_path}/#{json_fixture}"
  end

  def test_fixture_path
    "#{test_tmp_path}/#{json_fixture}"
  end
end
@@ -111,6 +111,18 @@ describe Ci::JobArtifact do
    end
  end

  describe '.for_sha' do
    it 'returns job artifacts for a given pipeline sha' do
      first_pipeline = create(:ci_pipeline)
      second_pipeline = create(:ci_pipeline, sha: Digest::SHA1.hexdigest(SecureRandom.hex))
      first_artifact = create(:ci_job_artifact, job: create(:ci_build, pipeline: first_pipeline))
      second_artifact = create(:ci_job_artifact, job: create(:ci_build, pipeline: second_pipeline))

      expect(described_class.for_sha(first_pipeline.sha)).to eq([first_artifact])
      expect(described_class.for_sha(second_pipeline.sha)).to eq([second_artifact])
    end
  end
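
The scope itself is defined on the model and is not shown in this excerpt; a plausible sketch consistent with these expectations (artifacts joined to their pipeline, filtered by SHA) might be:

# Hypothetical sketch of the scope under test -- the actual definition in
# Ci::JobArtifact may differ.
scope :for_sha, ->(sha) { joins(job: :pipeline).where(ci_pipelines: { sha: sha }) }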
  describe 'callbacks' do
    subject { create(:ci_job_artifact, :archive) }
 
@@ -165,11 +165,25 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
  describe '#exclusively_update_reactive_cache!' do
    subject(:go!) { instance.exclusively_update_reactive_cache! }

    shared_examples 'successful cache' do
      it 'caches the result of #calculate_reactive_cache' do
        go!

        expect(read_reactive_cache(instance)).to eq(calculation.call)
      end

      it 'does not raise the exception' do
        expect { go! }.not_to raise_exception(ReactiveCaching::ExceededReactiveCacheLimit)
      end
    end

    context 'when the lease is free and lifetime is not exceeded' do
      before do
-        stub_reactive_cache(instance, "preexisting")
+        stub_reactive_cache(instance, 'preexisting')
      end

      it_behaves_like 'successful cache'

      it 'takes and releases the lease' do
        expect_to_obtain_exclusive_lease(cache_key, 'uuid')
        expect_to_cancel_exclusive_lease(cache_key, 'uuid')
@@ -177,19 +191,13 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
        go!
      end

-      it 'caches the result of #calculate_reactive_cache' do
-        go!
-
-        expect(read_reactive_cache(instance)).to eq(calculation.call)
-      end
-
-      it "enqueues a repeat worker" do
+      it 'enqueues a repeat worker' do
        expect_reactive_cache_update_queued(instance)

        go!
      end

-      it "calls a reactive_cache_updated only once if content did not change on subsequent update" do
+      it 'calls a reactive_cache_updated only once if content did not change on subsequent update' do
        expect(instance).to receive(:calculate_reactive_cache).twice
        expect(instance).to receive(:reactive_cache_updated).once

@@ -202,6 +210,43 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
        go!
      end

      context 'when calculated object size exceeds default reactive_cache_hard_limit' do
        let(:calculation) { -> { 'a' * 2 * 1.megabyte } }

        shared_examples 'ExceededReactiveCacheLimit' do
          it 'raises ExceededReactiveCacheLimit exception and does not cache new data' do
            expect { go! }.to raise_exception(ReactiveCaching::ExceededReactiveCacheLimit)
            expect(read_reactive_cache(instance)).not_to eq(calculation.call)
          end
        end

        context 'when reactive_cache_hard_limit feature flag is enabled' do
          it_behaves_like 'ExceededReactiveCacheLimit'

          context 'when reactive_cache_hard_limit is overridden' do
            let(:test_class) { Class.new(CacheTest) { self.reactive_cache_hard_limit = 3.megabytes } }
            let(:instance) { test_class.new(666, &calculation) }

            it_behaves_like 'successful cache'

            context 'when cache size is over the overridden limit' do
              let(:calculation) { -> { 'a' * 4 * 1.megabyte } }

              it_behaves_like 'ExceededReactiveCacheLimit'
            end
          end
        end

        context 'when reactive_cache_limit feature flag is disabled' do
          before do
            stub_feature_flags(reactive_cache_limit: false)
          end

          it_behaves_like 'successful cache'
        end
      end

      context 'and #calculate_reactive_cache raises an exception' do
        before do
          stub_reactive_cache(instance, "preexisting")
@@ -256,8 +301,8 @@ describe ReactiveCaching, :use_clean_rails_memory_store_caching do
    it { expect(subject.reactive_cache_lease_timeout).to be_a(ActiveSupport::Duration) }
    it { expect(subject.reactive_cache_refresh_interval).to be_a(ActiveSupport::Duration) }
    it { expect(subject.reactive_cache_lifetime).to be_a(ActiveSupport::Duration) }
    it { expect(subject.reactive_cache_key).to respond_to(:call) }
    it { expect(subject.reactive_cache_hard_limit).to be_a(Integer) }
    it { expect(subject.reactive_cache_worker_finder).to respond_to(:call) }
  end
end
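
Taken together, these specs treat reactive_cache_hard_limit as a class-level setting guarded by a feature flag. A hedged sketch of a class raising the limit, mirroring the override used above (the class name and attributes are illustrative, not from this diff):

# Illustrative only: a class raising the cache size limit, as in the
# overridden test class above (Class.new(CacheTest) { ... }).
class ExpensiveSummary
  include ReactiveCaching

  self.reactive_cache_key = ->(instance) { [instance.class.name, instance.id] }
  self.reactive_cache_hard_limit = 3.megabytes # results above this size raise
                                               # ReactiveCaching::ExceededReactiveCacheLimit

  attr_reader :id

  def initialize(id)
    @id = id
  end

  def calculate_reactive_cache
    # expensive work; the serialized result must stay under the hard limit
  end
end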
# frozen_string_literal: true

require 'spec_helper'

describe API::API do
  let(:user) { create(:user, last_activity_on: Date.yesterday) }

  describe 'Record user last activity in after hook' do
    # It does not matter which endpoint is used because last_activity_on should
    # be updated on every request. `/groups` is used as an example
    # to represent any API endpoint
    it 'updates the users last_activity_on date' do
      expect { get api('/groups', user) }.to change { user.reload.last_activity_on }.to(Date.today)
    end

    context 'when the api_activity_logging feature is disabled' do
      it 'does not touch last_activity_on' do
        stub_feature_flags(api_activity_logging: false)

        expect { get api('/groups', user) }.not_to change { user.reload.last_activity_on }
      end
    end
  end
end
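
The after hook itself is not part of this excerpt; a purely hypothetical sketch of the mechanism under test (names and placement are assumptions, not the real implementation):

# Hypothetical sketch only -- the real hook lives in the API layer and is
# guarded by the api_activity_logging feature flag.
after do
  if Feature.enabled?(:api_activity_logging) && current_user
    current_user.update_column(:last_activity_on, Date.today)
  end
end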
# frozen_string_literal: true

require "spec_helper"

describe API::LsifData do
  let_it_be(:user) { create(:user) }
  let_it_be(:project) { create(:project, :repository) }

  let(:commit) { project.commit }

  describe 'GET lsif/info' do
    let(:endpoint_path) { "/projects/#{project.id}/commits/#{commit.id}/lsif/info" }

    context 'user does not have access to the project' do
      before do
        project.add_guest(user)
      end

      it 'returns 403' do
        get api(endpoint_path, user), params: { path: 'main.go' }

        expect(response).to have_gitlab_http_status(:forbidden)
      end
    end

    context 'user has access to the project' do
      before do
        project.add_reporter(user)
      end

      context 'code_navigation feature is disabled' do
        before do
          stub_feature_flags(code_navigation: false)
        end

        it 'returns 404' do
          get api(endpoint_path, user)

          expect(response).to have_gitlab_http_status(:not_found)
        end
      end

      context 'there is no job artifact for the passed commit' do
        it 'returns 404' do
          get api(endpoint_path, user), params: { path: 'main.go' }

          expect(response).to have_gitlab_http_status(:not_found)
        end
      end

      context 'lsif data is stored as a job artifact' do
        let!(:pipeline) { create(:ci_pipeline, project: project, sha: commit.id) }
        let!(:artifact) { create(:ci_job_artifact, :lsif, job: create(:ci_build, pipeline: pipeline)) }

        it 'returns code navigation info for a given path' do
          get api(endpoint_path, user), params: { path: 'main.go' }

          expect(response).to have_gitlab_http_status(:ok)
          expect(response.parsed_body.last).to eq({
            'end_char' => 18,
            'end_line' => 8,
            'start_char' => 13,
            'start_line' => 8
          })
        end

        context 'the stored file is too large' do
          it 'returns 413' do
            allow_any_instance_of(JobArtifactUploader).to receive(:cached_size).and_return(20.megabytes)

            get api(endpoint_path, user), params: { path: 'main.go' }

            expect(response).to have_gitlab_http_status(:payload_too_large)
          end
        end

        context 'the user does not have access to the pipeline' do
          let(:project) { create(:project, :repository, builds_access_level: ProjectFeature::DISABLED) }

          it 'returns 403' do
            get api(endpoint_path, user), params: { path: 'main.go' }

            expect(response).to have_gitlab_http_status(:forbidden)
          end
        end
      end
    end
  end
end
@@ -148,6 +148,7 @@ describe API::ProjectContainerRepositories do
    let(:lease_key) { "container_repository:cleanup_tags:#{root_repository.id}" }

    it 'schedules cleanup of tags repository' do
      stub_last_activity_update
      stub_exclusive_lease(lease_key, timeout: 1.hour)

      expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
        .with(maintainer.id, root_repository.id, worker_params)
@@ -1462,7 +1462,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
      subject

      expect(response).to have_gitlab_http_status(200)
-      expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+      expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
      expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
      expect(json_response['RemoteObject']).to be_nil
    end
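
Note on the assertion change in this and the following hunks: on newer Rack/Rails versions, response.content_type may include parameters such as the charset, whereas response.media_type returns only the bare MIME type, which is what these specs mean to compare. Illustrative values, not output from this suite:

# Assumed illustration of the difference:
# response.content_type # => "application/json; charset=utf-8"
# response.media_type   # => "application/json"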
@@ -1482,7 +1482,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
      subject

      expect(response).to have_gitlab_http_status(200)
-      expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+      expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
      expect(json_response).not_to have_key('TempPath')
      expect(json_response['RemoteObject']).to have_key('ID')
      expect(json_response['RemoteObject']).to have_key('GetURL')
@@ -1558,7 +1558,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
      authorize_artifacts_with_token_in_headers

      expect(response).to have_gitlab_http_status(200)
-      expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+      expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
      expect(json_response['TempPath']).not_to be_nil
    end
 
@@ -92,7 +92,7 @@ describe 'Git HTTP requests' do
      it 'allows pulls' do
        download(path, env) do |response|
          expect(response).to have_gitlab_http_status(:ok)
-          expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+          expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
        end
      end
    end
@@ -101,7 +101,7 @@ describe 'Git HTTP requests' do
      it 'allows pushes', :sidekiq_might_not_need_inline do
        upload(path, env) do |response|
          expect(response).to have_gitlab_http_status(:ok)
-          expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+          expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
        end
      end
    end
@@ -509,7 +509,7 @@ describe 'Git HTTP requests' do
 
      download(path, env) do
        expect(response).to have_gitlab_http_status(:ok)
-        expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+        expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
      end
    end
 
@@ -518,7 +518,7 @@ describe 'Git HTTP requests' do
 
      upload(path, env) do
        expect(response).to have_gitlab_http_status(:ok)
-        expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+        expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
      end
    end
 