Commit 561e1b47 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 7b52c7cb
Showing 411 additions and 103 deletions
@@ -1041,7 +1041,7 @@ POST /projects
| `ci_config_path` | string | no | The path to CI config file |
| `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project |
| `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) |
| `repository_storage` | string | no | **(STARTER ONLY)** Which storage shard the repository is on. Available only to admins |
| `repository_storage` | string | no | Which storage shard the repository is on. Available only to admins |
| `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge requests by default |
| `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project |
| `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project |
@@ -1109,7 +1109,7 @@ POST /projects/user/:user_id
| `ci_config_path` | string | no | The path to CI config file |
| `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project |
| `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) |
| `repository_storage` | string | no | **(STARTER ONLY)** Which storage shard the repository is on. Available only to admins |
| `repository_storage` | string | no | Which storage shard the repository is on. Available only to admins |
| `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge requests by default |
| `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project |
| `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project |
@@ -1177,7 +1177,7 @@ PUT /projects/:id
| `ci_default_git_depth` | integer | no | Default number of revisions for [shallow cloning](../user/project/pipelines/settings.md#git-shallow-clone) |
| `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project |
| `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) |
| `repository_storage` | string | no | **(STARTER ONLY)** Which storage shard the repository is on. Available only to admins |
| `repository_storage` | string | no | Which storage shard the repository is on. Available only to admins |
| `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge requests by default |
| `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project |
| `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project |
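A minimal usage sketch for the now-unrestricted `repository_storage` parameter documented above. The host, project ID, shard name, and token environment variable are placeholders, not values from this change; for non-admin callers the parameter is dropped server-side (see `verify_update_project_attrs!` below).

require 'net/http'
require 'uri'

# Hypothetical values; substitute a real GitLab host, project ID, storage shard and admin token.
uri = URI('https://gitlab.example.com/api/v4/projects/42')
request = Net::HTTP::Put.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_ADMIN_TOKEN')
request.set_form_data('repository_storage' => 'test_second_storage')

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code  # 200 when the move is scheduled; non-admins simply keep the current shard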
@@ -136,3 +136,5 @@ Incident Management features can be easily enabled & disabled via the Project se
#### Auto-creation
 
GitLab Issues can automatically be created as a result of an Alert notification. An Issue created this way will contain error information to help you debug it further.
For [GitLab-managed alerting rules](../project/integrations/prometheus.md#setting-up-alerts-for-prometheus-metrics-ultimate), the issue will include an embedded chart for the query corresponding to the alert. The chart will show an hour of data surrounding the starting point of the incident, 30 minutes before and after.
@@ -106,6 +106,9 @@ module API
project.auto_devops.nil? ? 'continuous' : project.auto_devops.deploy_strategy
end
expose :autoclose_referenced_issues
expose :repository_storage, if: ->(project, options) {
Ability.allowed?(options[:current_user], :change_repository_storage, project)
}
 
# rubocop: disable CodeReuse/ActiveRecord
def self.preload_relation(projects_relation, options = {})
@@ -54,6 +54,7 @@ module API
optional :auto_devops_enabled, type: Boolean, desc: 'Flag indication if Auto DevOps is enabled'
optional :auto_devops_deploy_strategy, type: String, values: %w(continuous manual timed_incremental), desc: 'Auto Deploy strategy'
optional :autoclose_referenced_issues, type: Boolean, desc: 'Flag indication if referenced issues auto-closing is enabled'
optional :repository_storage, type: String, desc: 'Which storage shard the repository is on. Available only to admins'
end
 
params :optional_project_params_ee do
@@ -125,6 +126,7 @@ module API
:wiki_access_level,
:avatar,
:suggestion_commit_message,
:repository_storage,
 
# TODO: remove in API v5, replaced by *_access_level
:issues_enabled,
@@ -15,7 +15,7 @@ module API
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
segment ':id/commits/:commit_id' do
params do
requires :path, type: String, desc: 'The path of a file'
requires :paths, type: Array, desc: 'The paths of the files'
end
get 'lsif/info' do
authorize! :download_code, user_project
@@ -30,7 +30,9 @@ module API
authorize! :read_pipeline, artifact.job.pipeline
file_too_large! if artifact.file.cached_size > MAX_FILE_SIZE
 
::Projects::LsifDataService.new(artifact.file, @project, params).execute
service = ::Projects::LsifDataService.new(artifact.file, @project, params[:commit_id])
params[:paths].to_h { |path| [path, service.execute(path)] }
end
end
end
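An illustration, with stand-in data, of the hash built by the endpoint above: it now takes a `paths` array and returns one set of LSIF code-navigation ranges per requested path.

# Stand-in for ::Projects::LsifDataService#execute; the real service reads the LSIF job artifact.
lookup = ->(path) { [{ 'start_line' => 0, 'start_char' => 0, 'hover' => nil }] }

paths  = ['main.go', 'morestrings/reverse.go']
result = paths.to_h { |path| [path, lookup.call(path)] }
# => { "main.go" => [...], "morestrings/reverse.go" => [...] }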
@@ -69,7 +69,16 @@ module API
end
params do
requires :repository_id, type: Integer, desc: 'The ID of the repository'
requires :name_regex, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
optional :name_regex_delete, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
# Require either name_regex (deprecated) or name_regex_delete; it is OK to pass both.
given name_regex_delete: ->(val) { val.nil? } do
requires :name_regex, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
end
optional :name_regex, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
given name_regex: ->(val) { val.nil? } do
requires :name_regex_delete, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
end
optional :name_regex_keep, type: String, desc: 'The tag name regexp to retain'
optional :keep_n, type: Integer, desc: 'Keep n of latest tags with matching name'
optional :older_than, type: String, desc: 'Delete older than: 1h, 1d, 1month'
end
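A sketch of the parameter combinations accepted by the `given` blocks above, assuming Grape's usual validation behavior: at least one of the two delete regexes must be present, and supplying both the deprecated and the new name is allowed.

# Accepted by the declaration above:
ok_new        = { name_regex_delete: '.*' }
ok_deprecated = { name_regex: '.*' }
ok_both       = { name_regex: 'v10.*', name_regex_delete: 'v10.*' }

# Rejected: neither regex is given, so each `given` block demands the other => 400 Bad Request.
bad = { keep_n: 100, older_than: '1 day' }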
@@ -25,6 +25,7 @@ module API
end
 
def verify_update_project_attrs!(project, attrs)
attrs.delete(:repository_storage) unless can?(current_user, :change_repository_storage, project)
end
 
def delete_project(user_project)
@@ -118,10 +118,9 @@ module Gitlab
\.haml-lint_todo.yml |
babel\.config\.js |
jest\.config\.js |
karma\.config\.js |
webpack\.config\.js |
package\.json |
yarn\.lock |
config/.+\.js |
\.gitlab/ci/frontend\.gitlab-ci\.yml
)\z}x => :frontend,
 
namespace :gitlab do
namespace :cleanup do
desc "GitLab | Cleanup | Delete moved repositories"
task moved: :gitlab_environment do
warn_user_is_not_gitlab
remove_flag = ENV['REMOVE']
Gitlab.config.repositories.storages.each do |name, repository_storage|
repo_root = repository_storage.legacy_disk_path.chomp('/')
# Look for global repos (legacy, depth 1) and normal repos (depth 2)
IO.popen(%W(find #{repo_root} -mindepth 1 -maxdepth 2 -name *+moved*.git)) do |find|
find.each_line do |path|
path.chomp!
if remove_flag
if FileUtils.rm_rf(path)
puts "Removed...#{path}".color(:green)
else
puts "Cannot remove #{path}".color(:red)
end
else
puts "Can be removed: #{path}".color(:green)
end
end
end
end
unless remove_flag
puts "To cleanup these repositories run this command with REMOVE=true".color(:yellow)
end
end
end
end
@@ -45,18 +45,20 @@ describe('Code navigation actions', () => {
 
describe('success', () => {
beforeEach(() => {
mock.onGet(apiUrl).replyOnce(200, [
{
start_line: 0,
start_char: 0,
hover: { value: '123' },
},
{
start_line: 1,
start_char: 0,
hover: null,
},
]);
mock.onGet(apiUrl).replyOnce(200, {
index: [
{
start_line: 0,
start_char: 0,
hover: { value: '123' },
},
{
start_line: 1,
start_char: 0,
hover: null,
},
],
});
});
 
it('commits REQUEST_DATA_SUCCESS with normalized data', done => {
@@ -75,6 +75,16 @@ describe Resolvers::Projects::SnippetsResolver do
expect(resolve_snippets(context: { current_user: other_user }, args: { ids: project_snippet.to_global_id })).to be_empty
end
end
context 'when project snippets are disabled' do
it 'raises an error' do
disabled_snippet_project = create(:project, :snippets_disabled)
disabled_snippet_project.add_developer(current_user)
expect(SnippetsFinder).not_to receive(:new)
expect { resolve_snippets(obj: disabled_snippet_project) }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
end
 
def resolve_snippets(args: {}, context: { current_user: current_user }, obj: project)
@@ -175,9 +175,12 @@ describe Gitlab::Danger::Helper do
'spec/javascripts/foo' | :frontend
'spec/frontend/bar' | :frontend
'vendor/assets/foo' | :frontend
'babel.config.js' | :frontend
'jest.config.js' | :frontend
'package.json' | :frontend
'yarn.lock' | :frontend
'config/foo.js' | :frontend
'config/deep/foo.js' | :frontend
 
'ee/app/assets/foo' | :frontend
'ee/app/views/foo' | :frontend
@@ -2822,6 +2822,44 @@ describe Project do
end
end
 
describe '#change_repository_storage' do
let(:project) { create(:project, :repository) }
let(:read_only_project) { create(:project, :repository, repository_read_only: true) }
before do
stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
end
it 'schedules the transfer of the repository to the new storage and locks the project' do
expect(ProjectUpdateRepositoryStorageWorker).to receive(:perform_async).with(project.id, 'test_second_storage')
project.change_repository_storage('test_second_storage')
project.save!
expect(project).to be_repository_read_only
end
it "doesn't schedule the transfer if the repository is already read-only" do
expect(ProjectUpdateRepositoryStorageWorker).not_to receive(:perform_async)
read_only_project.change_repository_storage('test_second_storage')
read_only_project.save!
end
it "doesn't lock or schedule the transfer if the storage hasn't changed" do
expect(ProjectUpdateRepositoryStorageWorker).not_to receive(:perform_async)
project.change_repository_storage(project.repository_storage)
project.save!
expect(project).not_to be_repository_read_only
end
it 'throws an error if an invalid repository storage is provided' do
expect { project.change_repository_storage('unknown') }.to raise_error(ArgumentError)
end
end
describe '#pushes_since_gc' do
let(:project) { create(:project) }
 
@@ -4,36 +4,73 @@ require 'spec_helper'
 
describe SnippetBlobPresenter do
describe '#rich_data' do
let(:snippet) { build(:personal_snippet) }
before do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:current_user).and_return(nil)
end
end
 
subject { described_class.new(snippet.blob).rich_data }
 
it 'returns nil when the snippet blob is binary' do
allow(snippet.blob).to receive(:binary?).and_return(true)
context 'with PersonalSnippet' do
let(:raw_url) { "http://127.0.0.1:3000/snippets/#{snippet.id}/raw" }
let(:snippet) { build(:personal_snippet) }
 
expect(subject).to be_nil
end
it 'returns nil when the snippet blob is binary' do
allow(snippet.blob).to receive(:binary?).and_return(true)
 
it 'returns markdown content when snippet file is markup' do
snippet.file_name = 'test.md'
snippet.content = '*foo*'
expect(subject).to be_nil
end
 
expect(subject).to eq '<p data-sourcepos="1:1-1:5" dir="auto"><em>foo</em></p>'
end
context 'with markdown format' do
let(:snippet) { create(:personal_snippet, file_name: 'test.md', content: '*foo*') }
 
it 'returns syntax highlighted content' do
snippet.file_name = 'test.rb'
snippet.content = 'class Foo;end'
it 'returns rich markdown content' do
expected = <<~HTML
<div class="file-content md">
<p data-sourcepos="1:1-1:5" dir="auto"><em>foo</em></p>
</div>
HTML
 
expect(subject)
.to eq '<span id="LC1" class="line" lang="ruby"><span class="k">class</span> <span class="nc">Foo</span><span class="p">;</span><span class="k">end</span></span>'
end
expect(subject).to eq(expected)
end
end
 
it 'returns plain text highlighted content' do
snippet.file_name = 'test'
snippet.content = 'foo'
context 'with notebook format' do
let(:snippet) { create(:personal_snippet, file_name: 'test.ipynb') }
 
expect(subject).to eq '<span id="LC1" class="line" lang="plaintext">foo</span>'
it 'returns rich notebook content' do
expect(subject.strip).to eq %Q(<div class="file-content" data-endpoint="/snippets/#{snippet.id}/raw" id="js-notebook-viewer"></div>)
end
end
context 'with openapi format' do
let(:snippet) { create(:personal_snippet, file_name: 'openapi.yml') }
it 'returns rich openapi content' do
expect(subject).to eq %Q(<div class="file-content" data-endpoint="/snippets/#{snippet.id}/raw" id="js-openapi-viewer"></div>\n)
end
end
context 'with svg format' do
let(:snippet) { create(:personal_snippet, file_name: 'test.svg') }
it 'returns rich svg content' do
result = Nokogiri::HTML::DocumentFragment.parse(subject)
image_tag = result.search('img').first
expect(image_tag.attr('src')).to include("data:#{snippet.blob.mime_type};base64")
expect(image_tag.attr('alt')).to eq('test.svg')
end
end
context 'with other format' do
let(:snippet) { create(:personal_snippet, file_name: 'test') }
it 'does not return rich content' do
expect(subject).to be_nil
end
end
end
end
 
@@ -55,19 +92,19 @@ describe SnippetBlobPresenter do
expect(subject).to eq '<span id="LC1" class="line" lang="markdown"><span class="ge">*foo*</span></span>'
end
 
it 'returns plain syntax content' do
it 'returns highlighted syntax content' do
snippet.file_name = 'test.rb'
snippet.content = 'class Foo;end'
 
expect(subject)
.to eq '<span id="LC1" class="line" lang="">class Foo;end</span>'
.to eq '<span id="LC1" class="line" lang="ruby"><span class="k">class</span> <span class="nc">Foo</span><span class="p">;</span><span class="k">end</span></span>'
end
 
it 'returns plain text highlighted content' do
snippet.file_name = 'test'
snippet.content = 'foo'
 
expect(subject).to eq '<span id="LC1" class="line" lang="">foo</span>'
expect(subject).to eq '<span id="LC1" class="line" lang="plaintext">foo</span>'
end
end
 
@@ -67,7 +67,7 @@ describe 'Creating a Snippet' do
it 'returns the created Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
 
expect(mutation_response['snippet']['blob']['richData']).to match(content)
expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(content)
expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description)
@@ -93,7 +93,7 @@ describe 'Creating a Snippet' do
it 'returns the created Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
 
expect(mutation_response['snippet']['blob']['richData']).to match(content)
expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(content)
expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description)
@@ -56,7 +56,7 @@ describe 'Updating a Snippet' do
it 'returns the updated Snippet' do
post_graphql_mutation(mutation, current_user: current_user)
 
expect(mutation_response['snippet']['blob']['richData']).to match(updated_content)
expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(updated_content)
expect(mutation_response['snippet']['title']).to eq(updated_title)
expect(mutation_response['snippet']['description']).to eq(updated_description)
@@ -78,7 +78,7 @@ describe 'Updating a Snippet' do
it 'returns the Snippet with its original values' do
post_graphql_mutation(mutation, current_user: current_user)
 
expect(mutation_response['snippet']['blob']['richData']).to match(original_content)
expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(original_content)
expect(mutation_response['snippet']['title']).to eq(original_title)
expect(mutation_response['snippet']['description']).to eq(original_description)
@@ -9,18 +9,20 @@ describe API::LsifData do
let(:commit) { project.commit }
 
describe 'GET lsif/info' do
let(:endpoint_path) { "/projects/#{project.id}/commits/#{commit.id}/lsif/info" }
subject do
endpoint_path = "/projects/#{project.id}/commits/#{commit.id}/lsif/info"
get api(endpoint_path, user), params: { paths: ['main.go', 'morestrings/reverse.go'] }
response
end
 
context 'user does not have access to the project' do
before do
project.add_guest(user)
end
 
it 'returns 403' do
get api(endpoint_path, user), params: { path: 'main.go' }
expect(response).to have_gitlab_http_status(:forbidden)
end
it { is_expected.to have_gitlab_http_status(:forbidden) }
end
 
context 'user has access to the project' do
@@ -28,35 +30,27 @@ describe API::LsifData do
project.add_reporter(user)
end
 
context 'code_navigation feature is disabled' do
before do
stub_feature_flags(code_navigation: false)
end
it 'returns 404' do
get api(endpoint_path, user)
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'there is no job artifact for the passed commit' do
it 'returns 404' do
get api(endpoint_path, user), params: { path: 'main.go' }
expect(response).to have_gitlab_http_status(:not_found)
end
it { is_expected.to have_gitlab_http_status(:not_found) }
end
 
context 'lsif data is stored as a job artifact' do
let!(:pipeline) { create(:ci_pipeline, project: project, sha: commit.id) }
let!(:artifact) { create(:ci_job_artifact, :lsif, job: create(:ci_build, pipeline: pipeline)) }
 
it 'returns code navigation info for a given path' do
get api(endpoint_path, user), params: { path: 'main.go' }
context 'code_navigation feature is disabled' do
before do
stub_feature_flags(code_navigation: false)
end
 
expect(response).to have_gitlab_http_status(:ok)
expect(response.parsed_body.last).to eq({
it { is_expected.to have_gitlab_http_status(:not_found) }
end
it 'returns code navigation info for a given path', :aggregate_failures do
expect(subject).to have_gitlab_http_status(:ok)
data_for_main = response.parsed_body['main.go']
expect(data_for_main.last).to eq({
'end_char' => 18,
'end_line' => 8,
'start_char' => 13,
@@ -67,26 +61,33 @@ describe API::LsifData do
'value' => Gitlab::Highlight.highlight(nil, 'func Func2(i int) string', language: 'go')
}]
})
data_for_reverse = response.parsed_body['morestrings/reverse.go']
expect(data_for_reverse.last).to eq({
'end_char' => 9,
'end_line' => 7,
'start_char' => 8,
'start_line' => 7,
'definition_url' => project_blob_path(project, "#{commit.id}/morestrings/reverse.go", anchor: 'L6'),
'hover' => [{
'language' => 'go',
'value' => Gitlab::Highlight.highlight(nil, 'var b string', language: 'go')
}]
})
end
 
context 'the stored file is too large' do
it 'returns 413' do
before do
allow_any_instance_of(JobArtifactUploader).to receive(:cached_size).and_return(20.megabytes)
get api(endpoint_path, user), params: { path: 'main.go' }
expect(response).to have_gitlab_http_status(:payload_too_large)
end
it { is_expected.to have_gitlab_http_status(:payload_too_large) }
end
 
context 'the user does not have access to the pipeline' do
let(:project) { create(:project, :repository, builds_access_level: ProjectFeature::DISABLED) }
 
it 'returns 403' do
get api(endpoint_path, user), params: { path: 'main.go' }
expect(response).to have_gitlab_http_status(:forbidden)
end
it { is_expected.to have_gitlab_http_status(:forbidden) }
end
end
end
@@ -109,7 +109,7 @@ describe API::ProjectContainerRepositories do
 
context 'disallowed' do
let(:params) do
{ name_regex: 'v10.*' }
{ name_regex_delete: 'v10.*' }
end
 
it_behaves_like 'rejected container repository access', :developer, :forbidden
@@ -130,16 +130,33 @@ describe API::ProjectContainerRepositories do
end
end
 
context 'without name_regex' do
let(:params) do
{ keep_n: 100,
older_than: '1 day',
other: 'some value' }
end
it 'returns bad request' do
subject
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'passes all declared parameters' do
let(:params) do
{ name_regex: 'v10.*',
{ name_regex_delete: 'v10.*',
name_regex_keep: 'v10.1.*',
keep_n: 100,
older_than: '1 day',
other: 'some value' }
end
 
let(:worker_params) do
{ name_regex: 'v10.*',
{ name_regex: nil,
name_regex_delete: 'v10.*',
name_regex_keep: 'v10.1.*',
keep_n: 100,
older_than: '1 day',
container_expiration_policy: false }
@@ -174,6 +191,38 @@ describe API::ProjectContainerRepositories do
end
end
end
context 'with deprecated name_regex param' do
let(:params) do
{ name_regex: 'v10.*',
name_regex_keep: 'v10.1.*',
keep_n: 100,
older_than: '1 day',
other: 'some value' }
end
let(:worker_params) do
{ name_regex: 'v10.*',
name_regex_delete: nil,
name_regex_keep: 'v10.1.*',
keep_n: 100,
older_than: '1 day',
container_expiration_policy: false }
end
let(:lease_key) { "container_repository:cleanup_tags:#{root_repository.id}" }
it 'schedules cleanup of tags repository' do
stub_last_activity_update
stub_exclusive_lease(lease_key, timeout: 1.hour)
expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
.with(maintainer.id, root_repository.id, worker_params)
subject
expect(response).to have_gitlab_http_status(:accepted)
end
end
end
end
 
@@ -1751,6 +1751,27 @@ describe API::Projects do
 
subject { get api("/projects/#{project.id}", user) }
end
describe 'repository_storage attribute' do
before do
get api("/projects/#{project.id}", user)
end
context 'when authenticated as an admin' do
let(:user) { create(:admin) }
it 'returns repository_storage attribute' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['repository_storage']).to eq(project.repository_storage)
end
end
context 'when authenticated as a regular user' do
it 'does not return repository_storage attribute' do
expect(json_response).not_to have_key('repository_storage')
end
end
end
end
 
describe 'GET /projects/:id/users' do
@@ -2402,6 +2423,50 @@ describe API::Projects do
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when updating repository storage' do
let(:unknown_storage) { 'new-storage' }
let(:new_project) { create(:project, :repository, namespace: user.namespace) }
context 'as a user' do
it 'returns 200 but does not change repository_storage' do
expect do
Sidekiq::Testing.fake! do
put(api("/projects/#{new_project.id}", user), params: { repository_storage: unknown_storage, issues_enabled: false })
end
end.not_to change(ProjectUpdateRepositoryStorageWorker.jobs, :size)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['issues_enabled']).to eq(false)
expect(new_project.reload.repository.storage).to eq('default')
end
end
context 'as an admin' do
include_context 'custom session'
let(:admin) { create(:admin) }
it 'returns 500 when repository storage is unknown' do
put(api("/projects/#{new_project.id}", admin), params: { repository_storage: unknown_storage })
expect(response).to have_gitlab_http_status(:internal_server_error)
expect(json_response['message']).to match('ArgumentError')
end
it 'returns 200 when repository storage has changed' do
stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/second_storage' })
expect do
Sidekiq::Testing.fake! do
put(api("/projects/#{new_project.id}", admin), params: { repository_storage: 'test_second_storage' })
end
end.to change(ProjectUpdateRepositoryStorageWorker.jobs, :size).by(1)
expect(response).to have_gitlab_http_status(:ok)
end
end
end
end
 
describe 'POST /projects/:id/archive' do
@@ -48,25 +48,37 @@ describe Projects::ContainerRepository::CleanupTagsService do
end
 
context 'when regex matching everything is specified' do
shared_examples 'removes all matches' do
it 'does remove B* and C' do
# The :A cannot be removed as config is shared with :latest
# The :E cannot be removed as it does not have valid manifest
expect_delete('sha256:configB').twice
expect_delete('sha256:configC')
expect_delete('sha256:configD')
is_expected.to include(status: :success, deleted: %w(D Bb Ba C))
end
end
let(:params) do
{ 'name_regex' => '.*' }
{ 'name_regex_delete' => '.*' }
end
 
it 'does remove B* and C' do
# The :A cannot be removed as config is shared with :latest
# The :E cannot be removed as it does not have valid manifest
it_behaves_like 'removes all matches'
 
expect_delete('sha256:configB').twice
expect_delete('sha256:configC')
expect_delete('sha256:configD')
context 'with deprecated name_regex param' do
let(:params) do
{ 'name_regex' => '.*' }
end
 
is_expected.to include(status: :success, deleted: %w(D Bb Ba C))
it_behaves_like 'removes all matches'
end
end
 
context 'when regex matching specific tags is used' do
context 'when delete regex matching specific tags is used' do
let(:params) do
{ 'name_regex' => 'C|D' }
{ 'name_regex_delete' => 'C|D' }
end
 
it 'does remove C and D' do
@@ -75,11 +87,37 @@ describe Projects::ContainerRepository::CleanupTagsService do
 
is_expected.to include(status: :success, deleted: %w(D C))
end
context 'with overriding allow regex' do
let(:params) do
{ 'name_regex_delete' => 'C|D',
'name_regex_keep' => 'C' }
end
it 'does not remove C' do
expect_delete('sha256:configD')
is_expected.to include(status: :success, deleted: %w(D))
end
end
context 'with name_regex_delete overriding deprecated name_regex' do
let(:params) do
{ 'name_regex' => 'C|D',
'name_regex_delete' => 'D' }
end
it 'does not remove C' do
expect_delete('sha256:configD')
is_expected.to include(status: :success, deleted: %w(D))
end
end
end
 
context 'when removing a tagged image that is used by another tag' do
let(:params) do
{ 'name_regex' => 'Ba' }
{ 'name_regex_delete' => 'Ba' }
end
 
it 'does not remove the tag' do
@@ -89,9 +127,23 @@ describe Projects::ContainerRepository::CleanupTagsService do
end
end
 
context 'with allow regex value' do
let(:params) do
{ 'name_regex_delete' => '.*',
'name_regex_keep' => 'B.*' }
end
it 'does not remove B*' do
expect_delete('sha256:configC')
expect_delete('sha256:configD')
is_expected.to include(status: :success, deleted: %w(D C))
end
end
context 'when removing keeping only 3' do
let(:params) do
{ 'name_regex' => '.*',
{ 'name_regex_delete' => '.*',
'keep_n' => 3 }
end
 
@@ -104,7 +156,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
 
context 'when removing older than 1 day' do
let(:params) do
{ 'name_regex' => '.*',
{ 'name_regex_delete' => '.*',
'older_than' => '1 day' }
end
 
@@ -118,7 +170,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
 
context 'when combining all parameters' do
let(:params) do
{ 'name_regex' => '.*',
{ 'name_regex_delete' => '.*',
'keep_n' => 1,
'older_than' => '1 day' }
end
@@ -136,7 +188,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
 
context 'with valid container_expiration_policy param' do
let(:params) do
{ 'name_regex' => '.*',
{ 'name_regex_delete' => '.*',
'keep_n' => 1,
'older_than' => '1 day',
'container_expiration_policy' => true }
@@ -152,7 +204,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
 
context 'without container_expiration_policy param' do
let(:params) do
{ 'name_regex' => '.*',
{ 'name_regex_delete' => '.*',
'keep_n' => 1,
'older_than' => '1 day' }
end