Commit 2c156e3c authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 8e129497
Showing with 944 additions and 61 deletions
@@ -6,7 +6,7 @@ describe Gitlab::Ci::Parsers do
describe '.fabricate!' do
subject { described_class.fabricate!(file_type) }
 
context 'when file_type exists' do
context 'when file_type is junit' do
let(:file_type) { 'junit' }
 
it 'fabricates the class' do
@@ -14,6 +14,14 @@ describe Gitlab::Ci::Parsers do
end
end
 
context 'when file_type is cobertura' do
let(:file_type) { 'cobertura' }
it 'fabricates the class' do
is_expected.to be_a(described_class::Coverage::Cobertura)
end
end
context 'when file_type does not exist' do
let(:file_type) { 'undefined' }
 
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Reports::CoverageReports do
let(:coverage_report) { described_class.new }
it { expect(coverage_report.files).to eq({}) }
describe '#pick' do
before do
coverage_report.add_file('app.rb', { 1 => 0, 2 => 1 })
coverage_report.add_file('routes.rb', { 3 => 1, 4 => 0 })
end
it 'returns only picked files while ignoring nonexistent ones' do
expect(coverage_report.pick(['routes.rb', 'nonexistent.txt'])).to eq({
files: { 'routes.rb' => { 3 => 1, 4 => 0 } }
})
end
end
describe '#add_file' do
context 'when providing two individual files' do
before do
coverage_report.add_file('app.rb', { 1 => 0, 2 => 1 })
coverage_report.add_file('routes.rb', { 3 => 1, 4 => 0 })
end
it 'stores the line coverage for each file' do
expect(coverage_report.files).to eq({
'app.rb' => { 1 => 0, 2 => 1 },
'routes.rb' => { 3 => 1, 4 => 0 }
})
end
end
context 'when providing the same files twice' do
context 'with different line coverage' do
before do
coverage_report.add_file('admin.rb', { 1 => 0, 2 => 1 })
coverage_report.add_file('admin.rb', { 3 => 1, 4 => 0 })
end
it 'merges the line coverage for the file' do
expect(coverage_report.files).to eq({
'admin.rb' => { 1 => 0, 2 => 1, 3 => 1, 4 => 0 }
})
end
end
context 'with identical line coverage' do
before do
coverage_report.add_file('projects.rb', { 1 => 0, 2 => 1 })
coverage_report.add_file('projects.rb', { 1 => 0, 2 => 1 })
end
it 'sums the hits for identical lines' do
expect(coverage_report.files).to eq({
'projects.rb' => { 1 => 0, 2 => 2 }
})
end
end
end
end
end
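The examples above pin down the merge semantics for coverage data: #add_file deep-merges per-file line coverage and sums hits for lines reported more than once, while #pick returns only the requested files that are present in the report. A minimal sketch of an object satisfying these expectations (names are illustrative, not necessarily the shipped implementation):

# Minimal sketch matching the expectations in the spec above.
class CoverageReportsSketch
  attr_reader :files

  def initialize
    @files = {}
  end

  # Merge line coverage for a file, summing hits for lines already seen.
  def add_file(name, line_coverage)
    files[name] ||= {}
    files[name].merge!(line_coverage) { |_line, old_hits, new_hits| old_hits + new_hits }
  end

  # Return only the requested files that exist in the report.
  def pick(names)
    { files: files.select { |name, _coverage| names.include?(name) } }
  end
end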
@@ -197,6 +197,6 @@ describe Gitlab::ImportExport::Group::TreeSaver do
end
 
def group_json(filename)
JSON.parse(IO.read(filename))
::JSON.parse(IO.read(filename))
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::ImportExport::JSON::LegacyWriter do
let(:path) { "#{Dir.tmpdir}/legacy_writer_spec/test.json" }
subject { described_class.new(path) }
after do
FileUtils.rm_rf(path)
end
describe "#write" do
context "when key is already written" do
it "raises exception" do
key = "key"
value = "value"
subject.write(key, value)
expect { subject.write(key, "new value") }.to raise_exception("key '#{key}' already written")
end
end
context "when key is not already written" do
context "when multiple key value pairs are stored" do
it "writes correct json" do
expected_hash = { "key" => "value_1", "key_1" => "value_2" }
expected_hash.each do |key, value|
subject.write(key, value)
end
subject.close
expect(saved_json(path)).to eq(expected_hash)
end
end
end
end
describe "#append" do
context "when key is already written" do
it "appends values under a given key" do
key = "key"
values = %w(value_1 value_2)
expected_hash = { key => values }
values.each do |value|
subject.append(key, value)
end
subject.close
expect(saved_json(path)).to eq(expected_hash)
end
end
context "when key is not already written" do
it "writes correct json" do
expected_hash = { "key" => ["value"] }
subject.append("key", "value")
subject.close
expect(saved_json(path)).to eq(expected_hash)
end
end
end
describe "#set" do
it "writes correct json" do
expected_hash = { "key" => "value_1", "key_1" => "value_2" }
subject.set(expected_hash)
subject.close
expect(saved_json(path)).to eq(expected_hash)
end
end
def saved_json(filename)
::JSON.parse(IO.read(filename))
end
end
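Taken together, these examples describe a writer that accumulates key/value data (#write refuses to overwrite an existing key, #append collects values into an array, #set stores a whole hash) and serializes everything to the given path on #close. A rough in-memory sketch of that contract; the class name is illustrative and the real LegacyWriter may buffer and write to disk differently:

require 'fileutils'
require 'json'

# Illustrative stand-in for the contract exercised above; not the actual writer.
class InMemoryLegacyWriter
  def initialize(path)
    @path = path
    @data = {}
  end

  def write(key, value)
    raise StandardError, "key '#{key}' already written" if @data.key?(key)

    @data[key] = value
  end

  def append(key, value)
    (@data[key] ||= []) << value
  end

  def set(hash)
    @data.merge!(hash)
  end

  def close
    FileUtils.mkdir_p(File.dirname(@path))
    File.write(@path, @data.to_json)
  end
end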
@@ -2,7 +2,7 @@
 
require 'spec_helper'
 
describe Gitlab::ImportExport::RelationTreeSaver do
describe Gitlab::ImportExport::LegacyRelationTreeSaver do
let(:exportable) { create(:group) }
let(:relation_tree_saver) { described_class.new }
let(:tree) { {} }
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::ImportExport::Project::LegacyTreeSaver do
describe 'saves the project tree into a json object' do
let(:shared) { project.import_export_shared }
let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:user) { create(:user) }
let!(:project) { setup_project }
before do
project.add_maintainer(user)
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
allow_any_instance_of(MergeRequest).to receive(:source_branch_sha).and_return('ABCD')
allow_any_instance_of(MergeRequest).to receive(:target_branch_sha).and_return('DCBA')
end
after do
FileUtils.rm_rf(export_path)
end
it 'saves project successfully' do
expect(project_tree_saver.save).to be true
end
context ':export_fast_serialize feature flag checks' do
before do
expect(Gitlab::ImportExport::Reader).to receive(:new).with(shared: shared).and_return(reader)
expect(reader).to receive(:project_tree).and_return(project_tree)
end
let(:serializer) { instance_double('Gitlab::ImportExport::FastHashSerializer') }
let(:reader) { instance_double('Gitlab::ImportExport::Reader') }
let(:project_tree) do
{
include: [{ issues: { include: [] } }],
preload: { issues: nil }
}
end
context 'when :export_fast_serialize feature is enabled' do
before do
stub_feature_flags(export_fast_serialize: true)
end
it 'uses FastHashSerializer' do
expect(Gitlab::ImportExport::FastHashSerializer)
.to receive(:new)
.with(project, project_tree)
.and_return(serializer)
expect(serializer).to receive(:execute)
project_tree_saver.save
end
end
context 'when :export_fast_serialize feature is disabled' do
before do
stub_feature_flags(export_fast_serialize: false)
end
it 'is serialized via built-in `as_json`' do
expect(project).to receive(:as_json).with(project_tree)
project_tree_saver.save
end
end
end
# It is mostly duplicated in
# `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
# except:
# context 'with description override' do
# context 'group members' do
# ^ These are specific for the Project::TreeSaver
context 'JSON' do
let(:saved_project_json) do
project_tree_saver.save
project_json(project_tree_saver.full_path)
end
# It is not duplicated in
# `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
context 'with description override' do
let(:params) { { description: 'Foo Bar' } }
let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared, params: params) }
it 'overrides the project description' do
expect(saved_project_json).to include({ 'description' => params[:description] })
end
end
it 'saves the correct json' do
expect(saved_project_json).to include({ 'description' => 'description', 'visibility_level' => 20 })
end
it 'has approvals_before_merge set' do
expect(saved_project_json['approvals_before_merge']).to eq(1)
end
it 'has milestones' do
expect(saved_project_json['milestones']).not_to be_empty
end
it 'has merge requests' do
expect(saved_project_json['merge_requests']).not_to be_empty
end
it 'has merge request\'s milestones' do
expect(saved_project_json['merge_requests'].first['milestone']).not_to be_empty
end
it 'has merge request\'s source branch SHA' do
expect(saved_project_json['merge_requests'].first['source_branch_sha']).to eq('ABCD')
end
it 'has merge request\'s target branch SHA' do
expect(saved_project_json['merge_requests'].first['target_branch_sha']).to eq('DCBA')
end
it 'has events' do
expect(saved_project_json['merge_requests'].first['milestone']['events']).not_to be_empty
end
it 'has snippets' do
expect(saved_project_json['snippets']).not_to be_empty
end
it 'has snippet notes' do
expect(saved_project_json['snippets'].first['notes']).not_to be_empty
end
it 'has releases' do
expect(saved_project_json['releases']).not_to be_empty
end
it 'has no author on releases' do
expect(saved_project_json['releases'].first['author']).to be_nil
end
it 'has the author ID on releases' do
expect(saved_project_json['releases'].first['author_id']).not_to be_nil
end
it 'has issues' do
expect(saved_project_json['issues']).not_to be_empty
end
it 'has issue comments' do
notes = saved_project_json['issues'].first['notes']
expect(notes).not_to be_empty
expect(notes.first['type']).to eq('DiscussionNote')
end
it 'has issue assignees' do
expect(saved_project_json['issues'].first['issue_assignees']).not_to be_empty
end
it 'has author on issue comments' do
expect(saved_project_json['issues'].first['notes'].first['author']).not_to be_empty
end
it 'has project members' do
expect(saved_project_json['project_members']).not_to be_empty
end
it 'has merge requests diffs' do
expect(saved_project_json['merge_requests'].first['merge_request_diff']).not_to be_empty
end
it 'has merge request diff files' do
expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_files']).not_to be_empty
end
it 'has merge request diff commits' do
expect(saved_project_json['merge_requests'].first['merge_request_diff']['merge_request_diff_commits']).not_to be_empty
end
it 'has merge requests comments' do
expect(saved_project_json['merge_requests'].first['notes']).not_to be_empty
end
it 'has author on merge requests comments' do
expect(saved_project_json['merge_requests'].first['notes'].first['author']).not_to be_empty
end
it 'has pipeline stages' do
expect(saved_project_json.dig('ci_pipelines', 0, 'stages')).not_to be_empty
end
it 'has pipeline statuses' do
expect(saved_project_json.dig('ci_pipelines', 0, 'stages', 0, 'statuses')).not_to be_empty
end
it 'has pipeline builds' do
builds_count = saved_project_json
.dig('ci_pipelines', 0, 'stages', 0, 'statuses')
.count { |hash| hash['type'] == 'Ci::Build' }
expect(builds_count).to eq(1)
end
it 'has no when YML attributes but only the DB column' do
expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
saved_project_json
end
it 'has pipeline commits' do
expect(saved_project_json['ci_pipelines']).not_to be_empty
end
it 'has ci pipeline notes' do
expect(saved_project_json['ci_pipelines'].first['notes']).not_to be_empty
end
it 'has labels with no associations' do
expect(saved_project_json['labels']).not_to be_empty
end
it 'has labels associated to records' do
expect(saved_project_json['issues'].first['label_links'].first['label']).not_to be_empty
end
it 'has project and group labels' do
label_types = saved_project_json['issues'].first['label_links'].map { |link| link['label']['type'] }
expect(label_types).to match_array(%w(ProjectLabel GroupLabel))
end
it 'has priorities associated to labels' do
priorities = saved_project_json['issues'].first['label_links'].flat_map { |link| link['label']['priorities'] }
expect(priorities).not_to be_empty
end
it 'has issue resource label events' do
expect(saved_project_json['issues'].first['resource_label_events']).not_to be_empty
end
it 'has merge request resource label events' do
expect(saved_project_json['merge_requests'].first['resource_label_events']).not_to be_empty
end
it 'saves the correct service type' do
expect(saved_project_json['services'].first['type']).to eq('CustomIssueTrackerService')
end
it 'saves the properties for a service' do
expect(saved_project_json['services'].first['properties']).to eq('one' => 'value')
end
it 'has project feature' do
project_feature = saved_project_json['project_feature']
expect(project_feature).not_to be_empty
expect(project_feature["issues_access_level"]).to eq(ProjectFeature::DISABLED)
expect(project_feature["wiki_access_level"]).to eq(ProjectFeature::ENABLED)
expect(project_feature["builds_access_level"]).to eq(ProjectFeature::PRIVATE)
end
it 'has custom attributes' do
expect(saved_project_json['custom_attributes'].count).to eq(2)
end
it 'has badges' do
expect(saved_project_json['project_badges'].count).to eq(2)
end
it 'does not complain about non UTF-8 characters in MR diff files' do
ActiveRecord::Base.connection.execute("UPDATE merge_request_diff_files SET diff = '---\n- :diff: !binary |-\n LS0tIC9kZXYvbnVsbAorKysgYi9pbWFnZXMvbnVjb3IucGRmCkBAIC0wLDAg\n KzEsMTY3OSBAQAorJVBERi0xLjUNJeLjz9MNCisxIDAgb2JqDTw8L01ldGFk\n YXR'")
expect(project_tree_saver.save).to be true
end
context 'group members' do
let(:user2) { create(:user, email: 'group@member.com') }
let(:member_emails) do
saved_project_json['project_members'].map do |pm|
pm['user']['email']
end
end
before do
Group.first.add_developer(user2)
end
it 'does not export group members if it has no permission' do
Group.first.add_developer(user)
expect(member_emails).not_to include('group@member.com')
end
it 'does not export group members as maintainer' do
Group.first.add_maintainer(user)
expect(member_emails).not_to include('group@member.com')
end
it 'exports group members as group owner' do
Group.first.add_owner(user)
expect(member_emails).to include('group@member.com')
end
context 'as admin' do
let(:user) { create(:admin) }
it 'exports group members as admin' do
expect(member_emails).to include('group@member.com')
end
it 'exports group members as project members' do
member_types = saved_project_json['project_members'].map { |pm| pm['source_type'] }
expect(member_types).to all(eq('Project'))
end
end
end
context 'project attributes' do
it 'does not contain the runners token' do
expect(saved_project_json).not_to include("runners_token" => 'token')
end
end
it 'has a board and a list' do
expect(saved_project_json['boards'].first['lists']).not_to be_empty
end
end
end
def setup_project
release = create(:release)
group = create(:group)
project = create(:project,
:public,
:repository,
:issues_disabled,
:wiki_enabled,
:builds_private,
description: 'description',
releases: [release],
group: group,
approvals_before_merge: 1
)
allow(project).to receive(:commit).and_return(Commit.new(RepoHelpers.sample_commit, project))
issue = create(:issue, assignees: [user], project: project)
snippet = create(:project_snippet, project: project)
project_label = create(:label, project: project)
group_label = create(:group_label, group: group)
create(:label_link, label: project_label, target: issue)
create(:label_link, label: group_label, target: issue)
create(:label_priority, label: group_label, priority: 1)
milestone = create(:milestone, project: project)
merge_request = create(:merge_request, source_project: project, milestone: milestone)
ci_build = create(:ci_build, project: project, when: nil)
ci_build.pipeline.update(project: project)
create(:commit_status, project: project, pipeline: ci_build.pipeline)
create(:milestone, project: project)
create(:discussion_note, noteable: issue, project: project)
create(:note, noteable: merge_request, project: project)
create(:note, noteable: snippet, project: project)
create(:note_on_commit,
author: user,
project: project,
commit_id: ci_build.pipeline.sha)
create(:resource_label_event, label: project_label, issue: issue)
create(:resource_label_event, label: group_label, merge_request: merge_request)
create(:event, :created, target: milestone, project: project, author: user)
create(:service, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker', properties: { one: 'value' })
create(:project_custom_attribute, project: project)
create(:project_custom_attribute, project: project)
create(:project_badge, project: project)
create(:project_badge, project: project)
board = create(:board, project: project, name: 'TestBoard')
create(:list, board: board, position: 0, label: project_label)
project
end
def project_json(filename)
::JSON.parse(IO.read(filename))
end
end
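The ':export_fast_serialize feature flag checks' context above implies a single branch inside the legacy saver: with the flag enabled, serialization goes through FastHashSerializer#execute; with it disabled, it falls back to ActiveRecord's as_json on the project. A condensed sketch of that branch, with the method name chosen purely for illustration:

# Illustrative condensation of the flag check exercised in the spec above.
def serialize_project_tree(project, project_tree)
  if Feature.enabled?(:export_fast_serialize)
    Gitlab::ImportExport::FastHashSerializer.new(project, project_tree).execute
  else
    project.as_json(project_tree)
  end
end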
@@ -25,57 +25,6 @@ describe Gitlab::ImportExport::Project::TreeSaver do
expect(project_tree_saver.save).to be true
end
 
context ':export_fast_serialize feature flag checks' do
before do
expect(Gitlab::ImportExport::Reader).to receive(:new).with(shared: shared).and_return(reader)
expect(reader).to receive(:project_tree).and_return(project_tree)
end
let(:serializer) { instance_double('Gitlab::ImportExport::FastHashSerializer') }
let(:reader) { instance_double('Gitlab::ImportExport::Reader') }
let(:project_tree) do
{
include: [{ issues: { include: [] } }],
preload: { issues: nil }
}
end
context 'when :export_fast_serialize feature is enabled' do
before do
stub_feature_flags(export_fast_serialize: true)
end
it 'uses FastHashSerializer' do
expect(Gitlab::ImportExport::FastHashSerializer)
.to receive(:new)
.with(project, project_tree)
.and_return(serializer)
expect(serializer).to receive(:execute)
project_tree_saver.save
end
end
context 'when :export_fast_serialize feature is disabled' do
before do
stub_feature_flags(export_fast_serialize: false)
end
it 'is serialized via built-in `as_json`' do
expect(project).to receive(:as_json).with(project_tree)
project_tree_saver.save
end
end
end
# It is mostly duplicated in
# `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
# except:
# context 'with description override' do
# context 'group members' do
# ^ These are specific for the Project::TreeSaver
context 'JSON' do
let(:saved_project_json) do
project_tree_saver.save
@@ -392,6 +341,6 @@ describe Gitlab::ImportExport::Project::TreeSaver do
end
 
def project_json(filename)
JSON.parse(IO.read(filename))
::JSON.parse(IO.read(filename))
end
end
@@ -3946,6 +3946,53 @@ describe Ci::Build do
end
end
 
describe '#collect_coverage_reports!' do
subject { build.collect_coverage_reports!(coverage_report) }
let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
it { expect(coverage_report.files).to eq({}) }
context 'when build has a coverage report' do
context 'when there is a Cobertura coverage report from simplecov-cobertura' do
before do
create(:ci_job_artifact, :cobertura, job: build, project: build.project)
end
it 'parses blobs and adds the results to the coverage report' do
expect { subject }.not_to raise_error
expect(coverage_report.files.keys).to match_array(['app/controllers/abuse_reports_controller.rb'])
expect(coverage_report.files['app/controllers/abuse_reports_controller.rb'].count).to eq(23)
end
end
context 'when there is a Cobertura coverage report from gocov-xml' do
before do
create(:ci_job_artifact, :coverage_gocov_xml, job: build, project: build.project)
end
it 'parses blobs and adds the results to the coverage report' do
expect { subject }.not_to raise_error
expect(coverage_report.files.keys).to match_array(['auth/token.go', 'auth/rpccredentials.go'])
expect(coverage_report.files['auth/token.go'].count).to eq(49)
expect(coverage_report.files['auth/rpccredentials.go'].count).to eq(10)
end
end
context 'when there is a corrupted Cobertura coverage report' do
before do
create(:ci_job_artifact, :coverage_with_corrupted_data, job: build, project: build.project)
end
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Ci::Parsers::Coverage::Cobertura::CoberturaParserError)
end
end
end
end
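These examples suggest #collect_coverage_reports! walks the build's coverage report artifacts, fabricates the matching parser (the cobertura parser added earlier in this commit), and feeds each blob into the shared coverage report. A hedged sketch of that flow; each_report and COVERAGE_REPORT_FILE_TYPES are assumptions standing in for however the build enumerates its artifacts:

# Sketch only; the helper names here are assumptions, not Ci::Build internals.
def collect_coverage_reports!(coverage_report)
  each_report(Ci::JobArtifact::COVERAGE_REPORT_FILE_TYPES) do |file_type, blob|
    Gitlab::Ci::Parsers.fabricate!(file_type).parse!(blob, coverage_report)
  end

  coverage_report
end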
describe '#report_artifacts' do
subject { build.report_artifacts }
 
@@ -70,6 +70,22 @@ describe Ci::JobArtifact do
end
end
 
describe '.coverage_reports' do
subject { described_class.coverage_reports }
context 'when there is a coverage report' do
let!(:artifact) { create(:ci_job_artifact, :cobertura) }
it { is_expected.to eq([artifact]) }
end
context 'when there are no coverage reports' do
let!(:artifact) { create(:ci_job_artifact, :archive) }
it { is_expected.to be_empty }
end
end
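The .coverage_reports scope asserted here only needs to select artifacts whose file type is a coverage format, which at this point is just cobertura. One plausible, hedged definition on the model (the real scope may delegate to a shared file-type list):

# Assumption: coverage reports are currently just cobertura artifacts.
scope :coverage_reports, -> { where(file_type: :cobertura) }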
describe '.erasable' do
subject { described_class.erasable }
 
@@ -344,9 +344,9 @@ describe Ci::Pipeline, :mailer do
end
 
describe '.with_reports' do
subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
context 'when pipeline has a test report' do
subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
let!(:pipeline_with_report) { create(:ci_pipeline, :with_test_reports) }
 
it 'selects the pipeline' do
@@ -354,7 +354,19 @@ describe Ci::Pipeline, :mailer do
end
end
 
context 'when pipeline has a coverage report' do
subject { described_class.with_reports(Ci::JobArtifact.coverage_reports) }
let!(:pipeline_with_report) { create(:ci_pipeline, :with_coverage_reports) }
it 'selects the pipeline' do
is_expected.to eq([pipeline_with_report])
end
end
context 'when pipeline does not have metrics reports' do
subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
let!(:pipeline_without_report) { create(:ci_empty_pipeline) }
 
it 'does not select the pipeline' do
@@ -2730,6 +2742,43 @@ describe Ci::Pipeline, :mailer do
end
end
 
describe '#coverage_reports' do
subject { pipeline.coverage_reports }
context 'when pipeline has multiple builds with coverage reports' do
let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline, project: project) }
let!(:build_golang) { create(:ci_build, :success, name: 'golang', pipeline: pipeline, project: project) }
before do
create(:ci_job_artifact, :cobertura, job: build_rspec, project: project)
create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang, project: project)
end
it 'returns coverage reports with collected data' do
expect(subject.files.keys).to match_array([
"auth/token.go",
"auth/rpccredentials.go",
"app/controllers/abuse_reports_controller.rb"
])
end
context 'when builds are retried' do
let!(:build_rspec) { create(:ci_build, :retried, :success, name: 'rspec', pipeline: pipeline, project: project) }
let!(:build_golang) { create(:ci_build, :retried, :success, name: 'golang', pipeline: pipeline, project: project) }
it 'does not take retried builds into account' do
expect(subject.files).to eql({})
end
end
end
context 'when pipeline does not have any builds with coverage reports' do
it 'returns empty coverage reports' do
expect(subject.files).to eql({})
end
end
end
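The contexts above, including the retried-builds case, point at an aggregation of the form: take the pipeline's latest builds that carry coverage report artifacts and fold each one into a single CoverageReports object. A sketch, with latest_report_builds standing in as an assumption for however the pipeline excludes retried builds:

# Sketch of the aggregation exercised above; helper names are assumptions.
def coverage_reports
  Gitlab::Ci::Reports::CoverageReports.new.tap do |coverage_reports|
    latest_report_builds(Ci::JobArtifact.coverage_reports).each do |build|
      build.collect_coverage_reports!(coverage_reports)
    end
  end
end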
describe '#total_size' do
let!(:build_job1) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
let!(:build_job2) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
@@ -908,6 +908,16 @@ describe MergeRequest do
end
end
 
describe '#new_paths' do
let(:merge_request) do
create(:merge_request, source_branch: 'expand-collapse-files', target_branch: 'master')
end
it 'returns the new paths of changed files' do
expect(merge_request.new_paths.count).to eq(105)
end
end
describe "#related_notes" do
let!(:merge_request) { create(:merge_request) }
 
@@ -1581,6 +1591,24 @@ describe MergeRequest do
end
end
 
describe '#has_coverage_reports?' do
subject { merge_request.has_coverage_reports? }
let(:project) { create(:project, :repository) }
context 'when head pipeline has coverage reports' do
let(:merge_request) { create(:merge_request, :with_coverage_reports, source_project: project) }
it { is_expected.to be_truthy }
end
context 'when head pipeline does not have coverage reports' do
let(:merge_request) { create(:merge_request, source_project: project) }
it { is_expected.to be_falsey }
end
end
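Both contexts reduce to a check on the head pipeline. A minimal sketch, assuming the pipeline exposes a generic has_reports? check that can be scoped to coverage artifacts:

# Assumed predicate shape; only truthiness is asserted by the spec above.
def has_coverage_reports?
  head_pipeline.present? && head_pipeline.has_reports?(Ci::JobArtifact.coverage_reports)
end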
describe '#calculate_reactive_cache' do
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
@@ -1663,6 +1691,60 @@ describe MergeRequest do
end
end
 
describe '#find_coverage_reports' do
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, :with_coverage_reports, source_project: project) }
let(:pipeline) { merge_request.head_pipeline }
subject { merge_request.find_coverage_reports }
context 'when head pipeline has coverage reports' do
let!(:job) do
create(:ci_build, options: { artifacts: { reports: { cobertura: ['cobertura-coverage.xml'] } } }, pipeline: pipeline)
end
let!(:artifacts_metadata) { create(:ci_job_artifact, :metadata, job: job) }
context 'when reactive cache worker is parsing results asynchronously' do
it 'returns status' do
expect(subject[:status]).to eq(:parsing)
end
end
context 'when reactive cache worker is inline' do
before do
synchronous_reactive_cache(merge_request)
end
it 'returns status and data' do
expect(subject[:status]).to eq(:parsed)
end
context 'when an error occurs' do
before do
merge_request.update!(head_pipeline: nil)
end
it 'returns an error message' do
expect(subject[:status]).to eq(:error)
end
end
context 'when cached results are not the latest' do
before do
allow_next_instance_of(Ci::GenerateCoverageReportsService) do |service|
allow(service).to receive(:latest?).and_return(false)
end
end
it 'raises an InvalidateReactiveCache error' do
expect { subject }.to raise_error(ReactiveCaching::InvalidateReactiveCache)
end
end
end
end
end
describe '#compare_test_reports' do
subject { merge_request.compare_test_reports }
 
@@ -4335,4 +4335,27 @@ describe User, :do_not_mock_admin_mode do
it { expect(user.user_detail).to be_persisted }
end
end
describe '#gitlab_employee?' do
using RSpec::Parameterized::TableSyntax
subject { user.gitlab_employee? }
where(:email, :is_com, :expected_result) do
'test@gitlab.com' | true | true
'test@example.com' | true | false
'test@gitlab.com' | false | false
'test@example.com' | false | false
end
with_them do
let(:user) { build(:user, email: email) }
before do
allow(Gitlab).to receive(:com?).and_return(is_com)
end
it { is_expected.to be expected_result }
end
end
end
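The truth table above fully determines the new predicate: it is true only on GitLab.com and only for a gitlab.com email address. A sketch consistent with that table (real email handling may be stricter than a suffix check):

# Sketch matching the table above.
def gitlab_employee?
  Gitlab.com? && email.end_with?('@gitlab.com')
end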
# frozen_string_literal: true
require 'spec_helper'
describe Projects::ImportExport::ProjectExportPresenter do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:user) { create(:user) }
subject { described_class.new(project, current_user: user) }
describe '#description' do
context "override_description not provided" do
it "keeps original description" do
expect(subject.description).to eq(project.description)
end
end
context "override_description provided" do
let(:description) { "overridden description" }
subject { described_class.new(project, current_user: user, override_description: description) }
it "overrides description" do
expect(subject.description).to eq(description)
end
end
end
describe '#as_json' do
context "override_description not provided" do
it "keeps original description" do
expect(subject.as_json["description"]).to eq(project.description)
end
end
context "override_description provided" do
let(:description) { "overridden description" }
subject { described_class.new(project, current_user: user, override_description: description) }
it "overrides description" do
expect(subject.as_json["description"]).to eq(description)
end
end
end
describe '#project_members' do
let(:user2) { create(:user, email: 'group@member.com') }
let(:member_emails) do
subject.project_members.map do |pm|
pm.user.email
end
end
before do
group.add_developer(user2)
end
it 'does not export group members if it has no permission' do
group.add_developer(user)
expect(member_emails).not_to include('group@member.com')
end
it 'does not export group members as maintainer' do
group.add_maintainer(user)
expect(member_emails).not_to include('group@member.com')
end
it 'exports group members as group owner' do
group.add_owner(user)
expect(member_emails).to include('group@member.com')
end
context 'as admin' do
let(:user) { create(:admin) }
it 'exports group members as admin' do
expect(member_emails).to include('group@member.com')
end
it 'exports group members as project members' do
member_types = subject.project_members.map { |pm| pm.source_type }
expect(member_types).to all(eq('Project'))
end
end
end
end
@@ -36,7 +36,8 @@ describe Ci::RetryBuildService do
job_artifacts_performance job_artifacts_lsif
job_artifacts_codequality job_artifacts_metrics scheduled_at
job_variables waiting_for_resource_at job_artifacts_metrics_referee
job_artifacts_network_referee job_artifacts_dotenv needs].freeze
job_artifacts_network_referee job_artifacts_dotenv
job_artifacts_cobertura needs].freeze
 
IGNORE_ACCESSORS =
%i[type lock_version target_url base_tags trace_sections
@@ -26,10 +26,28 @@ describe Projects::ImportExport::ExportService do
service.execute
end
 
it 'saves the models' do
expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).and_call_original
context 'when :streaming_serializer feature is enabled' do
before do
stub_feature_flags(streaming_serializer: true)
end
 
service.execute
it 'saves the models' do
expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).and_call_original
service.execute
end
end
context 'when :streaming_serializer feature is disabled' do
before do
stub_feature_flags(streaming_serializer: false)
end
it 'saves the models' do
expect(Gitlab::ImportExport::Project::LegacyTreeSaver).to receive(:new).and_call_original
service.execute
end
end
 
it 'saves the uploads' do
@@ -55,6 +55,58 @@ RSpec.shared_examples 'with cross-reference system notes' do
end
 
RSpec.shared_examples 'discussions API' do |parent_type, noteable_type, id_name, can_reply_to_individual_notes: false|
shared_examples 'is_gitlab_employee attribute presence' do
subject { get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user) }
before do
allow(Gitlab).to receive(:com?).and_return(true)
user.update(email: email)
user.confirm
end
context 'when author is a gitlab employee' do
let(:email) { 'test@gitlab.com' }
it 'returns is_gitlab_employee as true' do
subject
expect(json_response.first["notes"].first["author"]['is_gitlab_employee']).to be true
end
end
shared_examples 'non inclusion of gitlab employee badge' do
it 'does not include is_gitlab_employee attribute' do
subject
expect(json_response.first["notes"].first["author"]).not_to have_key('is_gitlab_employee')
end
end
context 'when author is not a gitlab employee' do
let(:email) { 'test@example.com' }
it_behaves_like 'non inclusion of gitlab employee badge'
end
describe 'when feature flag is disabled' do
before do
stub_feature_flags(gitlab_employee_badge: false)
end
context 'when author is a gitlab employee' do
let(:email) { 'test@gitlab.com' }
it_behaves_like 'non inclusion of gitlab employee badge'
end
context 'when author is not a gitlab employee' do
let(:email) { 'test@example.com' }
it_behaves_like 'non inclusion of gitlab employee badge'
end
end
end
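The shared example asserts that the note author's is_gitlab_employee attribute appears only for gitlab.com authors on GitLab.com while the gitlab_employee_badge flag is enabled, and is absent otherwise. With a Grape entity, that conditional exposure could be sketched as follows; the entity name and wiring are assumptions, not the code under test:

# Hypothetical entity wiring matching the behaviour asserted in the shared example.
class AuthorEntitySketch < Grape::Entity
  expose :is_gitlab_employee,
    if: ->(user, _options) { ::Feature.enabled?(:gitlab_employee_badge) && user.gitlab_employee? } do |user|
      user.gitlab_employee?
    end
end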
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions" do
it "returns an array of discussions" do
get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user)
@@ -78,6 +130,8 @@ RSpec.shared_examples 'discussions API' do |parent_type, noteable_type, id_name,
 
expect(response).to have_gitlab_http_status(:not_found)
end
it_behaves_like 'is_gitlab_employee attribute presence'
end
 
describe "GET /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions/:discussion_id" do
@@ -196,6 +250,8 @@ RSpec.shared_examples 'discussions API' do |parent_type, noteable_type, id_name,
end
end
end
it_behaves_like 'is_gitlab_employee attribute presence'
end
 
describe "POST /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions/:discussion_id/notes" do