Commit 11e5d1b9 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 7351a484
@@ -907,7 +907,7 @@ describe 'Git LFS API and storage' do
       it_behaves_like 'LFS http 200 response'
 
       it 'uses the gitlab-workhorse content type' do
-        expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+        expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
       end
     end
 
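Note on the one-line change above: in Rails 6, `response.content_type` returns the full Content-Type header value, including parameters such as the charset, while the newer `response.media_type` returns only the MIME type, which is what this assertion cares about. A minimal runnable sketch of the distinction, using Rack's header parser rather than a live response object (an illustration, not part of this commit):

    require 'rack'

    # A Content-Type header may carry parameters after the MIME type.
    content_type = 'application/json; charset=utf-8'

    # Rack::MediaType.type strips the parameters, mirroring what
    # response.media_type exposes in Rails 6.
    Rack::MediaType.type(content_type) # => "application/json"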
# frozen_string_literal: true

require 'spec_helper'

describe Projects::LsifDataService do
  let(:artifact) { create(:ci_job_artifact, :lsif) }
  let(:project) { build_stubbed(:project) }
  let(:path) { 'main.go' }
  let(:commit_id) { Digest::SHA1.hexdigest(SecureRandom.hex) }
  let(:params) { { path: path, commit_id: commit_id } }

  let(:service) { described_class.new(artifact.file, project, params) }

  describe '#execute' do
    context 'fetched lsif file', :use_clean_rails_memory_store_caching do
      it 'is cached' do
        service.execute

        cached_data = Rails.cache.fetch("project:#{project.id}:lsif:#{commit_id}")

        expect(cached_data.keys).to eq(%w[def_refs doc_ranges docs hover_refs ranges])
      end
    end
    context 'for main.go' do
      it 'returns lsif ranges for the file' do
        expect(service.execute).to eq([
          {
            end_char: 9,
            end_line: 6,
            start_char: 5,
            start_line: 6
          },
          {
            end_char: 36,
            end_line: 3,
            start_char: 1,
            start_line: 3
          },
          {
            end_char: 12,
            end_line: 7,
            start_char: 1,
            start_line: 7
          },
          {
            end_char: 20,
            end_line: 7,
            start_char: 13,
            start_line: 7
          },
          {
            end_char: 12,
            end_line: 8,
            start_char: 1,
            start_line: 8
          },
          {
            end_char: 18,
            end_line: 8,
            start_char: 13,
            start_line: 8
          }
        ])
      end
    end
    context 'for morestrings/reverse.go' do
      let(:path) { 'morestrings/reverse.go' }

      it 'returns lsif ranges for the file' do
        expect(service.execute.first).to eq({
          end_char: 2,
          end_line: 11,
          start_char: 1,
          start_line: 11
        })
      end
    end
    context 'for an unknown file' do
      let(:path) { 'unknown.go' }

      it 'returns nil' do
        expect(service.execute).to eq(nil)
      end
    end
  end

  describe '#doc_id_from' do
    context 'when the passed path matches multiple files' do
      let(:path) { 'check/main.go' }
      let(:docs) do
        {
          1 => 'cmd/check/main.go',
          2 => 'cmd/command.go',
          3 => 'check/main.go',
          4 => 'cmd/nested/check/main.go'
        }
      end

      it 'fetches the document with the shortest absolute path' do
        expect(service.__send__(:doc_id_from, docs)).to eq(3)
      end
    end
  end
end
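For orientation, a minimal sketch of how the service under test is driven; it mirrors the spec's own setup, and the cache key format is the one asserted above:

    # Sketch mirroring the spec setup; all names come from the spec above.
    service = Projects::LsifDataService.new(artifact.file, project, path: 'main.go', commit_id: commit_id)

    service.execute
    # Parsed LSIF data is now memoized in the Rails cache under
    # "project:#{project.id}:lsif:#{commit_id}".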
@@ -13,18 +13,18 @@ describe Spam::HamService do
     allow(Spam::AkismetService).to receive(:new).and_return fake_akismet_service
   end
 
-  describe '#mark_as_ham!' do
+  describe '#execute' do
     context 'AkismetService returns false (Akismet cannot be reached, etc)' do
       before do
         allow(fake_akismet_service).to receive(:submit_ham).and_return false
       end
 
       it 'returns false' do
-        expect(subject.mark_as_ham!).to be_falsey
+        expect(subject.execute).to be_falsey
       end
 
       it 'does not update the record' do
-        expect { subject.mark_as_ham! }.not_to change { spam_log.submitted_as_ham }
+        expect { subject.execute }.not_to change { spam_log.submitted_as_ham }
       end
 
       context 'if spam log record has already been marked as spam' do
@@ -33,7 +33,7 @@ describe Spam::HamService do
         end
 
         it 'does not update the record' do
-          expect { subject.mark_as_ham! }.not_to change { spam_log.submitted_as_ham }
+          expect { subject.execute }.not_to change { spam_log.submitted_as_ham }
         end
       end
     end
@@ -45,11 +45,11 @@ describe Spam::HamService do
       end
 
       it 'returns true' do
-        expect(subject.mark_as_ham!).to be_truthy
+        expect(subject.execute).to be_truthy
       end
 
       it 'updates the record' do
-        expect { subject.mark_as_ham! }.to change { spam_log.submitted_as_ham }.from(false).to(true)
+        expect { subject.execute }.to change { spam_log.submitted_as_ham }.from(false).to(true)
       end
     end
   end
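The rename from `mark_as_ham!` to `execute` brings this service in line with the service-object convention used elsewhere in the codebase. A hedged sketch of a call site; the constructor argument is an assumption inferred from the spec's use of `spam_log`:

    # Hypothetical call site; construction details are assumed.
    Spam::HamService.new(spam_log).execute
    # => true when Akismet accepts the ham submission, false otherwise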
@@ -46,4 +46,8 @@ module ApiHelpers
     expect(json_response).to be_an Array
     expect(json_response.map { |item| item['id'] }).to eq(Array(items))
   end
+
+  def stub_last_activity_update
+    allow_any_instance_of(Users::ActivityService).to receive(:execute)
+  end
 end
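A hedged example of how a request spec might use the new helper so that authenticated API calls do not exercise `Users::ActivityService` on every request; the spec body is illustrative:

    # Hypothetical spec usage of the helper added above.
    describe 'GET /projects' do
      before do
        stub_last_activity_update
      end

      it 'returns the project list' do
        get api('/projects', user)

        expect(response).to have_gitlab_http_status(:ok)
      end
    end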
@@ -17,5 +17,38 @@ module ImportExport
 
       allow_any_instance_of(Gitlab::ImportExport).to receive(:export_path) { export_path }
     end
+
+    def fixtures_path
+      "spec/fixtures/lib/gitlab/import_export"
+    end
+
+    def test_tmp_path
+      "tmp/tests/gitlab-test/import_export"
+    end
+
+    def restore_then_save_project(project, import_path:, export_path:)
+      project_restorer = get_project_restorer(project, import_path)
+      project_saver = get_project_saver(project, export_path)
+
+      project_restorer.restore && project_saver.save
+    end
+
+    def get_project_restorer(project, import_path)
+      Gitlab::ImportExport::ProjectTreeRestorer.new(
+        user: project.creator, shared: get_shared_env(path: import_path), project: project
+      )
+    end
+
+    def get_project_saver(project, export_path)
+      Gitlab::ImportExport::ProjectTreeSaver.new(
+        project: project, current_user: project.creator, shared: get_shared_env(path: export_path)
+      )
+    end
+
+    def get_shared_env(path:)
+      instance_double(Gitlab::ImportExport::Shared).tap do |shared|
+        allow(shared).to receive(:export_path).and_return(path)
+      end
+    end
   end
 end
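Taken together, these helpers support import/export round-trip specs. A hedged usage sketch; the fixture subdirectory is illustrative:

    # Hypothetical round trip; the fixture path is an assumption.
    it 'restores and re-exports the project' do
      result = restore_then_save_project(
        project,
        import_path: "#{fixtures_path}/complex",
        export_path: test_tmp_path
      )

      expect(result).to be_truthy
    end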
# frozen_string_literal: true

module ImportExport
  module ProjectTreeExpectations
    def assert_relations_match(imported_hash, exported_hash)
      normalized_imported_hash = normalize_elements(imported_hash)
      normalized_exported_hash = normalize_elements(exported_hash)

      # this is for sanity checking, to make sure we didn't accidentally pass the test
      # because we essentially ignored everything
      stats = {
        hashes: 0,
        arrays: {
          direct: 0,
          pairwise: 0,
          fuzzy: 0
        },
        values: 0
      }

      failures = match_recursively(normalized_imported_hash, normalized_exported_hash, stats)

      puts "Elements checked:\n#{stats.pretty_inspect}"

      expect(failures).to be_empty, failures.join("\n\n")
    end

    private

    def match_recursively(left_node, right_node, stats, location_stack = [], failures = [])
      if Hash === left_node && Hash === right_node
        match_hashes(left_node, right_node, stats, location_stack, failures)
      elsif Array === left_node && Array === right_node
        match_arrays(left_node, right_node, stats, location_stack, failures)
      else
        stats[:values] += 1
        if left_node != right_node
          failures << failure_message("Value mismatch", location_stack, left_node, right_node)
        end
      end

      failures
    end

    def match_hashes(left_node, right_node, stats, location_stack, failures)
      stats[:hashes] += 1
      left_keys = left_node.keys.to_set
      right_keys = right_node.keys.to_set

      if left_keys != right_keys
        failures << failure_message("Hash keys mismatch", location_stack, left_keys, right_keys)
      end

      left_node.keys.each do |key|
        location_stack << key
        match_recursively(left_node[key], right_node[key], stats, location_stack, failures)
        location_stack.pop
      end
    end

    def match_arrays(left_node, right_node, stats, location_stack, failures)
      has_simple_elements = left_node.none? { |el| Enumerable === el }
      # for simple types, we can do a direct order-less set comparison
      if has_simple_elements && left_node.to_set != right_node.to_set
        stats[:arrays][:direct] += 1
        failures << failure_message("Elements mismatch", location_stack, left_node, right_node)
      # if both arrays have the same number of complex elements, we can compare pair-wise in-order
      elsif left_node.size == right_node.size
        stats[:arrays][:pairwise] += 1
        left_node.zip(right_node).each do |left_entry, right_entry|
          match_recursively(left_entry, right_entry, stats, location_stack, failures)
        end
      # otherwise we have to fall back to a best-effort match by probing into the right array;
      # this means we will not account for elements that exist on the right, but not on the left
      else
        stats[:arrays][:fuzzy] += 1
        left_node.each do |left_entry|
          right_entry = right_node.find { |el| el == left_entry }
          match_recursively(left_entry, right_entry, stats, location_stack, failures)
        end
      end
    end

    def failure_message(what, location_stack, left_value, right_value)
      where =
        if location_stack.empty?
          "root"
        else
          location_stack.map { |loc| loc.to_sym.inspect }.join(' -> ')
        end

      ">> [#{where}] #{what}\n\n#{left_value.pretty_inspect}\nNOT EQUAL TO\n\n#{right_value.pretty_inspect}"
    end

    # Helper that traverses a project tree and normalizes data that we know
    # to vary in the process of importing (such as list order or row IDs)
    def normalize_elements(elem)
      case elem
      when Hash
        elem.map do |key, value|
          if ignore_key?(key, value)
            [key, :ignored]
          else
            [key, normalize_elements(value)]
          end
        end.to_h
      when Array
        elem.map { |a| normalize_elements(a) }
      else
        elem
      end
    end

    # We currently need to ignore certain entries when checking for equivalence because
    # we know them to change between imports/exports either by design or because of bugs;
    # this helper filters out these problematic nodes.
    def ignore_key?(key, value)
      id?(key) || # IDs are known to be replaced during imports
        key == 'updated_at' || # these get changed frequently during imports
        key == 'next_run_at' || # these values change based on wall clock
        key == 'notes' # the importer attaches an extra "by user XYZ" at the end of a note
    end

    def id?(key)
      key == 'id' || key.ends_with?('_id')
    end
  end
end
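A hedged sketch of how `assert_relations_match` might be driven from a spec, comparing a re-exported tree against the original fixture; the file locations are assumptions:

    # Hypothetical usage; file locations are assumptions.
    imported = JSON.parse(File.read(File.join(test_tmp_path, 'project.json')))
    exported = JSON.parse(File.read(File.join(fixtures_path, 'complex/project.json')))

    assert_relations_match(imported, exported)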
@@ -14,6 +14,18 @@ describe ReactiveCachingWorker do
 
         described_class.new.perform("Environment", environment.id)
       end
+
+      context 'when ReactiveCaching::ExceededReactiveCacheLimit is raised' do
+        it 'avoids failing the job and tracks via Gitlab::ErrorTracking' do
+          allow_any_instance_of(Environment).to receive(:exclusively_update_reactive_cache!)
+            .and_raise(ReactiveCaching::ExceededReactiveCacheLimit)
+
+          expect(Gitlab::ErrorTracking).to receive(:track_exception)
+            .with(kind_of(ReactiveCaching::ExceededReactiveCacheLimit))
+
+          described_class.new.perform("Environment", environment.id)
+        end
+      end
     end
   end
 end
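The new context verifies that the worker reports `ExceededReactiveCacheLimit` via error tracking instead of letting the job fail and retry. A sketch of the rescue shape the spec implies; this is not the actual worker source:

    # Sketch of the behaviour under test, not the worker's real source.
    class ReactiveCachingWorker
      def perform(class_name, id, *args)
        klass = begin
                  class_name.constantize
                rescue NameError
                  nil
                end
        return unless klass

        klass.find_by(id: id)&.exclusively_update_reactive_cache!(*args)
      rescue ReactiveCaching::ExceededReactiveCacheLimit => e
        # Swallow the error so the job succeeds, but record it.
        Gitlab::ErrorTracking.track_exception(e)
      end
    end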