Skip to content
Snippets Groups Projects
Commit 81f7adf0 authored by GitLab Bot's avatar GitLab Bot
Browse files

Add latest changes from gitlab-org/gitlab@master

parent 383daa12
No related branches found
No related tags found
No related merge requests found
Showing
with 185 additions and 55 deletions
Loading
Loading
@@ -21,7 +21,7 @@ module Gitlab
 
# Fetches every branch on the given remote via the Gitaly ref service.
#
# remote_name - String name of the remote whose branches are listed.
#
# Returns the value produced by consume_find_all_remote_branches_response
# for the streamed response.
def remote_branches(remote_name)
  request = Gitaly::FindAllRemoteBranchesRequest.new(repository: @gitaly_repo, remote_name: remote_name)
  # Single RPC with an explicit medium timeout; the earlier duplicate call
  # without a timeout was a dead store and issued a redundant request.
  response = GitalyClient.call(@repository.storage, :ref_service, :find_all_remote_branches, request, timeout: GitalyClient.medium_timeout)

  consume_find_all_remote_branches_response(remote_name, response)
end
Loading
Loading
@@ -158,7 +158,7 @@ module Gitlab
start_point: encode_binary(start_point)
)
 
response = GitalyClient.call(@repository.storage, :ref_service, :create_branch, request)
response = GitalyClient.call(@repository.storage, :ref_service, :create_branch, request, timeout: GitalyClient.medium_timeout)
 
case response.status
when :OK
Loading
Loading
@@ -182,7 +182,7 @@ module Gitlab
name: encode_binary(branch_name)
)
 
GitalyClient.call(@repository.storage, :ref_service, :delete_branch, request)
GitalyClient.call(@repository.storage, :ref_service, :delete_branch, request, timeout: GitalyClient.medium_timeout)
end
 
def delete_refs(refs: [], except_with_prefixes: [])
Loading
Loading
@@ -192,7 +192,7 @@ module Gitlab
except_with_prefix: except_with_prefixes.map { |r| encode_binary(r) }
)
 
response = GitalyClient.call(@repository.storage, :ref_service, :delete_refs, request, timeout: GitalyClient.default_timeout)
response = GitalyClient.call(@repository.storage, :ref_service, :delete_refs, request, timeout: GitalyClient.medium_timeout)
 
raise Gitlab::Git::Repository::GitError, response.git_error if response.git_error.present?
end
Loading
Loading
@@ -242,7 +242,7 @@ module Gitlab
# Asks Gitaly to pack the repository's refs.
#
# Returns the raw RPC response. Uses the long timeout because ref packing
# can be slow on large repositories.
def pack_refs
  request = Gitaly::PackRefsRequest.new(repository: @gitaly_repo)

  # Only one call is made; the previous un-timed duplicate invocation
  # triggered the RPC twice and discarded the first result.
  GitalyClient.call(@storage, :ref_service, :pack_refs, request, timeout: GitalyClient.long_timeout)
end
 
private
Loading
Loading
Loading
Loading
@@ -38,9 +38,7 @@ module Gitlab
# Removes the named remote from the repository via the Gitaly remote service.
#
# name - String name of the remote to remove.
#
# Returns the `result` field of the Gitaly RemoveRemote response.
def remove_remote(name)
  request = Gitaly::RemoveRemoteRequest.new(repository: @gitaly_repo, name: name)

  # The earlier form assigned an intermediate `response` local AND issued a
  # second identical RPC; a single timed call is sufficient.
  GitalyClient.call(@storage, :remote_service, :remove_remote, request, timeout: GitalyClient.long_timeout).result
end
 
def fetch_internal_remote(repository)
Loading
Loading
@@ -51,6 +49,7 @@ module Gitlab
 
response = GitalyClient.call(@storage, :remote_service,
:fetch_internal_remote, request,
timeout: GitalyClient.medium_timeout,
remote_storage: repository.storage)
 
response.result
Loading
Loading
@@ -63,7 +62,7 @@ module Gitlab
)
 
response = GitalyClient.call(@storage, :remote_service,
:find_remote_root_ref, request)
:find_remote_root_ref, request, timeout: GitalyClient.medium_timeout)
 
encode_utf8(response.ref)
end
Loading
Loading
@@ -95,7 +94,7 @@ module Gitlab
end
end
 
GitalyClient.call(@storage, :remote_service, :update_remote_mirror, req_enum)
GitalyClient.call(@storage, :remote_service, :update_remote_mirror, req_enum, timeout: GitalyClient.long_timeout)
end
end
end
Loading
Loading
Loading
Loading
@@ -28,17 +28,17 @@ module Gitlab
 
# Runs git garbage collection on the repository via Gitaly.
#
# create_bitmap - Boolean; forwarded to Gitaly to control bitmap creation.
#
# Returns the raw RPC response. Long timeout: GC can take a long time.
def garbage_collect(create_bitmap)
  request = Gitaly::GarbageCollectRequest.new(repository: @gitaly_repo, create_bitmap: create_bitmap)
  # One timed call; the duplicate un-timed call was removed to avoid
  # running GC twice per invocation.
  GitalyClient.call(@storage, :repository_service, :garbage_collect, request, timeout: GitalyClient.long_timeout)
end
 
# Performs a full repack of the repository via Gitaly.
#
# create_bitmap - Boolean; forwarded to Gitaly to control bitmap creation.
#
# Returns the raw RPC response. Long timeout: full repacks are expensive.
def repack_full(create_bitmap)
  request = Gitaly::RepackFullRequest.new(repository: @gitaly_repo, create_bitmap: create_bitmap)
  # Single timed call; the un-timed duplicate would have repacked twice.
  GitalyClient.call(@storage, :repository_service, :repack_full, request, timeout: GitalyClient.long_timeout)
end
 
# Performs an incremental repack of the repository via Gitaly.
#
# Returns the raw RPC response.
def repack_incremental
  request = Gitaly::RepackIncrementalRequest.new(repository: @gitaly_repo)
  # Single timed call; the un-timed duplicate issued the RPC twice.
  GitalyClient.call(@storage, :repository_service, :repack_incremental, request, timeout: GitalyClient.long_timeout)
end
 
def repository_size
Loading
Loading
@@ -86,12 +86,12 @@ module Gitlab
end
end
 
GitalyClient.call(@storage, :repository_service, :fetch_remote, request)
GitalyClient.call(@storage, :repository_service, :fetch_remote, request, timeout: GitalyClient.long_timeout)
end
 
# Creates a bare repository on the Gitaly storage.
#
# Returns the raw RPC response. Uses the fast timeout: repository creation
# is a quick operation, so a hung call should fail early.
def create_repository
  request = Gitaly::CreateRepositoryRequest.new(repository: @gitaly_repo)
  # Only one call; the stale duplicate with `medium_timeout` both doubled
  # the RPC and disagreed with the intended timeout.
  GitalyClient.call(@storage, :repository_service, :create_repository, request, timeout: GitalyClient.fast_timeout)
end
 
def has_local_branches?
Loading
Loading
@@ -189,6 +189,7 @@ module Gitlab
:repository_service,
:fetch_source_branch,
request,
timeout: GitalyClient.default_timeout,
remote_storage: source_repository.storage
)
 
Loading
Loading
@@ -197,7 +198,7 @@ module Gitlab
 
def fsck
request = Gitaly::FsckRequest.new(repository: @gitaly_repo)
response = GitalyClient.call(@storage, :repository_service, :fsck, request, timeout: GitalyClient.no_timeout)
response = GitalyClient.call(@storage, :repository_service, :fsck, request, timeout: GitalyClient.long_timeout)
 
if response.error.empty?
return "", 0
Loading
Loading
@@ -211,7 +212,7 @@ module Gitlab
save_path,
:create_bundle,
Gitaly::CreateBundleRequest,
GitalyClient.no_timeout
GitalyClient.long_timeout
)
end
 
Loading
Loading
@@ -229,7 +230,7 @@ module Gitlab
bundle_path,
:create_repository_from_bundle,
Gitaly::CreateRepositoryFromBundleRequest,
GitalyClient.no_timeout
GitalyClient.long_timeout
)
end
 
Loading
Loading
@@ -254,7 +255,7 @@ module Gitlab
:repository_service,
:create_repository_from_snapshot,
request,
timeout: GitalyClient.no_timeout
timeout: GitalyClient.long_timeout
)
end
 
Loading
Loading
@@ -333,7 +334,7 @@ module Gitlab
 
# Searches file contents at the given ref for the query string via Gitaly.
#
# ref   - String ref (branch, tag, or SHA) to search at.
# query - String search query.
#
# Returns the value produced by search_results_from_response.
def search_files_by_content(ref, query)
  request = Gitaly::SearchFilesByContentRequest.new(repository: @gitaly_repo, ref: ref, query: query)
  # Single timed call; the earlier un-timed duplicate was a dead store.
  response = GitalyClient.call(@storage, :repository_service, :search_files_by_content, request, timeout: GitalyClient.default_timeout)

  search_results_from_response(response)
end
Loading
Loading
@@ -343,7 +344,7 @@ module Gitlab
repository: @gitaly_repo
)
 
GitalyClient.call(@storage, :object_pool_service, :disconnect_git_alternates, request)
GitalyClient.call(@storage, :object_pool_service, :disconnect_git_alternates, request, timeout: GitalyClient.long_timeout)
end
 
private
Loading
Loading
Loading
Loading
@@ -11,14 +11,14 @@ module Gitlab
# Lists directories on the storage up to `depth` levels deep.
#
# depth - Integer maximum directory depth to descend (default: 1).
#
# Returns an Array of path strings flattened from the streamed response.
def list_directories(depth: 1)
  request = Gitaly::ListDirectoriesRequest.new(storage_name: @storage, depth: depth)

  # One timed call; the preceding un-timed duplicate issued the RPC twice
  # and discarded the first result.
  GitalyClient.call(@storage, :storage_service, :list_directories, request, timeout: GitalyClient.medium_timeout)
    .flat_map(&:paths)
end
 
# Delete all repositories in the storage. This is a slow and VERY DESTRUCTIVE operation.
def delete_all_repositories
  request = Gitaly::DeleteAllRepositoriesRequest.new(storage_name: @storage)
  # Single timed call; the un-timed duplicate would have issued this
  # destructive RPC twice.
  GitalyClient.call(@storage, :storage_service, :delete_all_repositories, request, timeout: GitalyClient.long_timeout)
end
end
end
Loading
Loading
Loading
Loading
@@ -34,7 +34,7 @@ module Gitlab
end
end
 
response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_write_page, enum)
response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_write_page, enum, timeout: GitalyClient.medium_timeout)
if error = response.duplicate_error.presence
raise Gitlab::Git::Wiki::DuplicatePageError, error
end
Loading
Loading
@@ -61,7 +61,7 @@ module Gitlab
end
end
 
GitalyClient.call(@repository.storage, :wiki_service, :wiki_update_page, enum)
GitalyClient.call(@repository.storage, :wiki_service, :wiki_update_page, enum, timeout: GitalyClient.medium_timeout)
end
 
def delete_page(page_path, commit_details)
Loading
Loading
@@ -187,7 +187,7 @@ module Gitlab
directory: encode_binary(dir)
)
 
response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_get_formatted_data, request)
response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_get_formatted_data, request, timeout: GitalyClient.medium_timeout)
response.reduce([]) { |memo, msg| memo << msg.data }.join
end
 
Loading
Loading
Loading
Loading
@@ -17,6 +17,9 @@ module Gitlab
# Seconds between memory checks; configurable via env var, floored at 2.
CHECK_INTERVAL_SECONDS = [ENV.fetch('SIDEKIQ_MEMORY_KILLER_CHECK_INTERVAL', 3).to_i, 2].max
# Give Sidekiq up to 30 seconds to allow existing jobs to finish after exceeding the limit
SHUTDOWN_TIMEOUT_SECONDS = ENV.fetch('SIDEKIQ_MEMORY_KILLER_SHUTDOWN_WAIT', 30).to_i
# Developers/admins should always set `memory_killer_max_memory_growth_kb` explicitly.
# If it is not set, fall back to 300 MB as an extra safety margin.
DEFAULT_MAX_MEMORY_GROWTH_KB = 300_000
 
def initialize
super
Loading
Loading
@@ -90,7 +93,7 @@ module Gitlab
 
def rss_within_range?
current_rss = nil
deadline = Time.now + GRACE_BALLOON_SECONDS.seconds
deadline = Gitlab::Metrics::System.monotonic_time + GRACE_BALLOON_SECONDS.seconds
loop do
return true unless enabled?
 
Loading
Loading
@@ -103,7 +106,7 @@ module Gitlab
return true if current_rss < soft_limit_rss
 
# RSS did not go below the soft limit within deadline, restart
break if Time.now > deadline
break if Gitlab::Metrics::System.monotonic_time > deadline
 
sleep(CHECK_INTERVAL_SECONDS)
end
Loading
Loading
@@ -159,11 +162,11 @@ module Gitlab
)
Process.kill(signal, pid)
 
deadline = Time.now + time
deadline = Gitlab::Metrics::System.monotonic_time + time
 
# we try to finish as early as all jobs finished
# so we retest that in loop
sleep(CHECK_INTERVAL_SECONDS) while enabled? && any_jobs? && Time.now < deadline
sleep(CHECK_INTERVAL_SECONDS) while enabled? && any_jobs? && Gitlab::Metrics::System.monotonic_time < deadline
end
 
def signal_pgroup(signal, explanation)
Loading
Loading
@@ -192,11 +195,11 @@ module Gitlab
 
# Estimates how much RSS growth (in KB) a running job may account for,
# based on its configured growth rate and elapsed runtime.
#
# job - Hash with :started_at recorded via the monotonic clock.
#
# Returns an Integer/Float KB estimate, capped at the job's configured
# (or default) maximum growth.
def rss_increase_by_job(job)
  memory_growth_kb = get_job_options(job, 'memory_killer_memory_growth_kb', 0).to_i
  max_memory_growth_kb = get_job_options(job, 'memory_killer_max_memory_growth_kb', DEFAULT_MAX_MEMORY_GROWTH_KB).to_i

  return 0 if memory_growth_kb.zero?

  # Monotonic clock avoids wall-clock jumps; clamp at 0 in case the
  # reading precedes the recorded start.
  time_elapsed = [Gitlab::Metrics::System.monotonic_time - job[:started_at], 0].max
  [memory_growth_kb * time_elapsed, max_memory_growth_kb].min
end
 
Loading
Loading
Loading
Loading
@@ -26,7 +26,7 @@ module Gitlab
 
def within_job(worker_class, jid, queue)
jobs_mutex.synchronize do
jobs[jid] = { worker_class: worker_class, thread: Thread.current, started_at: Time.now.to_i }
jobs[jid] = { worker_class: worker_class, thread: Thread.current, started_at: Gitlab::Metrics::System.monotonic_time }
end
 
if cancelled?(jid)
Loading
Loading
Loading
Loading
@@ -11,6 +11,7 @@ module QA
 
project = Resource::Project.fabricate_via_api! do |resource|
resource.name = 'project-to-test-mention'
resource.visibility = 'private'
end
project.visit!
 
Loading
Loading
import { mount } from '@vue/test-utils';
// Fixed misspelled local binding (`CollpasibleSection` -> `CollapsibleSection`);
// the import path is unchanged.
import CollapsibleSection from '~/jobs/components/log/collapsible_section.vue';
import { nestedSectionOpened, nestedSectionClosed } from './mock_data';

describe('Job Log Collapsible Section', () => {
  let wrapper;

  const traceEndpoint = 'jobs/335';

  const findCollapsibleLine = () => wrapper.find('.collapsible-line');

  const createComponent = (props = {}) => {
    wrapper = mount(CollapsibleSection, {
      sync: true,
      propsData: {
        ...props,
      },
    });
  };

  afterEach(() => {
    wrapper.destroy();
  });

  describe('with closed nested section', () => {
    beforeEach(() => {
      createComponent({
        section: nestedSectionClosed,
        traceEndpoint,
      });
    });

    it('renders clickable header line', () => {
      expect(findCollapsibleLine().attributes('role')).toBe('button');
    });
  });

  describe('with opened nested section', () => {
    beforeEach(() => {
      createComponent({
        section: nestedSectionOpened,
        traceEndpoint,
      });
    });

    it('renders all sections opened', () => {
      expect(wrapper.findAll('.collapsible-line').length).toBe(2);
    });
  });

  it('emits onClickCollapsibleLine on click', () => {
    createComponent({
      section: nestedSectionOpened,
      traceEndpoint,
    });

    findCollapsibleLine().trigger('click');

    expect(wrapper.emitted('onClickCollapsibleLine').length).toBe(1);
  });
});
Loading
Loading
@@ -150,3 +150,73 @@ export const collapsibleTraceIncremental = [
sections: ['section'],
},
];
// Fixture: a section header ('prepare-script') with isClosed: true that
// contains one nested section ('prepare-script-nested'), also closed.
export const nestedSectionClosed = {
offset: 5,
section_header: true,
isHeader: true,
isClosed: true,
line: {
content: [{ text: 'foo' }],
sections: ['prepare-script'],
lineNumber: 1,
},
section_duration: '00:03',
lines: [
{
section_header: true,
section_duration: '00:02',
isHeader: true,
isClosed: true,
line: {
offset: 52,
content: [{ text: 'bar' }],
sections: ['prepare-script', 'prepare-script-nested'],
lineNumber: 2,
},
lines: [
{
offset: 80,
content: [{ text: 'this is a collapsible nested section' }],
sections: ['prepare-script', 'prepare-script-nested'],
lineNumber: 3,
},
],
},
],
};
// Fixture: same shape as nestedSectionClosed but with isClosed: false on
// both the outer and nested section headers.
export const nestedSectionOpened = {
offset: 5,
section_header: true,
isHeader: true,
isClosed: false,
line: {
content: [{ text: 'foo' }],
sections: ['prepare-script'],
lineNumber: 1,
},
section_duration: '00:03',
lines: [
{
section_header: true,
section_duration: '00:02',
isHeader: true,
isClosed: false,
line: {
offset: 52,
content: [{ text: 'bar' }],
sections: ['prepare-script', 'prepare-script-nested'],
lineNumber: 2,
},
lines: [
{
offset: 80,
content: [{ text: 'this is a collapsible nested section' }],
sections: ['prepare-script', 'prepare-script-nested'],
lineNumber: 3,
},
],
},
],
};
Loading
Loading
@@ -182,24 +182,24 @@ describe Gitlab::GitalyClient do
end
 
it 'sets the gitaly-session-id in the metadata' do
  # request_kwargs now requires a timeout; the stale duplicate line that
  # passed `nil` was a dead store and has been dropped.
  results = described_class.request_kwargs('default', timeout: 1)
  expect(results[:metadata]).to include('gitaly-session-id')
end
 
context 'when RequestStore is not enabled' do
  it 'sets a different gitaly-session-id per request' do
    # Stale `nil`-argument duplicates removed; request_kwargs takes a
    # keyword timeout in the current signature.
    gitaly_session_id = described_class.request_kwargs('default', timeout: 1)[:metadata]['gitaly-session-id']

    expect(described_class.request_kwargs('default', timeout: 1)[:metadata]['gitaly-session-id']).not_to eq(gitaly_session_id)
  end
end
 
context 'when RequestStore is enabled', :request_store do
  it 'sets the same gitaly-session-id on every outgoing request metadata' do
    # Stale `nil`-argument duplicates removed; request_kwargs takes a
    # keyword timeout in the current signature.
    gitaly_session_id = described_class.request_kwargs('default', timeout: 1)[:metadata]['gitaly-session-id']

    3.times do
      expect(described_class.request_kwargs('default', timeout: 1)[:metadata]['gitaly-session-id']).to eq(gitaly_session_id)
    end
  end
end
Loading
Loading
Loading
Loading
@@ -75,12 +75,6 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
end
end
 
it 'invoke rss_within_range? twice' do
# Asserts that rss_within_range? is called exactly twice while `subject`
# runs (the surrounding context is expected to drive two loop iterations
# — confirm against the shared setup above this hunk).
expect(memory_killer).to receive(:rss_within_range?).twice
subject
end
it 'not invoke restart_sidekiq when rss in range' do
expect(memory_killer).to receive(:rss_within_range?).twice.and_return(true)
 
Loading
Loading
@@ -128,7 +122,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
expect(memory_killer).to receive(:soft_limit_rss).and_return(200)
expect(memory_killer).to receive(:hard_limit_rss).and_return(300)
 
expect(Time).to receive(:now).and_call_original
expect(Gitlab::Metrics::System).to receive(:monotonic_time).and_call_original
expect(memory_killer).not_to receive(:log_rss_out_of_range)
 
expect(subject).to be true
Loading
Loading
@@ -139,7 +133,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
expect(memory_killer).to receive(:soft_limit_rss).at_least(:once).and_return(200)
expect(memory_killer).to receive(:hard_limit_rss).at_least(:once).and_return(300)
 
expect(Time).to receive(:now).and_call_original
expect(Gitlab::Metrics::System).to receive(:monotonic_time).and_call_original
 
expect(memory_killer).to receive(:log_rss_out_of_range).with(400, 300, 200)
 
Loading
Loading
@@ -151,7 +145,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
expect(memory_killer).to receive(:soft_limit_rss).at_least(:once).and_return(200)
expect(memory_killer).to receive(:hard_limit_rss).at_least(:once).and_return(300)
 
expect(Time).to receive(:now).twice.and_call_original
expect(Gitlab::Metrics::System).to receive(:monotonic_time).twice.and_call_original
expect(memory_killer).to receive(:sleep).with(check_interval_seconds)
 
expect(memory_killer).to receive(:log_rss_out_of_range).with(400, 300, 200)
Loading
Loading
@@ -164,7 +158,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
expect(memory_killer).to receive(:soft_limit_rss).and_return(200, 200)
expect(memory_killer).to receive(:hard_limit_rss).and_return(300, 300)
 
expect(Time).to receive(:now).twice.and_call_original
expect(Gitlab::Metrics::System).to receive(:monotonic_time).twice.and_call_original
expect(memory_killer).to receive(:sleep).with(check_interval_seconds)
 
expect(memory_killer).not_to receive(:log_rss_out_of_range)
Loading
Loading
@@ -177,7 +171,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
expect(memory_killer).to receive(:soft_limit_rss).exactly(5).times.and_return(200)
expect(memory_killer).to receive(:hard_limit_rss).exactly(5).times.and_return(300)
 
expect(Time).to receive(:now).exactly(5).times.and_call_original
expect(Gitlab::Metrics::System).to receive(:monotonic_time).exactly(5).times.and_call_original
expect(memory_killer).to receive(:sleep).exactly(3).times.with(check_interval_seconds).and_call_original
 
expect(memory_killer).to receive(:log_rss_out_of_range).with(250, 300, 200)
Loading
Loading
@@ -219,7 +213,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
 
it 'send signal and return when all jobs finished' do
expect(Process).to receive(:kill).with(signal, pid).ordered
expect(Time).to receive(:now).and_call_original
expect(Gitlab::Metrics::System).to receive(:monotonic_time).and_call_original
 
expect(memory_killer).to receive(:enabled?).and_return(true)
expect(memory_killer).to receive(:any_jobs?).and_return(false)
Loading
Loading
@@ -231,7 +225,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
 
it 'send signal and wait till deadline if any job not finished' do
expect(Process).to receive(:kill).with(signal, pid).ordered
expect(Time).to receive(:now).and_call_original.at_least(:once)
expect(Gitlab::Metrics::System).to receive(:monotonic_time).and_call_original.at_least(:once)
 
expect(memory_killer).to receive(:enabled?).and_return(true).at_least(:once)
expect(memory_killer).to receive(:any_jobs?).and_return(true).at_least(:once)
Loading
Loading
@@ -351,7 +345,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
subject { memory_killer.send(:rss_increase_by_job, job) }
 
before do
  # The constant was renamed from MAX_MEMORY_KB to
  # DEFAULT_MAX_MEMORY_GROWTH_KB; stubbing the stale name was a no-op
  # against the current implementation, so only the new name is stubbed.
  stub_const("#{described_class}::DEFAULT_MAX_MEMORY_GROWTH_KB", max_memory_kb)
end
 
it 'return 0 if memory_growth_kb return 0' do
Loading
Loading
@@ -366,7 +360,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
expect(memory_killer).to receive(:get_job_options).with(job, 'memory_killer_memory_growth_kb', 0).and_return(10)
expect(memory_killer).to receive(:get_job_options).with(job, 'memory_killer_max_memory_growth_kb', max_memory_kb).and_return(100)
 
expect(Time).to receive(:now).and_return(323)
expect(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(323)
expect(subject).to eq(20)
end
 
Loading
Loading
@@ -374,7 +368,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
expect(memory_killer).to receive(:get_job_options).with(job, 'memory_killer_memory_growth_kb', 0).and_return(10)
expect(memory_killer).to receive(:get_job_options).with(job, 'memory_killer_max_memory_growth_kb', max_memory_kb).and_return(100)
 
expect(Time).to receive(:now).and_return(332)
expect(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(332)
expect(subject).to eq(100)
end
end
Loading
Loading
Loading
Loading
@@ -16,7 +16,9 @@ describe ObjectPool::DestroyWorker do
subject { described_class.new }
 
it 'requests Gitaly to remove the object pool' do
  # A single expectation matching the timed call; the stale duplicate
  # without `timeout:` would no longer match the client's invocation.
  expect(Gitlab::GitalyClient).to receive(:call)
    .with(pool.shard_name, :object_pool_service, :delete_object_pool,
          Object, timeout: Gitlab::GitalyClient.long_timeout)

  subject.perform(pool.id)
end
Loading
Loading
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment