Commit e4dffdfe authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 0ab47b99
@@ -68,12 +68,20 @@ module QA
         Page::Project::Menu.perform(&:click_ci_cd_pipelines)
         Page::Project::Pipeline::Index.perform(&:click_on_latest_pipeline)
 
-        Page::Project::Pipeline::Show.perform do |pipeline|
-          expect(pipeline).to be_running(wait: max_wait)
-          expect(pipeline).to have_build('test-success', status: :success, wait: max_wait)
-          expect(pipeline).to have_build('test-failure', status: :failed, wait: max_wait)
-          expect(pipeline).to have_build('test-tags', status: :pending, wait: max_wait)
-          expect(pipeline).to have_build('test-artifacts', status: :success, wait: max_wait)
-        end
+        {
+          'test-success': :passed,
+          'test-failure': :failed,
+          'test-tags': :pending,
+          'test-artifacts': :passed
+        }.each do |job, status|
+          Page::Project::Pipeline::Show.perform do |pipeline|
+            pipeline.click_job(job)
+          end
+
+          Page::Project::Job::Show.perform do |show|
+            expect(show).to public_send("be_#{status}")
+            show.click_element(:pipeline_path, Page::Project::Pipeline::Show)
+          end
+        end
       end
     end
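The rewritten block drives the browser through each job page instead of reading every build status off the pipeline graph: it iterates a hash of job names and expected states, clicks into each job, and builds the RSpec predicate matcher dynamically with public_send("be_#{status}"), which resolves to be_passed, be_failed, or be_pending and calls the corresponding predicate on the job page object. A minimal standalone sketch of that matcher pattern, using an illustrative stand-in class rather than the real page objects:

require 'rspec/autorun'

# Illustrative stand-in for a page object that exposes status predicates,
# the way Page::Project::Job::Show does; not code from the commit.
class FakeJobShow
  def initialize(status)
    @status = status
  end

  def passed?
    @status == :passed
  end

  def failed?
    @status == :failed
  end
end

RSpec.describe 'dynamically built predicate matchers' do
  { 'test-success': :passed, 'test-failure': :failed }.each do |job, status|
    it "sees #{job} as #{status}" do
      show = FakeJobShow.new(status)

      # public_send on the example builds be_passed / be_failed at runtime,
      # which RSpec turns into a call to show.passed? / show.failed?.
      expect(show).to public_send("be_#{status}")
    end
  end
end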
Loading
Loading
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::LoopingBatcher, :use_clean_rails_memory_store_caching do
  describe '#next_range!' do
    let(:model_class) { LfsObject }
    let(:key) { 'looping_batcher_spec' }
    let(:batch_size) { 2 }

    subject { described_class.new(model_class, key: key, batch_size: batch_size).next_range! }

    context 'when there are no records' do
      it { is_expected.to be_nil }
    end

    context 'when there are records' do
      let!(:records) { create_list(model_class.underscore, 3) }

      context 'when it has never been called before' do
        it { is_expected.to be_a Range }

        it 'starts from the beginning' do
          expect(subject.first).to eq(1)
        end

        it 'ends at a full batch' do
          expect(subject.last).to eq(records.second.id)
        end

        context 'when the batch size is greater than the number of records' do
          let(:batch_size) { 5 }

          it 'ends at the last ID' do
            expect(subject.last).to eq(records.last.id)
          end
        end
      end

      context 'when it was called before' do
        context 'when the previous batch included the end of the table' do
          before do
            described_class.new(model_class, key: key, batch_size: model_class.count).next_range!
          end

          it 'starts from the beginning' do
            expect(subject).to eq(1..records.second.id)
          end
        end

        context 'when the previous batch did not include the end of the table' do
          before do
            described_class.new(model_class, key: key, batch_size: model_class.count - 1).next_range!
          end

          it 'starts after the previous batch' do
            expect(subject).to eq(records.last.id..records.last.id)
          end
        end

        context 'if cache is cleared' do
          it 'starts from the beginning' do
            Rails.cache.clear

            expect(subject).to eq(1..records.second.id)
          end
        end
      end
    end
  end
end
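Taken together, these examples pin down the contract of Gitlab::LoopingBatcher#next_range!: it returns nil when the table is empty, otherwise hands out an ID range of at most batch_size records, remembers its position in the Rails cache under key, and wraps around to the start of the table once the previous batch reached the end or the cache was cleared. A hedged sketch of how a periodic caller might consume it; the caller and the verify_lfs_object helper are hypothetical and not part of this commit:

# Hypothetical consumer, based only on the behaviour exercised above.
def verify_next_batch
  range = Gitlab::LoopingBatcher
    .new(LfsObject, key: 'lfs_object_verification', batch_size: 1000)
    .next_range!

  return if range.nil? # nothing in the table yet

  # Work through just the IDs handed out for this iteration; the next call
  # resumes after range.last, or starts over once the end of the table is hit.
  LfsObject.where(id: range).find_each do |lfs_object|
    verify_lfs_object(lfs_object) # hypothetical per-record step
  end
end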
@@ -1117,6 +1117,10 @@ describe Ci::Pipeline, :mailer do
   end
 
   describe 'pipeline caching' do
+    before do
+      pipeline.config_source = 'repository_source'
+    end
+
     it 'performs ExpirePipelinesCacheWorker' do
       expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id)
 
@@ -3,9 +3,9 @@
 require 'spec_helper'
 
 describe ExpirePipelineCacheWorker do
-  let(:user) { create(:user) }
-  let(:project) { create(:project) }
-  let(:pipeline) { create(:ci_pipeline, project: project) }
+  let_it_be(:user) { create(:user) }
+  let_it_be(:project) { create(:project) }
+  let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
 
   subject { described_class.new }
 
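let_it_be comes from the test-prof gem, which GitLab's spec suite already loads: the factory runs once for the whole example group (in a before-all hook) instead of once per example, so this change trades per-example record creation for a single set of shared fixtures. A minimal illustration of the difference, not part of the commit:

require 'spec_helper'

RSpec.describe 'let vs let_it_be' do
  let(:per_example_project) { create(:project) }   # factory runs in every example that references it
  let_it_be(:shared_project) { create(:project) }  # factory runs once, before the group

  it 'sees both records as persisted' do
    expect(per_example_project).to be_persisted
    expect(shared_project).to be_persisted
  end
end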
@@ -22,5 +22,14 @@ describe ExpirePipelineCacheWorker do
 
       subject.perform(617748)
     end
+
+    it "doesn't do anything if the pipeline cannot be cached" do
+      allow_any_instance_of(Ci::Pipeline).to receive(:cacheable?).and_return(false)
+
+      expect_any_instance_of(Ci::ExpirePipelineCacheService).not_to receive(:execute)
+      expect_any_instance_of(Gitlab::EtagCaching::Store).not_to receive(:touch)
+
+      subject.perform(pipeline.id)
+    end
   end
 end
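The new example covers the worker's do-nothing path: when Ci::Pipeline#cacheable? is false, neither Ci::ExpirePipelineCacheService nor the ETag store may be touched. This is also the likely reason the pipeline model spec above pins config_source to 'repository_source', so that its pipeline still counts as cacheable and the cache-expiry worker keeps being enqueued. The worker's body is not part of this diff; a hedged sketch of the guard shape the examples imply:

# Assumed shape of the method under test, inferred from the examples above;
# not the committed worker code.
class ExpirePipelineCacheWorker
  include ApplicationWorker

  def perform(pipeline_id)
    pipeline = Ci::Pipeline.find_by_id(pipeline_id)

    return unless pipeline            # the earlier example that calls perform(617748)
    return unless pipeline.cacheable? # the new "cannot be cached" example

    Ci::ExpirePipelineCacheService.new.execute(pipeline)
  end
end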