Commit 47d1f417 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 006e8969
@@ -158,16 +158,19 @@ describe('DiffsStoreActions', () => {
const res1 = { diff_files: [], pagination: { next_page: 2 } };
const res2 = { diff_files: [], pagination: {} };
mock
- .onGet(endpointBatch, { params: { page: undefined, per_page: DIFFS_PER_PAGE, w: '1' } })
- .reply(200, res1);
- mock
- .onGet(endpointBatch, { params: { page: 2, per_page: DIFFS_PER_PAGE, w: '1' } })
+ .onGet(endpointBatch, {
+ params: { page: 1, per_page: DIFFS_PER_PAGE, w: '1', view: 'inline' },
+ })
+ .reply(200, res1)
+ .onGet(endpointBatch, {
+ params: { page: 2, per_page: DIFFS_PER_PAGE, w: '1', view: 'inline' },
+ })
.reply(200, res2);
 
testAction(
fetchDiffFilesBatch,
{},
- { endpointBatch },
+ { endpointBatch, useSingleDiffStyle: true, diffViewType: 'inline' },
[
{ type: types.SET_BATCH_LOADING, payload: true },
{ type: types.SET_RETRIEVING_BATCHES, payload: true },
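Read together, the new expectations describe a paginated batch loop: requests now start at page 1 rather than an undefined page, carry the diff view type as a `view` parameter, and keep going while the response reports a `pagination.next_page`. A minimal sketch of an action that would satisfy these assertions, assuming state fields named after the spec's own properties; this is an illustration, not the shipped implementation:

    // Sketch only: mirrors the mutation order asserted above.
    const fetchDiffFilesBatch = ({ commit, state }) => {
      const getBatch = (page = 1) =>
        axios
          .get(state.endpointBatch, {
            // w is hard-coded to '1' here because that is all the spec exercises
            params: { page, per_page: DIFFS_PER_PAGE, w: '1', view: state.diffViewType },
          })
          .then(({ data: { pagination, diff_files } }) => {
            commit(types.SET_DIFF_DATA_BATCH, { diff_files });
            commit(types.SET_BATCH_LOADING, false);
            // Recurse until the API stops advertising a next page
            return pagination.next_page ? getBatch(pagination.next_page) : null;
          });

      commit(types.SET_BATCH_LOADING, true);
      commit(types.SET_RETRIEVING_BATCHES, true);

      return getBatch().then(() => commit(types.SET_RETRIEVING_BATCHES, false));
    };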
@@ -188,7 +191,7 @@ describe('DiffsStoreActions', () => {
 
describe('fetchDiffFilesMeta', () => {
it('should fetch diff meta information', done => {
- const endpointMetadata = '/fetch/diffs_meta';
+ const endpointMetadata = '/fetch/diffs_meta?view=inline';
const mock = new MockAdapter(axios);
const data = { diff_files: [] };
const res = { data };
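The metadata endpoint now arrives with `?view=inline` already in the URL, which suggests the view parameter is appended where the endpoint is built rather than inside the action. A sketch under that assumption, with mutation names taken from the surrounding spec:

    // Sketch only: state.endpointMetadata is assumed to be prebuilt with ?view=<type>.
    const fetchDiffFilesMeta = ({ commit, state }) => {
      commit(types.SET_LOADING, true);

      return axios.get(state.endpointMetadata).then(({ data }) => {
        commit(types.SET_LOADING, false);
        commit(types.SET_MERGE_REQUEST_DIFFS, data.merge_request_diffs || []);
        commit(types.SET_DIFF_DATA, data);
        return data;
      });
    };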
@@ -213,6 +216,108 @@ describe('DiffsStoreActions', () => {
});
});
 
+ describe('when the single diff view feature flag is off', () => {
+ describe('fetchDiffFiles', () => {
+ it('should fetch diff files', done => {
+ const endpoint = '/fetch/diff/files?w=1';
+ const mock = new MockAdapter(axios);
+ const res = { diff_files: 1, merge_request_diffs: [] };
+ mock.onGet(endpoint).reply(200, res);
+
+ testAction(
+ fetchDiffFiles,
+ {},
+ {
+ endpoint,
+ diffFiles: [],
+ showWhitespace: false,
+ diffViewType: 'inline',
+ useSingleDiffStyle: false,
+ },
+ [
+ { type: types.SET_LOADING, payload: true },
+ { type: types.SET_LOADING, payload: false },
+ { type: types.SET_MERGE_REQUEST_DIFFS, payload: res.merge_request_diffs },
+ { type: types.SET_DIFF_DATA, payload: res },
+ ],
+ [],
+ () => {
+ mock.restore();
+ done();
+ },
+ );
+
+ fetchDiffFiles({ state: { endpoint }, commit: () => null })
+ .then(data => {
+ expect(data).toEqual(res);
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('fetchDiffFilesBatch', () => {
+ it('should fetch batch diff files', done => {
+ const endpointBatch = '/fetch/diffs_batch';
+ const mock = new MockAdapter(axios);
+ const res1 = { diff_files: [], pagination: { next_page: 2 } };
+ const res2 = { diff_files: [], pagination: {} };
+ mock
+ .onGet(endpointBatch, { params: { page: 1, per_page: DIFFS_PER_PAGE, w: '1' } })
+ .reply(200, res1)
+ .onGet(endpointBatch, { params: { page: 2, per_page: DIFFS_PER_PAGE, w: '1' } })
+ .reply(200, res2);
+
+ testAction(
+ fetchDiffFilesBatch,
+ {},
+ { endpointBatch, useSingleDiffStyle: false },
+ [
+ { type: types.SET_BATCH_LOADING, payload: true },
+ { type: types.SET_RETRIEVING_BATCHES, payload: true },
+ { type: types.SET_DIFF_DATA_BATCH, payload: { diff_files: res1.diff_files } },
+ { type: types.SET_BATCH_LOADING, payload: false },
+ { type: types.SET_DIFF_DATA_BATCH, payload: { diff_files: [] } },
+ { type: types.SET_BATCH_LOADING, payload: false },
+ { type: types.SET_RETRIEVING_BATCHES, payload: false },
+ ],
+ [],
+ () => {
+ mock.restore();
+ done();
+ },
+ );
+ });
+ });
+
+ describe('fetchDiffFilesMeta', () => {
+ it('should fetch diff meta information', done => {
+ const endpointMetadata = '/fetch/diffs_meta?';
+ const mock = new MockAdapter(axios);
+ const data = { diff_files: [] };
+ const res = { data };
+ mock.onGet(endpointMetadata).reply(200, res);
+
+ testAction(
+ fetchDiffFilesMeta,
+ {},
+ { endpointMetadata, useSingleDiffStyle: false },
+ [
+ { type: types.SET_LOADING, payload: true },
+ { type: types.SET_LOADING, payload: false },
+ { type: types.SET_MERGE_REQUEST_DIFFS, payload: [] },
+ { type: types.SET_DIFF_DATA, payload: { data } },
+ ],
+ [],
+ () => {
+ mock.restore();
+ done();
+ },
+ );
+ });
+ });
+ });
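Comparing the two suites, the only request-level difference the flag introduces is the extra `view` parameter (both paths now page from 1). One way to express that gating, with a hypothetical helper name:

    // Sketch only: batchRequestParams is a hypothetical helper, not code from this commit.
    const batchRequestParams = ({ useSingleDiffStyle, diffViewType }, page = 1) => ({
      page,
      per_page: DIFFS_PER_PAGE,
      w: '1',
      ...(useSingleDiffStyle ? { view: diffViewType } : {}),
    });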
describe('setHighlightedRow', () => {
it('should mark currently selected diff and set lineHash and fileHash of highlightedRow', () => {
testAction(setHighlightedRow, 'ABC_123', {}, [
@@ -55,8 +55,8 @@ describe('DiffsStoreMutations', () => {
const state = {
diffFiles: [
{
- content_sha: diffFileMockData.content_sha,
- file_hash: diffFileMockData.file_hash,
+ ...diffFileMockData,
parallel_diff_lines: [],
},
],
};
@@ -333,10 +333,10 @@ describe('DiffsStoreUtils', () => {
diff_files: [Object.assign({}, mock, { highlighted_diff_lines: undefined })],
};
 
- utils.prepareDiffData(preparedDiff);
- utils.prepareDiffData(splitInlineDiff);
- utils.prepareDiffData(splitParallelDiff);
- utils.prepareDiffData(completedDiff, [mock]);
+ preparedDiff.diff_files = utils.prepareDiffData(preparedDiff);
+ splitInlineDiff.diff_files = utils.prepareDiffData(splitInlineDiff);
+ splitParallelDiff.diff_files = utils.prepareDiffData(splitParallelDiff);
+ completedDiff.diff_files = utils.prepareDiffData(completedDiff, [mock]);
});
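Assigning the return value back onto each fixture implies prepareDiffData changed contract: instead of mutating `diff.diff_files` in place, it now returns the prepared list, optionally folding in files from a prior state. A simplified sketch of that shape (per-file preparation is reduced to the two attributes the next test checks; the merge behaviour is sketched after the new tests below):

    // Sketch only: illustrates the return-value contract, not the real helper.
    function prepareDiffData(diff, priorFiles = []) {
      const prepared = (diff.diff_files || []).map(file => ({
        renderIt: true,
        collapsed: false,
        ...file,
      }));
      // Prior files that were not re-delivered stay; new files are appended.
      const keptPrior = priorFiles.filter(
        prior => !prepared.some(file => file.file_hash === prior.file_hash),
      );
      return [...keptPrior, ...prepared];
    }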
 
it('sets the renderIt and collapsed attribute on files', () => {
@@ -390,6 +390,37 @@ describe('DiffsStoreUtils', () => {
expect(completedDiff.diff_files[0].parallel_diff_lines.length).toBeGreaterThan(0);
expect(completedDiff.diff_files[0].highlighted_diff_lines.length).toBeGreaterThan(0);
});
+
+ it('leaves files in the existing state', () => {
+ const priorFiles = [mock];
+ const fakeNewFile = {
+ ...mock,
+ content_sha: 'ABC',
+ file_hash: 'DEF',
+ };
+ const updatedFilesList = utils.prepareDiffData({ diff_files: [fakeNewFile] }, priorFiles);
+
+ expect(updatedFilesList).toEqual([mock, fakeNewFile]);
+ });
+
+ it('completes an existing split diff without overwriting existing diffs', () => {
+ // The current state has a file that has only loaded inline lines
+ const priorFiles = [{ ...mock, parallel_diff_lines: [] }];
+ // The next (batch) load loads two files: the other half of that file, and a new file
+ const fakeBatch = [
+ { ...mock, highlighted_diff_lines: undefined },
+ { ...mock, highlighted_diff_lines: undefined, content_sha: 'ABC', file_hash: 'DEF' },
+ ];
+ const updatedFilesList = utils.prepareDiffData({ diff_files: fakeBatch }, priorFiles);
+
+ expect(updatedFilesList).toEqual([
+ mock,
+ jasmine.objectContaining({
+ content_sha: 'ABC',
+ file_hash: 'DEF',
+ }),
+ ]);
+ });
});
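These two tests pin down the merge semantics for re-delivered files: matching is by file_hash and content_sha, and a file that arrives again only fills in whichever line list it was previously missing. A sketch of that per-file merge; the function name is hypothetical:

    // Sketch only: combines an existing file with a re-delivered copy of it.
    function mergeTwoFiles(existing, incoming) {
      const keepInline = (existing.highlighted_diff_lines || []).length > 0;
      const keepParallel = (existing.parallel_diff_lines || []).length > 0;

      return {
        ...existing,
        highlighted_diff_lines: keepInline
          ? existing.highlighted_diff_lines
          : incoming.highlighted_diff_lines,
        parallel_diff_lines: keepParallel
          ? existing.parallel_diff_lines
          : incoming.parallel_diff_lines,
      };
    }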
 
describe('isDiscussionApplicableToLine', () => {
@@ -86,6 +86,22 @@ describe Backup::Repository do
expect(pool_repository.object_pool.exists?).to be(true)
end
end
+
+ it 'cleans existing repositories' do
+ wiki_repository_spy = spy(:wiki)
+ allow_next_instance_of(ProjectWiki) do |project_wiki|
+ allow(project_wiki).to receive(:repository).and_return(wiki_repository_spy)
+ end
+
+ expect_next_instance_of(Repository) do |repo|
+ expect(repo).to receive(:remove)
+ end
+
+ subject.restore
+
+ expect(wiki_repository_spy).to have_received(:remove)
+ end
end
 
describe '#empty_repo?' do
@@ -289,5 +289,86 @@ describe Projects::Operations::UpdateService do
end
end
end
+
+ context 'prometheus integration' do
+ context 'prometheus params were passed into service' do
+ let(:prometheus_service) do
+ build_stubbed(:prometheus_service, project: project, properties: {
+ api_url: "http://example.prometheus.com",
+ manual_configuration: "0"
+ })
+ end
+
+ let(:prometheus_params) do
+ {
+ "type" => "PrometheusService",
+ "title" => nil,
+ "active" => true,
+ "properties" => { "api_url" => "http://example.prometheus.com", "manual_configuration" => "0" },
+ "instance" => false,
+ "push_events" => true,
+ "issues_events" => true,
+ "merge_requests_events" => true,
+ "tag_push_events" => true,
+ "note_events" => true,
+ "category" => "monitoring",
+ "default" => false,
+ "wiki_page_events" => true,
+ "pipeline_events" => true,
+ "confidential_issues_events" => true,
+ "commit_events" => true,
+ "job_events" => true,
+ "confidential_note_events" => true,
+ "deployment_events" => false,
+ "description" => nil,
+ "comment_on_event_enabled" => true
+ }
+ end
+
+ let(:params) do
+ {
+ prometheus_integration_attributes: {
+ api_url: 'http://new.prometheus.com',
+ manual_configuration: '1'
+ }
+ }
+ end
+
+ it 'uses Project#find_or_initialize_service to include instance defined defaults and pass them to Projects::UpdateService', :aggregate_failures do
+ project_update_service = double(Projects::UpdateService)
+
+ prometheus_update_params = prometheus_params.merge('properties' => {
+ 'api_url' => 'http://new.prometheus.com',
+ 'manual_configuration' => '1'
+ })
+
+ expect(project)
+ .to receive(:find_or_initialize_service)
+ .with('prometheus')
+ .and_return(prometheus_service)
+
+ expect(Projects::UpdateService)
+ .to receive(:new)
+ .with(project, user, { prometheus_service_attributes: prometheus_update_params })
+ .and_return(project_update_service)
+
+ expect(project_update_service).to receive(:execute)
+
+ subject.execute
+ end
+ end
+
+ context 'prometheus params were not passed into service' do
+ let(:params) { { something: :else } }
+
+ it 'does not pass any prometheus params into Projects::UpdateService', :aggregate_failures do
+ project_update_service = double(Projects::UpdateService)
+
+ expect(project).not_to receive(:find_or_initialize_service)
+ expect(Projects::UpdateService)
+ .to receive(:new)
+ .with(project, user, {})
+ .and_return(project_update_service)
+ expect(project_update_service).to receive(:execute)
+
+ subject.execute
+ end
+ end
+ end
end
end
@@ -497,6 +497,63 @@ describe Projects::UpdateService do
update_project(project, user, { name: 'New name' })
end
end
+
+ context 'when updating nested attributes for prometheus service' do
+ context 'prometheus service exists' do
+ let(:prometheus_service_attributes) do
+ attributes_for(:prometheus_service,
+ project: project,
+ properties: { api_url: "http://new.prometheus.com", manual_configuration: "0" }
+ )
+ end
+
+ let!(:prometheus_service) do
+ create(:prometheus_service,
+ project: project,
+ properties: { api_url: "http://old.prometheus.com", manual_configuration: "0" }
+ )
+ end
+
+ it 'updates existing record' do
+ expect { update_project(project, user, prometheus_service_attributes: prometheus_service_attributes) }
+ .to change { prometheus_service.reload.api_url }
+ .from("http://old.prometheus.com")
+ .to("http://new.prometheus.com")
+ end
+ end
+
+ context 'prometheus service does not exist' do
+ context 'valid parameters' do
+ let(:prometheus_service_attributes) do
+ attributes_for(:prometheus_service,
+ project: project,
+ properties: { api_url: "http://example.prometheus.com", manual_configuration: "0" }
+ )
+ end
+
+ it 'creates new record' do
+ expect { update_project(project, user, prometheus_service_attributes: prometheus_service_attributes) }
+ .to change { ::PrometheusService.where(project: project).count }
+ .from(0)
+ .to(1)
+ end
+ end
+
+ context 'invalid parameters' do
+ let(:prometheus_service_attributes) do
+ attributes_for(:prometheus_service,
+ project: project,
+ properties: { api_url: nil, manual_configuration: "1" }
+ )
+ end
+
+ it 'does not create new record' do
+ expect { update_project(project, user, prometheus_service_attributes: prometheus_service_attributes) }
+ .not_to change { ::PrometheusService.where(project: project).count }
+ end
+ end
+ end
+ end
end
 
describe '#run_auto_devops_pipeline?' do
@@ -5413,9 +5413,9 @@ handle-thing@^2.0.0:
integrity sha512-d4sze1JNC454Wdo2fkuyzCr6aHcbL6PGGuFAz0Li/NcOm1tCHGnWDRmJP85dh9IhQErTc2svWFEX5xHIOo//kQ==
 
handlebars@^4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.1.2.tgz#b6b37c1ced0306b221e094fc7aca3ec23b131b67"
integrity sha512-nvfrjqvt9xQ8Z/w0ijewdD/vvWDTOweBUm96NTr66Wfvo1mJenBLwcYmPs3TIBP5ruzYGD7Hx/DaM9RmhroGPw==
version "4.7.2"
resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.2.tgz#01127b3840156a0927058779482031afe0e730d7"
integrity sha512-4PwqDL2laXtTWZghzzCtunQUTLbo31pcCJrd/B/9JP8XbhVzpS5ZXuKqlOzsd1rtcaLo4KqAn8nl8mkknS4MHw==
dependencies:
neo-async "^2.6.0"
optimist "^0.6.1"