Commit afe2b984 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 5a6b36b6
Showing 482 additions and 169 deletions
@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
 
ActiveRecord::Schema.define(version: 2020_03_04_090155) do
ActiveRecord::Schema.define(version: 2020_03_04_160823) do
 
# These are extensions that must be enabled in order to support this database
enable_extension "pg_trgm"
@@ -3898,6 +3898,7 @@ ActiveRecord::Schema.define(version: 2020_03_04_090155) do
t.boolean "template", default: false
t.index ["project_id"], name: "index_services_on_project_id"
t.index ["template"], name: "index_services_on_template"
t.index ["type", "template"], name: "index_services_on_type_and_template", unique: true, where: "(template IS TRUE)"
t.index ["type"], name: "index_services_on_type"
end
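
The index_services_on_type_and_template line above is the partial unique index this commit adds to the services table. The migration that creates it is not included in this excerpt; as a hedged sketch only (class name illustrative), it could be built with GitLab's standard migration helpers:

# Hedged sketch, not the actual migration from this commit.
class AddUniqueIndexOnServiceTypeAndTemplate < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    # Partial unique index: only rows where template IS TRUE participate.
    add_concurrent_index :services, [:type, :template],
                         unique: true,
                         where: 'template IS TRUE',
                         name: 'index_services_on_type_and_template'
  end

  def down
    remove_concurrent_index_by_name :services, 'index_services_on_type_and_template'
  end
end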
 
# frozen_string_literal: true
module Gitlab
module Database
class ConnectionTimer
DEFAULT_INTERVAL = 3600
RANDOMIZATION_INTERVAL = 600
class << self
def configure
yield self
end
def starting_now
# add a small amount of randomization to the interval, so reconnects don't all occur at once
new(interval_with_randomization, current_clock_value)
end
attr_writer :interval
def interval
@interval ||= DEFAULT_INTERVAL
end
def interval_with_randomization
interval + rand(RANDOMIZATION_INTERVAL) if interval.positive?
end
def current_clock_value
Concurrent.monotonic_time
end
end
attr_reader :interval, :starting_clock_value
def initialize(interval, starting_clock_value)
@interval = interval
@starting_clock_value = starting_clock_value
end
def expired?
interval&.positive? && self.class.current_clock_value > (starting_clock_value + interval)
end
def reset!
@starting_clock_value = self.class.current_clock_value
end
end
end
end
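
ConnectionTimer is driven entirely through the class-level API shown above. A minimal usage sketch (the call site and the interval value are assumptions, not part of this diff):

# Illustrative usage only, e.g. from an initializer.
Gitlab::Database::ConnectionTimer.configure do |config|
  config.interval = 1800 # seconds; DEFAULT_INTERVAL (3600) is used when unset
end

timer = Gitlab::Database::ConnectionTimer.starting_now # interval plus up to 600s of jitter
timer.expired? # => false until the randomized interval elapses on the monotonic clock
timer.reset!   # restart the countdown from the current monotonic clock value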
# frozen_string_literal: true
module Gitlab
module Database
module PostgresqlAdapter
module ForceDisconnectableMixin
extend ActiveSupport::Concern
prepended do
set_callback :checkin, :after, :force_disconnect_if_old!
end
def force_disconnect_if_old!
if force_disconnect_timer.expired?
disconnect!
reset_force_disconnect_timer!
end
end
def reset_force_disconnect_timer!
force_disconnect_timer.reset!
end
def force_disconnect_timer
@force_disconnect_timer ||= ConnectionTimer.starting_now
end
end
end
end
end
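
Because the mixin registers its checkin callback from a prepended block, it only takes effect once it is prepended into the PostgreSQL adapter; that wiring is not shown in this diff. A hedged sketch of how it could look:

# Hedged sketch of the hook-up (not part of this diff): prepend the mixin so
# every connection checked back into the pool runs force_disconnect_if_old!.
require 'active_record/connection_adapters/postgresql_adapter'

ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.prepend(
  Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin
)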
@@ -43,10 +43,7 @@ module Gitlab
 
def read_tree_hash
path = File.join(@shared.export_path, 'project.json')
dedup_entries = large_project?(path) &&
Feature.enabled?(:dedup_project_import_metadata, project.group)
@tree_loader.load(path, dedup_entries: dedup_entries)
@tree_loader.load(path, dedup_entries: large_project?(path))
rescue => e
Rails.logger.error("Import/Export error: #{e.message}") # rubocop:disable Gitlab/RailsLogger
raise Gitlab::ImportExport::Error.new('Incorrect JSON format')
@@ -11,6 +11,7 @@ class GitlabDanger
karma
database
commit_messages
telemetry
].freeze
 
CI_ONLY_RULES ||= %w[
@@ -18480,6 +18480,9 @@ msgstr ""
msgid "Specific Runners"
msgstr ""
 
msgid "Specified URL cannot be used."
msgstr ""
msgid "Specify an e-mail address regex pattern to identify default internal users."
msgstr ""
 
@@ -22296,7 +22299,7 @@ msgstr ""
msgid "WikiMarkdownDocs|documentation"
msgstr ""
 
msgid "WikiMarkdownTip|To link to a (new) page, simply type %{link_example}"
msgid "WikiMarkdownTip|To link to a (new) page, simply type <code class=\"js-markup-link-example\">%{link_example}</code>"
msgstr ""
 
msgid "WikiNewPageTip|Tip: You can specify the full path for the new file. We will automatically create any missing directories."
@@ -530,41 +530,6 @@ describe ApplicationController do
 
expect(controller.last_payload).to include('correlation_id' => 'new-id')
end
context '422 errors' do
it 'logs a response with a string' do
response = spy(ActionDispatch::Response, status: 422, body: 'Hello world', content_type: 'application/json', cookies: {})
allow(controller).to receive(:response).and_return(response)
get :index
expect(controller.last_payload[:response]).to eq('Hello world')
end
it 'logs a response with an array' do
body = ['I want', 'my hat back']
response = spy(ActionDispatch::Response, status: 422, body: body, content_type: 'application/json', cookies: {})
allow(controller).to receive(:response).and_return(response)
get :index
expect(controller.last_payload[:response]).to eq(body)
end
it 'does not log a string with an empty body' do
response = spy(ActionDispatch::Response, status: 422, body: nil, content_type: 'application/json', cookies: {})
allow(controller).to receive(:response).and_return(response)
get :index
expect(controller.last_payload.has_key?(:response)).to be_falsey
end
it 'does not log an HTML body' do
response = spy(ActionDispatch::Response, status: 422, body: 'This is a test', content_type: 'application/html', cookies: {})
allow(controller).to receive(:response).and_return(response)
get :index
expect(controller.last_payload.has_key?(:response)).to be_falsey
end
end
end
 
describe '#access_denied' do
@@ -28,10 +28,24 @@ describe Import::GiteaController do
 
describe "GET status" do
it_behaves_like 'a GitHub-ish import controller: GET status' do
let(:extra_assign_expectations) { { gitea_host_url: host_url } }
before do
assign_host_url
end
let(:extra_assign_expectations) { { gitea_host_url: host_url } }
context 'when host url is local or not http' do
%w[https://localhost:3000 http://192.168.0.1 ftp://testing].each do |url|
let(:host_url) { url }
it 'denies network request' do
get :status, format: :json
expect(controller).to redirect_to(new_import_url)
expect(flash[:alert]).to eq('Specified URL cannot be used.')
end
end
end
end
end
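
The new examples above expect local or non-HTTP Gitea host URLs to be rejected with the 'Specified URL cannot be used.' alert, the same string added to the locale file in this commit. The controller-side change is not included in this excerpt; a hedged sketch of a check that would satisfy the spec, with verify_blocked_uri and provider_url as illustrative names:

# Hedged sketch only; method and helper names are illustrative.
def verify_blocked_uri
  Gitlab::UrlBlocker.validate!(
    provider_url,                 # the user-supplied Gitea host URL
    allow_localhost: false,
    allow_local_network: false,
    schemes: %w[http https]
  )
rescue Gitlab::UrlBlocker::BlockedUrlError
  redirect_to new_import_url, alert: _('Specified URL cannot be used.')
end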
 
# frozen_string_literal: true
require 'spec_helper'
describe MetricsDashboard, '(JavaScript fixtures)', type: :controller do
include JavaScriptFixturesHelpers
include MetricsDashboardHelpers
let(:user) { create(:user) }
let(:project) { project_with_dashboard('.gitlab/dashboards/test.yml') }
let(:environment) { create(:environment, project: project) }
let(:params) { { environment: environment } }
before(:all) do
clean_frontend_fixtures('metrics_dashboard/')
end
controller(::ApplicationController) do
include MetricsDashboard
end
before do
sign_in(user)
project.add_maintainer(user)
allow(controller).to receive(:project).and_return(project)
allow(controller)
.to receive(:metrics_dashboard_params)
.and_return(params)
end
after do
remove_repository(project)
end
it 'metrics_dashboard/environment_metrics_dashboard.json' do
routes.draw { get "metrics_dashboard" => "anonymous#metrics_dashboard" }
response = get :metrics_dashboard, format: :json
expect(response).to be_successful
end
end
@@ -10,16 +10,21 @@ import TimeSeries from '~/monitoring/components/charts/time_series.vue';
import * as types from '~/monitoring/stores/mutation_types';
import {
deploymentData,
metricsDashboardPayload,
mockedQueryResultPayload,
mockedQueryResultFixture,
metricsDashboardViewModel,
mockProjectDir,
mockHost,
} from '../../mock_data';
import * as iconUtils from '~/lib/utils/icon_utils';
import { getJSONFixture } from '../../../helpers/fixtures';
 
const mockSvgPathContent = 'mockSvgPathContent';
 
const metricsDashboardFixture = getJSONFixture(
'metrics_dashboard/environment_metrics_dashboard.json',
);
const metricsDashboardPayload = metricsDashboardFixture.dashboard;
jest.mock('lodash/throttle', () =>
// this throttle mock executes immediately
jest.fn(func => {
@@ -59,13 +64,11 @@ describe('Time series component', () => {
 
store.commit(`monitoringDashboard/${types.RECEIVE_DEPLOYMENTS_DATA_SUCCESS}`, deploymentData);
 
// Mock data contains 2 panel groups, with 1 and 2 panels respectively
store.commit(
`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
mockedQueryResultPayload,
mockedQueryResultFixture,
);
// Pick the second panel group and the first panel in it
// dashboard is a dynamically generated fixture and stored at environment_metrics_dashboard.json
[mockGraphData] = store.state.monitoringDashboard.dashboard.panelGroups[0].panels;
});
 
@@ -189,9 +192,8 @@ describe('Time series component', () => {
});
 
it('formats tooltip content', () => {
const name = 'Total';
const value = '5.556MB';
const name = 'Status Code';
const value = '5.556';
const dataIndex = 0;
const seriesLabel = timeSeriesChart.find(GlChartSeriesLabel);
 
@@ -399,7 +401,7 @@ describe('Time series component', () => {
});
 
it('formats and rounds to 2 decimal places', () => {
expect(dataFormatter(0.88888)).toBe('0.89MB');
expect(dataFormatter(0.88888)).toBe('0.89');
});
 
it('deployment formatter is set as is required to display a tooltip', () => {
@@ -441,7 +443,7 @@ describe('Time series component', () => {
it('constructs a label for the chart y-axis', () => {
const { yAxis } = getChartOptions();
 
expect(yAxis[0].name).toBe('Total Memory Used');
expect(yAxis[0].name).toBe('Requests / Sec');
});
});
});
@@ -544,7 +546,7 @@ describe('Time series component', () => {
store = createStore();
const graphData = cloneDeep(metricsDashboardViewModel.panelGroups[0].panels[3]);
graphData.metrics.forEach(metric =>
Object.assign(metric, { result: mockedQueryResultPayload.result }),
Object.assign(metric, { result: mockedQueryResultFixture.result }),
);
 
timeSeriesChart = makeTimeSeriesChart(graphData, 'area-chart');
@@ -6,6 +6,7 @@ import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
import { metricStates } from '~/monitoring/constants';
import Dashboard from '~/monitoring/components/dashboard.vue';
import { getJSONFixture } from '../../../../spec/frontend/helpers/fixtures';
 
import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
@@ -15,16 +16,20 @@ import { createStore } from '~/monitoring/stores';
import * as types from '~/monitoring/stores/mutation_types';
import { setupComponentStore, propsData } from '../init_utils';
import {
metricsDashboardPayload,
mockedQueryResultPayload,
metricsDashboardViewModel,
environmentData,
dashboardGitResponse,
mockedQueryResultFixture,
} from '../mock_data';
 
const localVue = createLocalVue();
const expectedPanelCount = 4;
 
const metricsDashboardFixture = getJSONFixture(
'metrics_dashboard/environment_metrics_dashboard.json',
);
const metricsDashboardPayload = metricsDashboardFixture.dashboard;
describe('Dashboard', () => {
let store;
let wrapper;
@@ -196,7 +201,7 @@ describe('Dashboard', () => {
);
wrapper.vm.$store.commit(
`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`,
mockedQueryResultPayload,
mockedQueryResultFixture,
);
 
return wrapper.vm.$nextTick().then(() => {
@@ -242,95 +242,75 @@ export const metricsNewGroupsAPIResponse = [
},
];
 
const metricsResult = [
{
metric: {},
values: [
[1563272065.589, '10.396484375'],
[1563272125.589, '10.333984375'],
[1563272185.589, '10.333984375'],
[1563272245.589, '10.333984375'],
[1563272305.589, '10.333984375'],
[1563272365.589, '10.333984375'],
[1563272425.589, '10.38671875'],
[1563272485.589, '10.333984375'],
[1563272545.589, '10.333984375'],
[1563272605.589, '10.333984375'],
[1563272665.589, '10.333984375'],
[1563272725.589, '10.333984375'],
[1563272785.589, '10.396484375'],
[1563272845.589, '10.333984375'],
[1563272905.589, '10.333984375'],
[1563272965.589, '10.3984375'],
[1563273025.589, '10.337890625'],
[1563273085.589, '10.34765625'],
[1563273145.589, '10.337890625'],
[1563273205.589, '10.337890625'],
[1563273265.589, '10.337890625'],
[1563273325.589, '10.337890625'],
[1563273385.589, '10.337890625'],
[1563273445.589, '10.337890625'],
[1563273505.589, '10.337890625'],
[1563273565.589, '10.337890625'],
[1563273625.589, '10.337890625'],
[1563273685.589, '10.337890625'],
[1563273745.589, '10.337890625'],
[1563273805.589, '10.337890625'],
[1563273865.589, '10.390625'],
[1563273925.589, '10.390625'],
],
},
];
export const mockedEmptyResult = {
metricId: '1_response_metrics_nginx_ingress_throughput_status_code',
result: [],
};
 
export const mockedEmptyThroughputResult = {
metricId: 'undefined_response_metrics_nginx_ingress_16_throughput_status_code',
result: [],
};
export const mockedQueryResultPayload = {
metricId: '12_system_metrics_kubernetes_container_memory_total',
result: [
{
metric: {},
values: [
[1563272065.589, '10.396484375'],
[1563272125.589, '10.333984375'],
[1563272185.589, '10.333984375'],
[1563272245.589, '10.333984375'],
[1563272305.589, '10.333984375'],
[1563272365.589, '10.333984375'],
[1563272425.589, '10.38671875'],
[1563272485.589, '10.333984375'],
[1563272545.589, '10.333984375'],
[1563272605.589, '10.333984375'],
[1563272665.589, '10.333984375'],
[1563272725.589, '10.333984375'],
[1563272785.589, '10.396484375'],
[1563272845.589, '10.333984375'],
[1563272905.589, '10.333984375'],
[1563272965.589, '10.3984375'],
[1563273025.589, '10.337890625'],
[1563273085.589, '10.34765625'],
[1563273145.589, '10.337890625'],
[1563273205.589, '10.337890625'],
[1563273265.589, '10.337890625'],
[1563273325.589, '10.337890625'],
[1563273385.589, '10.337890625'],
[1563273445.589, '10.337890625'],
[1563273505.589, '10.337890625'],
[1563273565.589, '10.337890625'],
[1563273625.589, '10.337890625'],
[1563273685.589, '10.337890625'],
[1563273745.589, '10.337890625'],
[1563273805.589, '10.337890625'],
[1563273865.589, '10.390625'],
[1563273925.589, '10.390625'],
],
},
],
result: metricsResult,
};
 
export const mockedQueryResultPayloadCoresTotal = {
metricId: '13_system_metrics_kubernetes_container_cores_total',
result: [
{
metric: {},
values: [
[1563272065.589, '9.396484375'],
[1563272125.589, '9.333984375'],
[1563272185.589, '9.333984375'],
[1563272245.589, '9.333984375'],
[1563272305.589, '9.333984375'],
[1563272365.589, '9.333984375'],
[1563272425.589, '9.38671875'],
[1563272485.589, '9.333984375'],
[1563272545.589, '9.333984375'],
[1563272605.589, '9.333984375'],
[1563272665.589, '9.333984375'],
[1563272725.589, '9.333984375'],
[1563272785.589, '9.396484375'],
[1563272845.589, '9.333984375'],
[1563272905.589, '9.333984375'],
[1563272965.589, '9.3984375'],
[1563273025.589, '9.337890625'],
[1563273085.589, '9.34765625'],
[1563273145.589, '9.337890625'],
[1563273205.589, '9.337890625'],
[1563273265.589, '9.337890625'],
[1563273325.589, '9.337890625'],
[1563273385.589, '9.337890625'],
[1563273445.589, '9.337890625'],
[1563273505.589, '9.337890625'],
[1563273565.589, '9.337890625'],
[1563273625.589, '9.337890625'],
[1563273685.589, '9.337890625'],
[1563273745.589, '9.337890625'],
[1563273805.589, '9.337890625'],
[1563273865.589, '9.390625'],
[1563273925.589, '9.390625'],
],
},
],
result: metricsResult,
};
export const mockedQueryResultFixture = {
// First metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
metricId: 'undefined_response_metrics_nginx_ingress_throughput_status_code',
result: metricsResult,
};
export const mockedQueryResultFixtureStatusCode = {
metricId: 'undefined_response_metrics_nginx_ingress_latency_pod_average',
result: metricsResult,
};
 
const extraEnvironmentData = new Array(15).fill(null).map((_, idx) => ({
@@ -4,11 +4,16 @@ import * as types from '~/monitoring/stores/mutation_types';
import { metricStates } from '~/monitoring/constants';
import {
environmentData,
metricsDashboardPayload,
mockedEmptyResult,
mockedQueryResultPayload,
mockedQueryResultPayloadCoresTotal,
mockedEmptyThroughputResult,
mockedQueryResultFixture,
mockedQueryResultFixtureStatusCode,
} from '../mock_data';
import { getJSONFixture } from '../../helpers/fixtures';
const metricsDashboardFixture = getJSONFixture(
'metrics_dashboard/environment_metrics_dashboard.json',
);
const metricsDashboardPayload = metricsDashboardFixture.dashboard;
 
describe('Monitoring store Getters', () => {
describe('getMetricStates', () => {
@@ -55,14 +60,14 @@ describe('Monitoring store Getters', () => {
 
it('on an empty metric with no result, returns NO_DATA', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyResult);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
 
expect(getMetricStates()).toEqual([metricStates.NO_DATA]);
});
 
it('on a metric with a result, returns OK', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
 
expect(getMetricStates()).toEqual([metricStates.OK]);
});
@@ -78,8 +83,8 @@ describe('Monitoring store Getters', () => {
 
it('on multiple metrics with results, returns OK', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayloadCoresTotal);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
 
expect(getMetricStates()).toEqual([metricStates.OK]);
 
@@ -110,7 +115,7 @@ describe('Monitoring store Getters', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
 
// A success in 1 group
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
// An error in 2 groups
mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
metricId: groups[0].panels[1].metrics[0].metricId,
@@ -176,38 +181,38 @@ describe('Monitoring store Getters', () => {
 
it('an empty metric, returns empty', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyResult);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedEmptyThroughputResult);
 
expect(metricsWithData()).toEqual([]);
});
 
it('a metric with results, it returns a metric', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
 
expect(metricsWithData()).toEqual([mockedQueryResultPayload.metricId]);
expect(metricsWithData()).toEqual([mockedQueryResultFixture.metricId]);
});
 
it('multiple metrics with results, it returns multiple metrics', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayloadCoresTotal);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
 
expect(metricsWithData()).toEqual([
mockedQueryResultPayload.metricId,
mockedQueryResultPayloadCoresTotal.metricId,
mockedQueryResultFixture.metricId,
mockedQueryResultFixtureStatusCode.metricId,
]);
});
 
it('multiple metrics with results, it returns metrics filtered by group', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](state, metricsDashboardPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayload);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultPayloadCoresTotal);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixture);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, mockedQueryResultFixtureStatusCode);
 
// First group has metrics
expect(metricsWithData(state.dashboard.panelGroups[0].key)).toEqual([
mockedQueryResultPayload.metricId,
mockedQueryResultPayloadCoresTotal.metricId,
mockedQueryResultFixture.metricId,
mockedQueryResultFixtureStatusCode.metricId,
]);
 
// Second group has no metrics
@@ -5,7 +5,13 @@ import * as types from '~/monitoring/stores/mutation_types';
import state from '~/monitoring/stores/state';
import { metricStates } from '~/monitoring/constants';
 
import { metricsDashboardPayload, deploymentData, dashboardGitResponse } from '../mock_data';
import { deploymentData, dashboardGitResponse } from '../mock_data';
import { getJSONFixture } from '../../helpers/fixtures';
const metricsDashboardFixture = getJSONFixture(
'metrics_dashboard/environment_metrics_dashboard.json',
);
const metricsDashboardPayload = metricsDashboardFixture.dashboard;
 
describe('Monitoring mutations', () => {
let stateCopy;
@@ -26,32 +32,31 @@ describe('Monitoring mutations', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
const groups = getGroups();
 
expect(groups[0].key).toBe('system-metrics-kubernetes-0');
expect(groups[1].key).toBe('response-metrics-nginx-ingress-vts-1');
expect(groups[0].key).toBe('response-metrics-nginx-ingress-vts-0');
expect(groups[1].key).toBe('response-metrics-nginx-ingress-1');
});
it('normalizes values', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
const expectedLabel = 'Pod average';
const expectedLabel = '5xx Errors (%)';
 
const { label, queryRange } = getGroups()[0].panels[2].metrics[0];
expect(label).toEqual(expectedLabel);
expect(queryRange.length).toBeGreaterThan(0);
});
it('contains two groups, with panels with a metric each', () => {
it('contains six groups, with panels with a metric each', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
 
const groups = getGroups();
 
expect(groups).toBeDefined();
expect(groups).toHaveLength(2);
expect(groups).toHaveLength(6);
 
expect(groups[0].panels).toHaveLength(4);
expect(groups[0].panels).toHaveLength(3);
expect(groups[0].panels[0].metrics).toHaveLength(1);
expect(groups[0].panels[1].metrics).toHaveLength(1);
expect(groups[0].panels[2].metrics).toHaveLength(1);
expect(groups[0].panels[3].metrics).toHaveLength(5);
 
expect(groups[1].panels).toHaveLength(1);
expect(groups[1].panels).toHaveLength(3);
expect(groups[1].panels[0].metrics).toHaveLength(1);
});
it('assigns metrics a metric id', () => {
@@ -60,10 +65,10 @@ describe('Monitoring mutations', () => {
const groups = getGroups();
 
expect(groups[0].panels[0].metrics[0].metricId).toEqual(
'12_system_metrics_kubernetes_container_memory_total',
'undefined_response_metrics_nginx_ingress_throughput_status_code',
);
expect(groups[1].panels[0].metrics[0].metricId).toEqual(
'1_response_metrics_nginx_ingress_throughput_status_code',
'undefined_response_metrics_nginx_ingress_16_throughput_status_code',
);
});
});
@@ -123,7 +128,7 @@ describe('Monitoring mutations', () => {
});
 
describe('Individual panel/metric results', () => {
const metricId = '12_system_metrics_kubernetes_container_memory_total';
const metricId = 'undefined_response_metrics_nginx_ingress_throughput_status_code';
const result = [
{
values: [[0, 1], [1, 1], [1, 3]],
@@ -8,11 +8,21 @@ describe('Wikis', () => {
}">
<input type="text" id="wiki_title" value="My title" />
<input type="text" id="wiki_message" />
</form>`;
<select class="form-control select-control" name="wiki[format]" id="wiki_format">
<option value="markdown">Markdown</option>
<option selected="selected" value="rdoc">RDoc</option>
<option value="asciidoc">AsciiDoc</option>
<option value="org">Org</option>
</select>
<code class="js-markup-link-example">{Link title}[link:page-slug]</code>
</form>
`;
 
let wikis;
let titleInput;
let messageInput;
let changeFormatSelect;
let linkExample;
 
describe('when the wiki page is being created', () => {
const formHtmlFixture = editFormHtmlFixture({ newPage: true });
@@ -22,6 +32,8 @@ describe('Wikis', () => {
 
titleInput = document.getElementById('wiki_title');
messageInput = document.getElementById('wiki_message');
changeFormatSelect = document.querySelector('#wiki_format');
linkExample = document.querySelector('.js-markup-link-example');
wikis = new Wikis();
});
 
@@ -69,6 +81,19 @@ describe('Wikis', () => {
 
expect(messageInput.value).toEqual('Update My title');
});
it.each`
value | text
${'markdown'} | ${'[Link Title](page-slug)'}
${'rdoc'} | ${'{Link title}[link:page-slug]'}
${'asciidoc'} | ${'link:page-slug[Link title]'}
${'org'} | ${'[[page-slug]]'}
`('updates a message when value=$value is selected', ({ value, text }) => {
changeFormatSelect.value = value;
changeFormatSelect.dispatchEvent(new Event('change'));
expect(linkExample.innerHTML).toBe(text);
});
});
});
});
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Database::ConnectionTimer do
let(:current_clock_value) { 1234.56 }
before do
allow(described_class).to receive(:current_clock_value).and_return(current_clock_value)
end
describe '.starting_now' do
let(:default_interval) { described_class::DEFAULT_INTERVAL }
let(:random_value) { 120 }
before do
allow(described_class).to receive(:rand).and_return(random_value)
end
context 'when the configured interval is positive' do
before do
allow(described_class).to receive(:interval).and_return(default_interval)
end
it 'randomizes the interval of the created timer' do
timer = described_class.starting_now
expect(timer.interval).to eq(default_interval + random_value)
end
end
context 'when the configured interval is not positive' do
before do
allow(described_class).to receive(:interval).and_return(0)
end
it 'sets the interval of the created timer to nil' do
timer = described_class.starting_now
expect(timer.interval).to be_nil
end
end
end
describe '.expired?' do
context 'when the interval is positive' do
context 'when the interval has elapsed' do
it 'returns true' do
timer = described_class.new(20, current_clock_value - 30)
expect(timer).to be_expired
end
end
context 'when the interval has not elapsed' do
it 'returns false' do
timer = described_class.new(20, current_clock_value - 10)
expect(timer).not_to be_expired
end
end
end
context 'when the interval is not positive' do
context 'when the interval has elapsed' do
it 'returns false' do
timer = described_class.new(0, current_clock_value - 30)
expect(timer).not_to be_expired
end
end
context 'when the interval has not elapsed' do
it 'returns false' do
timer = described_class.new(0, current_clock_value + 10)
expect(timer).not_to be_expired
end
end
end
context 'when the interval is nil' do
it 'returns false' do
timer = described_class.new(nil, current_clock_value - 30)
expect(timer).not_to be_expired
end
end
end
describe '.reset!' do
it 'updates the timer clock value' do
timer = described_class.new(20, current_clock_value - 20)
expect(timer.starting_clock_value).not_to eql(current_clock_value)
timer.reset!
expect(timer.starting_clock_value).to eql(current_clock_value)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin do
describe 'checking in a connection to the pool' do
let(:model) do
Class.new(ActiveRecord::Base) do
self.abstract_class = true
def self.name
'ForceDisconnectTestModel'
end
end
end
let(:config) { Rails.application.config_for(:database).merge(pool: 1) }
let(:pool) { model.establish_connection(config) }
it 'calls the force disconnect callback on checkin' do
connection = pool.connection
expect(pool.active_connection?).to be_truthy
expect(connection).to receive(:force_disconnect_if_old!).and_call_original
model.clear_active_connections!
end
end
describe 'disconnecting from the database' do
let(:connection) { ActiveRecord::Base.connection_pool.connection }
let(:timer) { connection.force_disconnect_timer }
context 'when the timer is expired' do
it 'disconnects from the database' do
allow(timer).to receive(:expired?).and_return(true)
expect(connection).to receive(:disconnect!).and_call_original
expect(timer).to receive(:reset!).and_call_original
connection.force_disconnect_if_old!
end
end
context 'when the timer is not expired' do
it 'does not disconnect from the database' do
allow(timer).to receive(:expired?).and_return(false)
expect(connection).not_to receive(:disconnect!)
expect(timer).not_to receive(:reset!)
connection.force_disconnect_if_old!
end
end
end
end
@@ -9,7 +9,7 @@ describe GitlabDanger do
 
describe '.local_warning_message' do
it 'returns an informational message with rules that can run' do
expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changes_size, documentation, frozen_string, duplicate_yarn_dependencies, prettier, eslint, karma, database, commit_messages')
expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changes_size, documentation, frozen_string, duplicate_yarn_dependencies, prettier, eslint, karma, database, commit_messages, telemetry')
end
end
 
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20200304160801_delete_template_services_duplicated_by_type.rb')
describe DeleteTemplateServicesDuplicatedByType, :migration do
let(:services) { table(:services) }
before do
services.create!(template: true, type: 'JenkinsService')
services.create!(template: true, type: 'JenkinsService')
services.create!(template: true, type: 'JiraService')
services.create!(template: true, type: 'JenkinsService')
end
it 'deletes service templates duplicated by type except the one with the lowest ID' do
jenkins_service_id = services.where(type: 'JenkinsService').order(:id).pluck(:id).first
jira_service_id = services.where(type: 'JiraService').pluck(:id).first
migrate!
expect(services.pluck(:id)).to contain_exactly(jenkins_service_id, jira_service_id)
end
end
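
This spec exercises DeleteTemplateServicesDuplicatedByType, which removes duplicate service templates so the new partial unique index can be created. The migration file itself (db/migrate/20200304160801_delete_template_services_duplicated_by_type.rb) is not shown here; a hedged sketch of what it might contain:

# Hedged sketch of the tested migration; the real implementation may differ.
class DeleteTemplateServicesDuplicatedByType < ActiveRecord::Migration[6.0]
  DOWNTIME = false

  def up
    # Keep the lowest-ID template per type and delete the rest.
    execute <<~SQL
      DELETE FROM services
      WHERE template IS TRUE
        AND id NOT IN (
          SELECT MIN(id)
          FROM services
          WHERE template IS TRUE
          GROUP BY type
        )
    SQL
  end

  def down
    # Deleted duplicate templates cannot be restored.
  end
end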
@@ -17,6 +17,16 @@ describe Service do
expect(build(:service, project_id: nil, template: true)).to be_valid
expect(build(:service, project_id: nil, template: false)).to be_invalid
end
context 'with an existing service template' do
before do
create(:service, type: 'Service', template: true)
end
it 'validates only one service template per type' do
expect(build(:service, type: 'Service', template: true)).to be_invalid
end
end
end
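
The new context above expects building a second template of an already-templated type to be invalid. The model change that enforces this is not part of this excerpt; a hedged sketch of a validation that would make the spec pass (the partial unique index in the schema diff enforces the same rule at the database level):

# Hedged sketch; the actual validation in app/models/service.rb may differ.
class Service < ApplicationRecord
  validates :type, uniqueness: { scope: :template }, if: :template?
end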
 
describe 'Scopes' do