Commit a210c43e authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent c9687bdf
Showing with 2051 additions and 104 deletions
@@ -5,6 +5,7 @@ export default [
size: '3',
clusterType: 'group_type',
status: 'disabled',
cpu: '6 (100% free)',
memory: '22.50 (30% free)',
},
{
@@ -13,6 +14,7 @@ export default [
size: '12',
clusterType: 'project_type',
status: 'unreachable',
cpu: '3 (50% free)',
memory: '11 (60% free)',
},
{
@@ -21,6 +23,7 @@ export default [
size: '12',
clusterType: 'project_type',
status: 'authentication_failure',
cpu: '1 (0% free)',
memory: '22 (33% free)',
},
{
@@ -29,6 +32,7 @@ export default [
size: '12',
clusterType: 'project_type',
status: 'deleting',
cpu: '6 (100% free)',
memory: '45 (15% free)',
},
{
@@ -37,6 +41,7 @@ export default [
size: '12',
clusterType: 'project_type',
status: 'connected',
cpu: '6 (100% free)',
memory: '20.12 (35% free)',
},
];
@@ -7,22 +7,22 @@ import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue
import DropdownSearchInput from '~/vue_shared/components/dropdown/dropdown_search_input.vue';
 
describe('ClusterFormDropdown', () => {
-let vm;
+let wrapper;
const firstItem = { name: 'item 1', value: 1 };
const secondItem = { name: 'item 2', value: 2 };
const items = [firstItem, secondItem, { name: 'item 3', value: 3 }];
 
beforeEach(() => {
-vm = shallowMount(ClusterFormDropdown);
+wrapper = shallowMount(ClusterFormDropdown);
});
-afterEach(() => vm.destroy());
+afterEach(() => wrapper.destroy());
 
describe('when initial value is provided', () => {
it('sets selectedItem to initial value', () => {
-vm.setProps({ items, value: secondItem.value });
+wrapper.setProps({ items, value: secondItem.value });
 
-return vm.vm.$nextTick().then(() => {
-expect(vm.find(DropdownButton).props('toggleText')).toEqual(secondItem.name);
+return wrapper.vm.$nextTick().then(() => {
+expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(secondItem.name);
});
});
});
@@ -31,28 +31,29 @@ describe('ClusterFormDropdown', () => {
it('displays placeholder text', () => {
const placeholder = 'placeholder';
 
-vm.setProps({ placeholder });
+wrapper.setProps({ placeholder });
 
-return vm.vm.$nextTick().then(() => {
-expect(vm.find(DropdownButton).props('toggleText')).toEqual(placeholder);
+return wrapper.vm.$nextTick().then(() => {
+expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(placeholder);
});
});
});
 
describe('when an item is selected', () => {
beforeEach(() => {
-vm.setProps({ items });
+wrapper.setProps({ items });
 
-return vm.vm.$nextTick().then(() => {
-vm.findAll('.js-dropdown-item')
+return wrapper.vm.$nextTick().then(() => {
+wrapper
+.findAll('.js-dropdown-item')
.at(1)
.trigger('click');
-return vm.vm.$nextTick();
+return wrapper.vm.$nextTick();
});
});
 
it('emits input event with selected item', () => {
-expect(vm.emitted('input')[0]).toEqual([secondItem.value]);
+expect(wrapper.emitted('input')[0]).toEqual([secondItem.value]);
});
});
 
@@ -60,37 +61,54 @@ describe('ClusterFormDropdown', () => {
const value = [1];
 
beforeEach(() => {
-vm.setProps({ items, multiple: true, value });
-return vm.vm
+wrapper.setProps({ items, multiple: true, value });
+return wrapper.vm
.$nextTick()
.then(() => {
-vm.findAll('.js-dropdown-item')
+wrapper
+.findAll('.js-dropdown-item')
.at(0)
.trigger('click');
-return vm.vm.$nextTick();
+return wrapper.vm.$nextTick();
})
.then(() => {
-vm.findAll('.js-dropdown-item')
+wrapper
+.findAll('.js-dropdown-item')
.at(1)
.trigger('click');
-return vm.vm.$nextTick();
+return wrapper.vm.$nextTick();
});
});
 
it('emits input event with an array of selected items', () => {
-expect(vm.emitted('input')[1]).toEqual([[firstItem.value, secondItem.value]]);
+expect(wrapper.emitted('input')[1]).toEqual([[firstItem.value, secondItem.value]]);
});
});
 
describe('when multiple items can be selected', () => {
beforeEach(() => {
-vm.setProps({ items, multiple: true, value: firstItem.value });
-return vm.vm.$nextTick();
+wrapper.setProps({ items, multiple: true, value: firstItem.value });
+return wrapper.vm.$nextTick();
});
 
it('displays a checked GlIcon next to the item', () => {
-expect(vm.find(GlIcon).is('.invisible')).toBe(false);
-expect(vm.find(GlIcon).props('name')).toBe('mobile-issue-close');
+expect(wrapper.find(GlIcon).is('.invisible')).toBe(false);
+expect(wrapper.find(GlIcon).props('name')).toBe('mobile-issue-close');
});
});
+describe('when multiple values can be selected and initial value is null', () => {
+it('emits input event with an array of a single selected item', () => {
+wrapper.setProps({ items, multiple: true, value: null });
+return wrapper.vm.$nextTick().then(() => {
+wrapper
+.findAll('.js-dropdown-item')
+.at(0)
+.trigger('click');
+expect(wrapper.emitted('input')[0]).toEqual([[firstItem.value]]);
+});
+});
+});
 
@@ -101,20 +119,20 @@ describe('ClusterFormDropdown', () => {
const currentValue = 1;
const customLabelItems = [{ [labelProperty]: label, value: currentValue }];
 
-vm.setProps({ labelProperty, items: customLabelItems, value: currentValue });
+wrapper.setProps({ labelProperty, items: customLabelItems, value: currentValue });
 
-return vm.vm.$nextTick().then(() => {
-expect(vm.find(DropdownButton).props('toggleText')).toEqual(label);
+return wrapper.vm.$nextTick().then(() => {
+expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(label);
});
});
});
 
describe('when loading', () => {
it('dropdown button isLoading', () => {
-vm.setProps({ loading: true });
+wrapper.setProps({ loading: true });
 
-return vm.vm.$nextTick().then(() => {
-expect(vm.find(DropdownButton).props('isLoading')).toBe(true);
+return wrapper.vm.$nextTick().then(() => {
+expect(wrapper.find(DropdownButton).props('isLoading')).toBe(true);
});
});
});
@@ -123,20 +141,20 @@ describe('ClusterFormDropdown', () => {
it('uses loading text as toggle button text', () => {
const loadingText = 'loading text';
 
-vm.setProps({ loading: true, loadingText });
+wrapper.setProps({ loading: true, loadingText });
 
-return vm.vm.$nextTick().then(() => {
-expect(vm.find(DropdownButton).props('toggleText')).toEqual(loadingText);
+return wrapper.vm.$nextTick().then(() => {
+expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(loadingText);
});
});
});
 
describe('when disabled', () => {
it('dropdown button isDisabled', () => {
-vm.setProps({ disabled: true });
+wrapper.setProps({ disabled: true });
 
-return vm.vm.$nextTick().then(() => {
-expect(vm.find(DropdownButton).props('isDisabled')).toBe(true);
+return wrapper.vm.$nextTick().then(() => {
+expect(wrapper.find(DropdownButton).props('isDisabled')).toBe(true);
});
});
});
@@ -145,20 +163,20 @@ describe('ClusterFormDropdown', () => {
it('uses disabled text as toggle button text', () => {
const disabledText = 'disabled text';
 
-vm.setProps({ disabled: true, disabledText });
+wrapper.setProps({ disabled: true, disabledText });
 
-return vm.vm.$nextTick().then(() => {
-expect(vm.find(DropdownButton).props('toggleText')).toBe(disabledText);
+return wrapper.vm.$nextTick().then(() => {
+expect(wrapper.find(DropdownButton).props('toggleText')).toBe(disabledText);
});
});
});
 
describe('when has errors', () => {
it('sets border-danger class selector to dropdown toggle', () => {
-vm.setProps({ hasErrors: true });
+wrapper.setProps({ hasErrors: true });
 
-return vm.vm.$nextTick().then(() => {
-expect(vm.find(DropdownButton).classes('border-danger')).toBe(true);
+return wrapper.vm.$nextTick().then(() => {
+expect(wrapper.find(DropdownButton).classes('border-danger')).toBe(true);
});
});
});
@@ -167,10 +185,10 @@ describe('ClusterFormDropdown', () => {
it('displays error message', () => {
const errorMessage = 'error message';
 
-vm.setProps({ hasErrors: true, errorMessage });
+wrapper.setProps({ hasErrors: true, errorMessage });
 
-return vm.vm.$nextTick().then(() => {
-expect(vm.find('.js-eks-dropdown-error-message').text()).toEqual(errorMessage);
+return wrapper.vm.$nextTick().then(() => {
+expect(wrapper.find('.js-eks-dropdown-error-message').text()).toEqual(errorMessage);
});
});
});
@@ -179,10 +197,10 @@ describe('ClusterFormDropdown', () => {
it('displays empty text', () => {
const emptyText = 'error message';
 
-vm.setProps({ items: [], emptyText });
+wrapper.setProps({ items: [], emptyText });
 
-return vm.vm.$nextTick().then(() => {
-expect(vm.find('.js-empty-text').text()).toEqual(emptyText);
+return wrapper.vm.$nextTick().then(() => {
+expect(wrapper.find('.js-empty-text').text()).toEqual(emptyText);
});
});
});
@@ -190,34 +208,36 @@ describe('ClusterFormDropdown', () => {
it('displays search field placeholder', () => {
const searchFieldPlaceholder = 'Placeholder';
 
-vm.setProps({ searchFieldPlaceholder });
+wrapper.setProps({ searchFieldPlaceholder });
 
-return vm.vm.$nextTick().then(() => {
-expect(vm.find(DropdownSearchInput).props('placeholderText')).toEqual(searchFieldPlaceholder);
+return wrapper.vm.$nextTick().then(() => {
+expect(wrapper.find(DropdownSearchInput).props('placeholderText')).toEqual(
+searchFieldPlaceholder,
+);
});
});
 
it('it filters results by search query', () => {
const searchQuery = secondItem.name;
 
-vm.setProps({ items });
-vm.setData({ searchQuery });
+wrapper.setProps({ items });
+wrapper.setData({ searchQuery });
 
-return vm.vm.$nextTick().then(() => {
-expect(vm.findAll('.js-dropdown-item').length).toEqual(1);
-expect(vm.find('.js-dropdown-item').text()).toEqual(secondItem.name);
+return wrapper.vm.$nextTick().then(() => {
+expect(wrapper.findAll('.js-dropdown-item').length).toEqual(1);
+expect(wrapper.find('.js-dropdown-item').text()).toEqual(secondItem.name);
});
});
 
it('focuses dropdown search input when dropdown is displayed', () => {
-const dropdownEl = vm.find('.dropdown').element;
+const dropdownEl = wrapper.find('.dropdown').element;
 
-expect(vm.find(DropdownSearchInput).props('focused')).toBe(false);
+expect(wrapper.find(DropdownSearchInput).props('focused')).toBe(false);
 
$(dropdownEl).trigger('shown.bs.dropdown');
 
-return vm.vm.$nextTick(() => {
-expect(vm.find(DropdownSearchInput).props('focused')).toBe(true);
+return wrapper.vm.$nextTick(() => {
+expect(wrapper.find(DropdownSearchInput).props('focused')).toBe(true);
});
});
});
import Vue from 'vue';
import { GlDropdown, GlDropdownItem, GlSearchBoxByClick } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
import EnvironmentLogs from '~/logs/components/environment_logs.vue';
import { createStore } from '~/logs/stores';
import { scrollDown } from '~/lib/utils/scroll_utils';
import {
mockEnvName,
mockEnvironments,
mockPods,
mockLogsResult,
mockTrace,
mockPodName,
mockSearch,
mockEnvironmentsEndpoint,
mockDocumentationPath,
} from '../mock_data';
jest.mock('~/lib/utils/scroll_utils');
describe('EnvironmentLogs', () => {
let EnvironmentLogsComponent;
let store;
let wrapper;
let state;
const propsData = {
environmentName: mockEnvName,
environmentsPath: mockEnvironmentsEndpoint,
clusterApplicationsDocumentationPath: mockDocumentationPath,
};
const actionMocks = {
setInitData: jest.fn(),
setSearch: jest.fn(),
showPodLogs: jest.fn(),
showEnvironment: jest.fn(),
fetchEnvironments: jest.fn(),
};
const updateControlBtnsMock = jest.fn();
const findEnvironmentsDropdown = () => wrapper.find('.js-environments-dropdown');
const findPodsDropdown = () => wrapper.find('.js-pods-dropdown');
const findSearchBar = () => wrapper.find('.js-logs-search');
const findTimeRangePicker = () => wrapper.find({ ref: 'dateTimePicker' });
const findInfoAlert = () => wrapper.find('.js-elasticsearch-alert');
const findLogControlButtons = () => wrapper.find({ name: 'log-control-buttons-stub' });
const findLogTrace = () => wrapper.find('.js-log-trace');
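// The helpers below stand in for the mocked Vuex actions by writing directly to the store state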
const mockSetInitData = () => {
state.pods.options = mockPods;
state.environments.current = mockEnvName;
[state.pods.current] = state.pods.options;
state.logs.isComplete = false;
state.logs.lines = mockLogsResult;
};
const mockShowPodLogs = podName => {
state.pods.options = mockPods;
[state.pods.current] = podName;
state.logs.isComplete = false;
state.logs.lines = mockLogsResult;
};
const mockFetchEnvs = () => {
state.environments.options = mockEnvironments;
};
const initWrapper = () => {
wrapper = shallowMount(EnvironmentLogsComponent, {
propsData,
store,
stubs: {
LogControlButtons: {
name: 'log-control-buttons-stub',
template: '<div/>',
methods: {
update: updateControlBtnsMock,
},
},
},
methods: {
...actionMocks,
},
});
};
beforeEach(() => {
store = createStore();
state = store.state.environmentLogs;
EnvironmentLogsComponent = Vue.extend(EnvironmentLogs);
});
afterEach(() => {
actionMocks.setInitData.mockReset();
actionMocks.showPodLogs.mockReset();
actionMocks.fetchEnvironments.mockReset();
if (wrapper) {
wrapper.destroy();
}
});
it('displays UI elements', () => {
initWrapper();
expect(wrapper.isVueInstance()).toBe(true);
expect(wrapper.isEmpty()).toBe(false);
// top bar
expect(findEnvironmentsDropdown().is(GlDropdown)).toBe(true);
expect(findPodsDropdown().is(GlDropdown)).toBe(true);
expect(findLogControlButtons().exists()).toBe(true);
expect(findSearchBar().exists()).toBe(true);
expect(findSearchBar().is(GlSearchBoxByClick)).toBe(true);
expect(findTimeRangePicker().exists()).toBe(true);
expect(findTimeRangePicker().is(DateTimePicker)).toBe(true);
// log trace
expect(findLogTrace().isEmpty()).toBe(false);
});
it('mounted inits data', () => {
initWrapper();
expect(actionMocks.setInitData).toHaveBeenCalledTimes(1);
expect(actionMocks.setInitData).toHaveBeenLastCalledWith({
timeRange: expect.objectContaining({
default: true,
}),
environmentName: mockEnvName,
podName: null,
});
expect(actionMocks.fetchEnvironments).toHaveBeenCalledTimes(1);
expect(actionMocks.fetchEnvironments).toHaveBeenLastCalledWith(mockEnvironmentsEndpoint);
});
describe('loading state', () => {
beforeEach(() => {
state.pods.options = [];
state.logs = {
lines: [],
isLoading: true,
};
state.environments = {
options: [],
isLoading: true,
};
initWrapper();
});
it('displays a disabled environments dropdown', () => {
expect(findEnvironmentsDropdown().attributes('disabled')).toBe('true');
expect(findEnvironmentsDropdown().findAll(GlDropdownItem).length).toBe(0);
});
it('displays a disabled pods dropdown', () => {
expect(findPodsDropdown().attributes('disabled')).toBe('true');
expect(findPodsDropdown().findAll(GlDropdownItem).length).toBe(0);
});
it('displays a disabled search bar', () => {
expect(findSearchBar().exists()).toBe(true);
expect(findSearchBar().attributes('disabled')).toBe('true');
});
it('displays a disabled time window dropdown', () => {
expect(findTimeRangePicker().attributes('disabled')).toBe('true');
});
it('does not update buttons state', () => {
expect(updateControlBtnsMock).not.toHaveBeenCalled();
});
it('shows a logs trace', () => {
expect(findLogTrace().text()).toBe('');
expect(
findLogTrace()
.find('.js-build-loader-animation')
.isVisible(),
).toBe(true);
});
});
describe('legacy environment', () => {
beforeEach(() => {
state.pods.options = [];
state.logs = {
lines: [],
isLoading: false,
};
state.environments = {
options: mockEnvironments,
current: 'staging',
isLoading: false,
};
initWrapper();
});
it('displays a disabled time window dropdown', () => {
expect(findTimeRangePicker().attributes('disabled')).toBe('true');
});
it('displays a disabled search bar', () => {
expect(findSearchBar().attributes('disabled')).toBe('true');
});
it('displays an alert to upgrade to ES', () => {
expect(findInfoAlert().exists()).toBe(true);
});
});
describe('state with data', () => {
beforeEach(() => {
actionMocks.setInitData.mockImplementation(mockSetInitData);
actionMocks.showPodLogs.mockImplementation(mockShowPodLogs);
actionMocks.fetchEnvironments.mockImplementation(mockFetchEnvs);
initWrapper();
});
afterEach(() => {
scrollDown.mockReset();
updateControlBtnsMock.mockReset();
actionMocks.setInitData.mockReset();
actionMocks.showPodLogs.mockReset();
actionMocks.fetchEnvironments.mockReset();
});
it('displays an enabled search bar', () => {
expect(findSearchBar().attributes('disabled')).toBeFalsy();
// input a query and click `search`
findSearchBar().vm.$emit('input', mockSearch);
findSearchBar().vm.$emit('submit');
expect(actionMocks.setSearch).toHaveBeenCalledTimes(1);
expect(actionMocks.setSearch).toHaveBeenCalledWith(mockSearch);
});
it('displays an enabled time window dropdown', () => {
expect(findTimeRangePicker().attributes('disabled')).toBeFalsy();
});
it('does not display an alert to upgrade to ES', () => {
expect(findInfoAlert().exists()).toBe(false);
});
it('populates environments dropdown', () => {
const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
expect(findEnvironmentsDropdown().props('text')).toBe(mockEnvName);
expect(items.length).toBe(mockEnvironments.length);
mockEnvironments.forEach((env, i) => {
const item = items.at(i);
expect(item.text()).toBe(env.name);
});
});
it('populates pods dropdown', () => {
const items = findPodsDropdown().findAll(GlDropdownItem);
expect(findPodsDropdown().props('text')).toBe(mockPodName);
expect(items.length).toBe(mockPods.length);
mockPods.forEach((pod, i) => {
const item = items.at(i);
expect(item.text()).toBe(pod);
});
});
it('populates logs trace', () => {
const trace = findLogTrace();
expect(trace.text().split('\n').length).toBe(mockTrace.length);
expect(trace.text().split('\n')).toEqual(mockTrace);
});
it('updates control buttons state', () => {
expect(updateControlBtnsMock).toHaveBeenCalledTimes(1);
});
it('scrolls to bottom when loaded', () => {
expect(scrollDown).toHaveBeenCalledTimes(1);
});
describe('when user clicks', () => {
it('environment name, trace is refreshed', () => {
const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
const index = 1; // any env
expect(actionMocks.showEnvironment).toHaveBeenCalledTimes(0);
items.at(index).vm.$emit('click');
expect(actionMocks.showEnvironment).toHaveBeenCalledTimes(1);
expect(actionMocks.showEnvironment).toHaveBeenLastCalledWith(mockEnvironments[index].name);
});
it('pod name, trace is refreshed', () => {
const items = findPodsDropdown().findAll(GlDropdownItem);
const index = 2; // any pod
expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(0);
items.at(index).vm.$emit('click');
expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(1);
expect(actionMocks.showPodLogs).toHaveBeenLastCalledWith(mockPods[index]);
});
it('refresh button, trace is refreshed', () => {
expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(0);
findLogControlButtons().vm.$emit('refresh');
expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(1);
expect(actionMocks.showPodLogs).toHaveBeenLastCalledWith(mockPodName);
});
});
});
});
import { shallowMount } from '@vue/test-utils';
import { GlButton } from '@gitlab/ui';
import LogControlButtons from '~/logs/components/log_control_buttons.vue';
import {
canScroll,
isScrolledToTop,
isScrolledToBottom,
scrollDown,
scrollUp,
} from '~/lib/utils/scroll_utils';
jest.mock('~/lib/utils/scroll_utils');
describe('LogControlButtons', () => {
let wrapper;
const findScrollToTop = () => wrapper.find('.js-scroll-to-top');
const findScrollToBottom = () => wrapper.find('.js-scroll-to-bottom');
const findRefreshBtn = () => wrapper.find('.js-refresh-log');
const initWrapper = () => {
wrapper = shallowMount(LogControlButtons);
};
afterEach(() => {
if (wrapper) {
wrapper.destroy();
}
});
it('displays UI elements', () => {
initWrapper();
expect(wrapper.isVueInstance()).toBe(true);
expect(wrapper.isEmpty()).toBe(false);
expect(findScrollToTop().is(GlButton)).toBe(true);
expect(findScrollToBottom().is(GlButton)).toBe(true);
expect(findRefreshBtn().is(GlButton)).toBe(true);
});
it('emits a `refresh` event on click on `refresh` button', () => {
initWrapper();
// An `undefined` value means no event was emitted
expect(wrapper.emitted('refresh')).toBe(undefined);
findRefreshBtn().vm.$emit('click');
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.emitted('refresh')).toHaveLength(1);
});
});
describe('when scrolling actions are enabled', () => {
beforeEach(() => {
// mock scrolled to the middle of a long page
canScroll.mockReturnValue(true);
isScrolledToBottom.mockReturnValue(false);
isScrolledToTop.mockReturnValue(false);
initWrapper();
wrapper.vm.update();
return wrapper.vm.$nextTick();
});
afterEach(() => {
canScroll.mockReset();
isScrolledToTop.mockReset();
isScrolledToBottom.mockReset();
});
it('click on "scroll to top" scrolls up', () => {
expect(findScrollToTop().is('[disabled]')).toBe(false);
findScrollToTop().vm.$emit('click');
expect(scrollUp).toHaveBeenCalledTimes(1);
});
it('click on "scroll to bottom" scrolls down', () => {
expect(findScrollToBottom().is('[disabled]')).toBe(false);
findScrollToBottom().vm.$emit('click');
expect(scrollDown).toHaveBeenCalledTimes(1); // plus one time when trace was loaded
});
});
describe('when scrolling actions are disabled', () => {
beforeEach(() => {
// mock a short page without a scrollbar
canScroll.mockReturnValue(false);
isScrolledToBottom.mockReturnValue(true);
isScrolledToTop.mockReturnValue(true);
initWrapper();
});
it('buttons are disabled', () => {
wrapper.vm.update();
return wrapper.vm.$nextTick(() => {
expect(findScrollToTop().is('[disabled]')).toBe(true);
expect(findScrollToBottom().is('[disabled]')).toBe(true);
});
});
});
});
export const mockProjectPath = 'root/autodevops-deploy';
export const mockEnvName = 'production';
export const mockEnvironmentsEndpoint = `${mockProjectPath}/environments.json`;
export const mockEnvId = '99';
export const mockDocumentationPath = '/documentation.md';
const makeMockEnvironment = (id, name, advancedQuerying) => ({
id,
project_path: mockProjectPath,
name,
logs_api_path: '/dummy_logs_path.json',
enable_advanced_logs_querying: advancedQuerying,
});
export const mockEnvironment = makeMockEnvironment(mockEnvId, mockEnvName, true);
export const mockEnvironments = [
mockEnvironment,
makeMockEnvironment(101, 'staging', false),
makeMockEnvironment(102, 'review/a-feature', false),
];
export const mockPodName = 'production-764c58d697-aaaaa';
export const mockPods = [
mockPodName,
'production-764c58d697-bbbbb',
'production-764c58d697-ccccc',
'production-764c58d697-ddddd',
];
export const mockLogsResult = [
{
timestamp: '2019-12-13T13:43:18.2760123Z',
message: '10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
},
{ timestamp: '2019-12-13T13:43:18.2760123Z', message: '- -> /' },
{
timestamp: '2019-12-13T13:43:26.8420123Z',
message: '10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13',
},
{ timestamp: '2019-12-13T13:43:26.8420123Z', message: '- -> /' },
{
timestamp: '2019-12-13T13:43:28.3710123Z',
message: '10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13',
},
{ timestamp: '2019-12-13T13:43:28.3710123Z', message: '- -> /' },
{
timestamp: '2019-12-13T13:43:36.8860123Z',
message: '10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13',
},
{ timestamp: '2019-12-13T13:43:36.8860123Z', message: '- -> /' },
{
timestamp: '2019-12-13T13:43:38.4000123Z',
message: '10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13',
},
{ timestamp: '2019-12-13T13:43:38.4000123Z', message: '- -> /' },
{
timestamp: '2019-12-13T13:43:46.8420123Z',
message: '10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13',
},
{ timestamp: '2019-12-13T13:43:46.8430123Z', message: '- -> /' },
{
timestamp: '2019-12-13T13:43:48.3240123Z',
message: '10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13',
},
{ timestamp: '2019-12-13T13:43:48.3250123Z', message: '- -> /' },
];
export const mockTrace = [
'Dec 13 13:43:18.276Z | 10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
'Dec 13 13:43:18.276Z | - -> /',
'Dec 13 13:43:26.842Z | 10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13',
'Dec 13 13:43:26.842Z | - -> /',
'Dec 13 13:43:28.371Z | 10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13',
'Dec 13 13:43:28.371Z | - -> /',
'Dec 13 13:43:36.886Z | 10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13',
'Dec 13 13:43:36.886Z | - -> /',
'Dec 13 13:43:38.400Z | 10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13',
'Dec 13 13:43:38.400Z | - -> /',
'Dec 13 13:43:46.842Z | 10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13',
'Dec 13 13:43:46.843Z | - -> /',
'Dec 13 13:43:48.324Z | 10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13',
'Dec 13 13:43:48.325Z | - -> /',
];
export const mockSearch = 'foo +bar';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import * as types from '~/logs/stores/mutation_types';
import { convertToFixedRange } from '~/lib/utils/datetime_range';
import logsPageState from '~/logs/stores/state';
import {
setInitData,
setSearch,
showPodLogs,
fetchEnvironments,
fetchLogs,
} from '~/logs/stores/actions';
import { defaultTimeRange } from '~/monitoring/constants';
import axios from '~/lib/utils/axios_utils';
import flash from '~/flash';
import {
mockProjectPath,
mockPodName,
mockEnvironmentsEndpoint,
mockEnvironments,
mockPods,
mockLogsResult,
mockEnvName,
mockSearch,
} from '../mock_data';
jest.mock('~/flash');
jest.mock('~/lib/utils/datetime_range');
jest.mock('~/logs/utils');
const mockDefaultRange = {
start: '2020-01-10T18:00:00.000Z',
end: '2020-01-10T10:00:00.000Z',
};
const mockFixedRange = {
start: '2020-01-09T18:06:20.000Z',
end: '2020-01-09T18:36:20.000Z',
};
const mockRollingRange = {
duration: 120,
};
const mockRollingRangeAsFixed = {
start: '2020-01-10T18:00:00.000Z',
end: '2020-01-10T17:58:00.000Z',
};
describe('Logs Store actions', () => {
let state;
let mock;
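// convertToFixedRange is mocked so every supported time range resolves to a deterministic fixed range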
convertToFixedRange.mockImplementation(range => {
if (range === defaultTimeRange) {
return { ...mockDefaultRange };
}
if (range === mockFixedRange) {
return { ...mockFixedRange };
}
if (range === mockRollingRange) {
return { ...mockRollingRangeAsFixed };
}
throw new Error('Invalid time range');
});
beforeEach(() => {
state = logsPageState();
});
afterEach(() => {
flash.mockClear();
});
describe('setInitData', () => {
it('should commit environment and pod name mutation', () =>
testAction(setInitData, { environmentName: mockEnvName, podName: mockPodName }, state, [
{ type: types.SET_PROJECT_ENVIRONMENT, payload: mockEnvName },
{ type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
]));
});
describe('setSearch', () => {
it('should commit search mutation', () =>
testAction(
setSearch,
mockSearch,
state,
[{ type: types.SET_SEARCH, payload: mockSearch }],
[{ type: 'fetchLogs' }],
));
});
describe('showPodLogs', () => {
it('should commit pod name', () =>
testAction(
showPodLogs,
mockPodName,
state,
[{ type: types.SET_CURRENT_POD_NAME, payload: mockPodName }],
[{ type: 'fetchLogs' }],
));
});
describe('fetchEnvironments', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
});
it('should commit RECEIVE_ENVIRONMENTS_DATA_SUCCESS mutation on correct data', () => {
mock.onGet(mockEnvironmentsEndpoint).replyOnce(200, { environments: mockEnvironments });
return testAction(
fetchEnvironments,
mockEnvironmentsEndpoint,
state,
[
{ type: types.REQUEST_ENVIRONMENTS_DATA },
{ type: types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS, payload: mockEnvironments },
],
[{ type: 'fetchLogs' }],
);
});
it('should commit RECEIVE_ENVIRONMENTS_DATA_ERROR on wrong data', () => {
mock.onGet(mockEnvironmentsEndpoint).replyOnce(500);
return testAction(
fetchEnvironments,
mockEnvironmentsEndpoint,
state,
[
{ type: types.REQUEST_ENVIRONMENTS_DATA },
{ type: types.RECEIVE_ENVIRONMENTS_DATA_ERROR },
],
[],
() => {
expect(flash).toHaveBeenCalledTimes(1);
},
);
});
});
describe('fetchLogs', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
});
afterEach(() => {
mock.reset();
});
it('should commit logs and pod data when there is pod name defined', () => {
state.environments.options = mockEnvironments;
state.environments.current = mockEnvName;
state.pods.current = mockPodName;
const endpoint = '/dummy_logs_path.json';
mock
.onGet(endpoint, {
params: {
pod_name: mockPodName,
...mockDefaultRange,
},
})
.reply(200, {
pod_name: mockPodName,
pods: mockPods,
logs: mockLogsResult,
});
mock.onGet(endpoint).replyOnce(202); // mock reactive cache
return testAction(
fetchLogs,
null,
state,
[
{ type: types.REQUEST_PODS_DATA },
{ type: types.REQUEST_LOGS_DATA },
{ type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
{ type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
{ type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
],
[],
);
});
it('should commit logs and pod data when there is pod name defined and a non-default date range', () => {
state.projectPath = mockProjectPath;
state.environments.options = mockEnvironments;
state.environments.current = mockEnvName;
state.pods.current = mockPodName;
state.timeRange.current = mockFixedRange;
const endpoint = '/dummy_logs_path.json';
mock
.onGet(endpoint, {
params: {
pod_name: mockPodName,
start: mockFixedRange.start,
end: mockFixedRange.end,
},
})
.reply(200, {
pod_name: mockPodName,
pods: mockPods,
logs: mockLogsResult,
});
return testAction(
fetchLogs,
null,
state,
[
{ type: types.REQUEST_PODS_DATA },
{ type: types.REQUEST_LOGS_DATA },
{ type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
{ type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
{ type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
],
[],
);
});
it('should commit logs and pod data when there is pod name and search and a faulty date range', () => {
state.environments.options = mockEnvironments;
state.environments.current = mockEnvName;
state.pods.current = mockPodName;
state.search = mockSearch;
state.timeRange.current = 'INVALID_TIME_RANGE';
const endpoint = '/dummy_logs_path.json';
mock
.onGet(endpoint, {
params: {
pod_name: mockPodName,
search: mockSearch,
},
})
.reply(200, {
pod_name: mockPodName,
pods: mockPods,
logs: mockLogsResult,
});
mock.onGet(endpoint).replyOnce(202); // mock reactive cache
return testAction(
fetchLogs,
null,
state,
[
{ type: types.REQUEST_PODS_DATA },
{ type: types.REQUEST_LOGS_DATA },
{ type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
{ type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
{ type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
],
[],
() => {
// Warning about time ranges was issued
expect(flash).toHaveBeenCalledTimes(1);
expect(flash).toHaveBeenCalledWith(expect.any(String), 'warning');
},
);
});
it('should commit logs and pod data when no pod name defined', done => {
state.environments.options = mockEnvironments;
state.environments.current = mockEnvName;
const endpoint = '/dummy_logs_path.json';
mock.onGet(endpoint, { params: { ...mockDefaultRange } }).reply(200, {
pod_name: mockPodName,
pods: mockPods,
logs: mockLogsResult,
});
mock.onGet(endpoint).replyOnce(202); // mock reactive cache
testAction(
fetchLogs,
null,
state,
[
{ type: types.REQUEST_PODS_DATA },
{ type: types.REQUEST_LOGS_DATA },
{ type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
{ type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
{ type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
],
[],
done,
);
});
it('should commit logs and pod errors when backend fails', () => {
state.environments.options = mockEnvironments;
state.environments.current = mockEnvName;
const endpoint = `/${mockProjectPath}/-/logs/elasticsearch.json?environment_name=${mockEnvName}`;
mock.onGet(endpoint).replyOnce(500);
return testAction(
fetchLogs,
null,
state,
[
{ type: types.REQUEST_PODS_DATA },
{ type: types.REQUEST_LOGS_DATA },
{ type: types.RECEIVE_PODS_DATA_ERROR },
{ type: types.RECEIVE_LOGS_DATA_ERROR },
],
[],
() => {
expect(flash).toHaveBeenCalledTimes(1);
},
);
});
});
});
import * as getters from '~/logs/stores/getters';
import logsPageState from '~/logs/stores/state';
import { mockLogsResult, mockTrace } from '../mock_data';
describe('Logs Store getters', () => {
let state;
beforeEach(() => {
state = logsPageState();
});
describe('trace', () => {
describe('when state is initialized', () => {
it('returns an empty string', () => {
expect(getters.trace(state)).toEqual('');
});
});
describe('when state logs are empty', () => {
beforeEach(() => {
state.logs.lines = [];
});
it('returns an empty string', () => {
expect(getters.trace(state)).toEqual('');
});
});
describe('when state logs are set', () => {
beforeEach(() => {
state.logs.lines = mockLogsResult;
});
it('returns the log lines joined as a trace', () => {
expect(getters.trace(state)).toEqual(mockTrace.join('\n'));
});
});
});
});
import mutations from '~/logs/stores/mutations';
import * as types from '~/logs/stores/mutation_types';
import logsPageState from '~/logs/stores/state';
import {
mockEnvName,
mockEnvironments,
mockPods,
mockPodName,
mockLogsResult,
mockSearch,
} from '../mock_data';
describe('Logs Store Mutations', () => {
let state;
beforeEach(() => {
state = logsPageState();
});
it('ensures mutation types are correctly named', () => {
Object.keys(types).forEach(k => {
expect(k).toEqual(types[k]);
});
});
describe('SET_PROJECT_ENVIRONMENT', () => {
it('sets the environment', () => {
mutations[types.SET_PROJECT_ENVIRONMENT](state, mockEnvName);
expect(state.environments.current).toEqual(mockEnvName);
});
});
describe('SET_SEARCH', () => {
it('sets the search', () => {
mutations[types.SET_SEARCH](state, mockSearch);
expect(state.search).toEqual(mockSearch);
});
});
describe('REQUEST_ENVIRONMENTS_DATA', () => {
it('inits data', () => {
mutations[types.REQUEST_ENVIRONMENTS_DATA](state);
expect(state.environments.options).toEqual([]);
expect(state.environments.isLoading).toEqual(true);
});
});
describe('RECEIVE_ENVIRONMENTS_DATA_SUCCESS', () => {
it('receives environments data and stores it as options', () => {
expect(state.environments.options).toEqual([]);
mutations[types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS](state, mockEnvironments);
expect(state.environments.options).toEqual(mockEnvironments);
expect(state.environments.isLoading).toEqual(false);
});
});
describe('RECEIVE_ENVIRONMENTS_DATA_ERROR', () => {
it('captures an error loading environments', () => {
mutations[types.RECEIVE_ENVIRONMENTS_DATA_ERROR](state);
expect(state.environments).toEqual({
options: [],
isLoading: false,
current: null,
});
});
});
describe('REQUEST_LOGS_DATA', () => {
it('starts loading for logs', () => {
mutations[types.REQUEST_LOGS_DATA](state);
expect(state.logs).toEqual(
expect.objectContaining({
lines: [],
isLoading: true,
isComplete: false,
}),
);
});
});
describe('RECEIVE_LOGS_DATA_SUCCESS', () => {
it('receives logs lines', () => {
mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, mockLogsResult);
expect(state.logs).toEqual(
expect.objectContaining({
lines: mockLogsResult,
isLoading: false,
isComplete: true,
}),
);
});
});
describe('RECEIVE_LOGS_DATA_ERROR', () => {
it('receives log data error and stops loading', () => {
mutations[types.RECEIVE_LOGS_DATA_ERROR](state);
expect(state.logs).toEqual(
expect.objectContaining({
lines: [],
isLoading: false,
isComplete: true,
}),
);
});
});
describe('SET_CURRENT_POD_NAME', () => {
it('set current pod name', () => {
mutations[types.SET_CURRENT_POD_NAME](state, mockPodName);
expect(state.pods.current).toEqual(mockPodName);
});
});
describe('SET_TIME_RANGE', () => {
it('sets a default range', () => {
expect(state.timeRange.current).toEqual(expect.any(Object));
});
it('sets a time range', () => {
const mockRange = {
start: '2020-01-10T18:00:00.000Z',
end: '2020-01-10T10:00:00.000Z',
};
mutations[types.SET_TIME_RANGE](state, mockRange);
expect(state.timeRange.current).toEqual(mockRange);
});
});
describe('REQUEST_PODS_DATA', () => {
it('clears the pod options', () => {
mutations[types.REQUEST_PODS_DATA](state);
expect(state.pods).toEqual(
expect.objectContaining({
options: [],
}),
);
});
});
describe('RECEIVE_PODS_DATA_SUCCESS', () => {
it('receives pods data success', () => {
mutations[types.RECEIVE_PODS_DATA_SUCCESS](state, mockPods);
expect(state.pods).toEqual(
expect.objectContaining({
options: mockPods,
}),
);
});
});
describe('RECEIVE_PODS_DATA_ERROR', () => {
it('receives pods data error', () => {
mutations[types.RECEIVE_PODS_DATA_ERROR](state);
expect(state.pods).toEqual(
expect.objectContaining({
options: [],
}),
);
});
});
});
import { getTimeRange } from '~/logs/utils';
describe('logs/utils', () => {
describe('getTimeRange', () => {
const nowTimestamp = 1577836800000;
const nowString = '2020-01-01T00:00:00.000Z';
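// Date.now is frozen at 2020-01-01T00:00:00.000Z so relative ranges resolve deterministically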
beforeEach(() => {
jest.spyOn(Date, 'now').mockImplementation(() => nowTimestamp);
});
afterEach(() => {
Date.now.mockRestore();
});
it('returns the right values', () => {
expect(getTimeRange(0)).toEqual({
start: '2020-01-01T00:00:00.000Z',
end: nowString,
});
expect(getTimeRange(60 * 30)).toEqual({
start: '2019-12-31T23:30:00.000Z',
end: nowString,
});
expect(getTimeRange(60 * 60 * 24 * 7 * 1)).toEqual({
start: '2019-12-25T00:00:00.000Z',
end: nowString,
});
expect(getTimeRange(60 * 60 * 24 * 7 * 4)).toEqual({
start: '2019-12-04T00:00:00.000Z',
end: nowString,
});
});
});
});
// this file can't be migrated to jest because it relies on the browser to perform integration tests:
// see: https://gitlab.com/gitlab-org/gitlab/-/issues/194207#note_301878738
import { FIXTURES_PATH } from 'spec/test_constants';
import BalsamiqViewer from '~/blob/balsamiq/balsamiq_viewer';
 
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 2020_02_26_162723 do
let(:gitlab_shell) { Gitlab::Shell.new }
let(:users) { table(:users) }
let(:snippets) { table(:snippets) }
let(:snippet_repositories) { table(:snippet_repositories) }
let(:user) { users.create(id: 1, email: 'user@example.com', projects_limit: 10, username: 'test', name: 'Test') }
let!(:snippet_with_repo) { snippets.create(id: 1, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
let!(:snippet_with_empty_repo) { snippets.create(id: 2, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
let!(:snippet_without_repo) { snippets.create(id: 3, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
let(:file_name) { 'file_name.rb' }
let(:content) { 'content' }
let(:ids) { snippets.pluck('MIN(id)', 'MAX(id)').first }
let(:service) { described_class.new }
subject { service.perform(*ids) }
before do
allow(snippet_with_repo).to receive(:disk_path).and_return(disk_path(snippet_with_repo))
TestEnv.copy_repo(snippet_with_repo,
bare_repo: TestEnv.factory_repo_path_bare,
refs: TestEnv::BRANCH_SHA)
raw_repository(snippet_with_empty_repo).create_repository
end
after do
raw_repository(snippet_with_repo).remove
raw_repository(snippet_without_repo).remove
raw_repository(snippet_with_empty_repo).remove
end
describe '#perform' do
it 'logs successfully migrated snippets' do
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
expect(instance).to receive(:info).exactly(3).times
end
subject
end
context 'when snippet has a non empty repository' do
it 'does not perform any action' do
expect(service).not_to receive(:create_repository_and_files).with(snippet_with_repo)
subject
end
end
shared_examples 'commits the file to the repository' do
it do
subject
blob = blob_at(snippet, file_name)
aggregate_failures do
expect(blob).to be
expect(blob.data).to eq content
end
end
end
context 'when snippet has an empty repo' do
before do
expect(repository_exists?(snippet_with_empty_repo)).to be_truthy
end
it_behaves_like 'commits the file to the repository' do
let(:snippet) { snippet_with_empty_repo }
end
end
context 'when snippet does not have a repository' do
it 'creates the repository' do
expect { subject }.to change { repository_exists?(snippet_without_repo) }.from(false).to(true)
end
it_behaves_like 'commits the file to the repository' do
let(:snippet) { snippet_without_repo }
end
end
context 'when an error is raised' do
before do
allow(service).to receive(:create_commit).and_raise(StandardError)
end
it 'logs errors' do
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
expect(instance).to receive(:error).exactly(3).times
end
subject
end
it "retries #{described_class::MAX_RETRIES} times the operation if it fails" do
expect(service).to receive(:create_commit).exactly(snippets.count * described_class::MAX_RETRIES).times
subject
end
it 'destroys the snippet repository' do
expect(service).to receive(:destroy_snippet_repository).exactly(3).times.and_call_original
subject
expect(snippet_repositories.count).to eq 0
end
it 'deletes the repository on disk' do
subject
aggregate_failures do
expect(repository_exists?(snippet_with_repo)).to be_falsey
expect(repository_exists?(snippet_without_repo)).to be_falsey
expect(repository_exists?(snippet_with_empty_repo)).to be_falsey
end
end
end
end
def blob_at(snippet, path)
raw_repository(snippet).blob_at('master', path)
end
def repository_exists?(snippet)
gitlab_shell.repository_exists?('default', "#{disk_path(snippet)}.git")
end
def raw_repository(snippet)
Gitlab::Git::Repository.new('default',
"#{disk_path(snippet)}.git",
Gitlab::GlRepository::SNIPPET.identifier_for_container(snippet),
"@snippets/#{snippet.id}")
end
def hashed_repository(snippet)
Storage::Hashed.new(snippet, prefix: '@snippets')
end
def disk_path(snippet)
hashed_repository(snippet).disk_path
end
def ls_files(snippet)
raw_repository(snippet).ls_files(nil)
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Elasticsearch::Logs do
let(:client) { Elasticsearch::Transport::Client }
let(:es_message_1) { { timestamp: "2019-12-13T14:35:34.034Z", message: "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13" } }
let(:es_message_2) { { timestamp: "2019-12-13T14:35:35.034Z", message: "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13" } }
let(:es_message_3) { { timestamp: "2019-12-13T14:35:36.034Z", message: "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13" } }
let(:es_message_4) { { timestamp: "2019-12-13T14:35:37.034Z", message: "- -\u003e /" } }
let(:es_response) { JSON.parse(fixture_file('lib/elasticsearch/logs_response.json')) }
subject { described_class.new(client) }
let(:namespace) { "autodevops-deploy-9-production" }
let(:pod_name) { "production-6866bc8974-m4sk4" }
let(:container_name) { "auto-deploy-app" }
let(:search) { "foo +bar "}
let(:start_time) { "2019-12-13T14:35:34.034Z" }
let(:end_time) { "2019-12-13T14:35:34.034Z" }
let(:body) { JSON.parse(fixture_file('lib/elasticsearch/query.json')) }
let(:body_with_container) { JSON.parse(fixture_file('lib/elasticsearch/query_with_container.json')) }
let(:body_with_search) { JSON.parse(fixture_file('lib/elasticsearch/query_with_search.json')) }
let(:body_with_times) { JSON.parse(fixture_file('lib/elasticsearch/query_with_times.json')) }
let(:body_with_start_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) }
let(:body_with_end_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) }
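# Custom matcher: succeeds when the hash, serialized with #as_json, equals the parsed JSON fixture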
RSpec::Matchers.define :a_hash_equal_to_json do |expected|
match do |actual|
actual.as_json == expected
end
end
describe '#pod_logs' do
it 'returns the logs as an array' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
end
it 'can further filter the logs by container name' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_container)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, container_name)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
end
it 'can further filter the logs by search' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_search)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, nil, search)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
end
it 'can further filter the logs by start_time and end_time' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_times)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, nil, nil, start_time, end_time)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
end
it 'can further filter the logs by only start_time' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_start_time)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, nil, nil, start_time)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
end
it 'can further filter the logs by only end_time' do
expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_end_time)).and_return(es_response)
result = subject.pod_logs(namespace, pod_name, nil, nil, nil, end_time)
expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
end
end
end
@@ -1266,4 +1266,39 @@ describe Environment, :use_clean_rails_memory_store_caching do
expect(env).to be_persisted
end
end
describe '#elastic_stack_available?' do
let!(:cluster) { create(:cluster, :project, :provided_by_user, projects: [project]) }
let!(:deployment) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) }
context 'when app does not exist' do
it 'returns false' do
expect(environment.elastic_stack_available?).to be(false)
end
end
context 'when app exists' do
let!(:application) { create(:clusters_applications_elastic_stack, cluster: cluster) }
it 'returns false' do
expect(environment.elastic_stack_available?).to be(false)
end
end
context 'when app is installed' do
let!(:application) { create(:clusters_applications_elastic_stack, :installed, cluster: cluster) }
it 'returns true' do
expect(environment.elastic_stack_available?).to be(true)
end
end
context 'when app is updated' do
let!(:application) { create(:clusters_applications_elastic_stack, :updated, cluster: cluster) }
it 'returns true' do
expect(environment.elastic_stack_available?).to be(true)
end
end
end
end
@@ -26,44 +26,6 @@ describe SnippetRepository do
end
end
 
-describe '#create_file' do
-let(:snippet) { create(:personal_snippet, :empty_repo, author: user) }
-it 'creates the file' do
-snippet_repository.create_file(user, 'foo', 'bar', commit_opts)
-blob = first_blob(snippet)
-aggregate_failures do
-expect(blob).not_to be_nil
-expect(blob.path).to eq 'foo'
-expect(blob.data).to eq 'bar'
-end
-end
-it 'fills the file path if empty' do
-snippet_repository.create_file(user, nil, 'bar', commit_opts)
-blob = first_blob(snippet)
-aggregate_failures do
-expect(blob).not_to be_nil
-expect(blob.path).to eq 'snippetfile1.txt'
-expect(blob.data).to eq 'bar'
-end
-end
-context 'when the file exists' do
-let(:snippet) { create(:personal_snippet, :repository, author: user) }
-it 'captures the git exception and raises a SnippetRepository::CommitError' do
-existing_blob = first_blob(snippet)
-expect do
-snippet_repository.create_file(user, existing_blob.path, existing_blob.data, commit_opts)
-end.to raise_error described_class::CommitError
-end
-end
-end
describe '#multi_files_action' do
let(:new_file) { { file_path: 'new_file_test', content: 'bar' } }
let(:move_file) { { previous_path: 'CHANGELOG', file_path: 'CHANGELOG_new', content: 'bar' } }
@@ -3,6 +3,8 @@
require 'spec_helper'
 
describe EnvironmentEntity do
include Gitlab::Routing.url_helpers
let(:request) { double('request') }
let(:entity) do
described_class.new(environment, request: spy('request'))
@@ -71,4 +73,22 @@ describe EnvironmentEntity do
expect(subject).to include(:cancel_auto_stop_path, :auto_stop_at)
end
end
context 'pod_logs' do
it 'exposes logs keys' do
expect(subject).to include(:logs_path)
expect(subject).to include(:logs_api_path)
expect(subject).to include(:enable_advanced_logs_querying)
end
it 'uses k8s api when ES is not available' do
expect(subject[:logs_api_path]).to eq(k8s_project_logs_path(environment.project, environment_name: environment.name, format: :json))
end
it 'uses ES api when ES is available' do
allow(environment).to receive(:elastic_stack_available?).and_return(true)
expect(subject[:logs_api_path]).to eq(elasticsearch_project_logs_path(environment.project, environment_name: environment.name, format: :json))
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe ::PodLogs::BaseService do
include KubernetesHelpers
let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
let(:namespace) { 'autodevops-deploy-9-production' }
let(:pod_name) { 'pod-1' }
let(:container_name) { 'container-0' }
let(:params) { {} }
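# kube_pod fixture parsed into OpenStructs so it behaves like Kubeclient resources in these specs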
let(:raw_pods) do
JSON.parse([
kube_pod(name: pod_name)
].to_json, object_class: OpenStruct)
end
subject { described_class.new(cluster, namespace, params: params) }
describe '#initialize' do
let(:params) do
{
'container_name' => container_name,
'another_param' => 'foo'
}
end
it 'filters the parameters' do
expect(subject.cluster).to eq(cluster)
expect(subject.namespace).to eq(namespace)
expect(subject.params).to eq({
'container_name' => container_name
})
expect(subject.params.equal?(params)).to be(false)
end
end
describe '#check_arguments' do
context 'when cluster and namespace are provided' do
it 'returns success' do
result = subject.send(:check_arguments, {})
expect(result[:status]).to eq(:success)
end
end
context 'when cluster is nil' do
let(:cluster) { nil }
it 'returns an error' do
result = subject.send(:check_arguments, {})
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Cluster does not exist')
end
end
context 'when namespace is nil' do
let(:namespace) { nil }
it 'returns an error' do
result = subject.send(:check_arguments, {})
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Namespace is empty')
end
end
context 'when namespace is empty' do
let(:namespace) { '' }
it 'returns an error' do
result = subject.send(:check_arguments, {})
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Namespace is empty')
end
end
end
describe '#check_param_lengths' do
context 'when pod_name and container_name are provided' do
let(:params) do
{
'pod_name' => pod_name,
'container_name' => container_name
}
end
it 'returns success' do
result = subject.send(:check_param_lengths, {})
expect(result[:status]).to eq(:success)
expect(result[:pod_name]).to eq(pod_name)
expect(result[:container_name]).to eq(container_name)
end
end
context 'when pod_name is too long' do
let(:params) do
{
'pod_name' => "a very long string." * 15
}
end
it 'returns an error' do
result = subject.send(:check_param_lengths, {})
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('pod_name cannot be larger than 253 chars')
end
end
context 'when container_name is too long' do
let(:params) do
{
'container_name' => "a very long string." * 15
}
end
it 'returns an error' do
result = subject.send(:check_param_lengths, {})
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('container_name cannot be larger than 253 chars')
end
end
end
describe '#get_raw_pods' do
let(:service) { create(:cluster_platform_kubernetes, :configured) }
it 'returns success with passthrough k8s response' do
stub_kubeclient_pods(namespace)
result = subject.send(:get_raw_pods, {})
expect(result[:status]).to eq(:success)
expect(result[:raw_pods].first).to be_a(Kubeclient::Resource)
end
end
describe '#get_pod_names' do
it 'returns success with a list of pods' do
result = subject.send(:get_pod_names, raw_pods: raw_pods)
expect(result[:status]).to eq(:success)
expect(result[:pods]).to eq([pod_name])
end
end
describe '#check_pod_name' do
it 'returns success if pod_name was specified' do
result = subject.send(:check_pod_name, pod_name: pod_name, pods: [pod_name])
expect(result[:status]).to eq(:success)
expect(result[:pod_name]).to eq(pod_name)
end
it 'returns success if pod_name was not specified but there are pods' do
result = subject.send(:check_pod_name, pod_name: nil, pods: [pod_name])
expect(result[:status]).to eq(:success)
expect(result[:pod_name]).to eq(pod_name)
end
it 'returns error if pod_name was not specified and there are no pods' do
result = subject.send(:check_pod_name, pod_name: nil, pods: [])
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('No pods available')
end
it 'returns error if pod_name was specified but does not exist' do
result = subject.send(:check_pod_name, pod_name: 'another_pod', pods: [pod_name])
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Pod does not exist')
end
end
describe '#check_container_name' do
it 'returns success if container_name was specified' do
result = subject.send(:check_container_name,
container_name: container_name,
pod_name: pod_name,
raw_pods: raw_pods
)
expect(result[:status]).to eq(:success)
expect(result[:container_name]).to eq(container_name)
end
it 'returns success if container_name was not specified and there are containers' do
result = subject.send(:check_container_name,
pod_name: pod_name,
raw_pods: raw_pods
)
expect(result[:status]).to eq(:success)
expect(result[:container_name]).to eq(container_name)
end
it 'returns error if container_name was not specified and there are no containers on the pod' do
raw_pods.first.spec.containers = []
result = subject.send(:check_container_name,
pod_name: pod_name,
raw_pods: raw_pods
)
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('No containers available')
end
it 'returns error if container_name was specified but does not exist' do
result = subject.send(:check_container_name,
container_name: 'foo',
pod_name: pod_name,
raw_pods: raw_pods
)
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Container does not exist')
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe ::PodLogs::ElasticsearchService do
let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
let(:namespace) { 'autodevops-deploy-9-production' }
let(:pod_name) { 'pod-1' }
let(:container_name) { 'container-1' }
let(:search) { 'foo -bar' }
let(:start_time) { '2019-01-02T12:13:14+02:00' }
let(:end_time) { '2019-01-03T12:13:14+02:00' }
let(:params) { {} }
let(:expected_logs) do
[
{ message: "Log 1", timestamp: "2019-12-13T14:04:22.123456Z" },
{ message: "Log 2", timestamp: "2019-12-13T14:04:23.123456Z" },
{ message: "Log 3", timestamp: "2019-12-13T14:04:24.123456Z" }
]
end
subject { described_class.new(cluster, namespace, params: params) }
describe '#check_times' do
context 'with start and end provided and valid' do
let(:params) do
{
'start' => start_time,
'end' => end_time
}
end
it 'returns success with times' do
result = subject.send(:check_times, {})
expect(result[:status]).to eq(:success)
expect(result[:start]).to eq(start_time)
expect(result[:end]).to eq(end_time)
end
end
context 'with start and end not provided' do
let(:params) do
{}
end
it 'returns success with nothing else' do
result = subject.send(:check_times, {})
expect(result.keys.length).to eq(1)
expect(result[:status]).to eq(:success)
end
end
context 'with start valid and end invalid' do
let(:params) do
{
'start' => start_time,
'end' => 'invalid date'
}
end
it 'returns error' do
result = subject.send(:check_times, {})
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Invalid start or end time format')
end
end
context 'with start invalid and end valid' do
let(:params) do
{
'start' => 'invalid date',
'end' => end_time
}
end
it 'returns error' do
result = subject.send(:check_times, {})
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Invalid start or end time format')
end
end
end
describe '#check_search' do
context 'with search provided and valid' do
let(:params) do
{
'search' => search
}
end
it 'returns success with search' do
result = subject.send(:check_search, {})
expect(result[:status]).to eq(:success)
expect(result[:search]).to eq(search)
end
end
context 'with search not provided' do
let(:params) do
{}
end
it 'returns success with nothing else' do
result = subject.send(:check_search, {})
expect(result.keys.length).to eq(1)
expect(result[:status]).to eq(:success)
end
end
end
describe '#pod_logs' do
let(:result_arg) do
{
pod_name: pod_name,
container_name: container_name,
search: search,
start: start_time,
end: end_time
}
end
before do
create(:clusters_applications_elastic_stack, :installed, cluster: cluster)
end
it 'returns the logs' do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
.to receive(:pod_logs)
.with(namespace, pod_name, container_name, search, start_time, end_time)
.and_return(expected_logs)
result = subject.send(:pod_logs, result_arg)
expect(result[:status]).to eq(:success)
expect(result[:logs]).to eq(expected_logs)
end
it 'returns an error when ES is unreachable' do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(nil)
result = subject.send(:pod_logs, result_arg)
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Unable to connect to Elasticsearch')
end
it 'handles server errors from elasticsearch' do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
.to receive(:pod_logs)
.and_raise(Elasticsearch::Transport::Transport::Errors::ServiceUnavailable.new)
result = subject.send(:pod_logs, result_arg)
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Elasticsearch returned status code: ServiceUnavailable')
end
end
end
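# --- Illustration only: hedged sketches of the check_times and pod_logs steps the
# Elasticsearch specs above exercise. The real service may differ; `params`,
# `cluster.application_elastic_stack`, the ::Gitlab::Elasticsearch::Logs constructor,
# and the success/error helpers (sketched after the previous spec) are assumptions.
def check_times(result)
  result[:start] = params['start'] if params.key?('start')
  result[:end] = params['end'] if params.key?('end')

  # Any supplied value must be a valid ISO8601 timestamp.
  [result[:start], result[:end]].compact.each { |time| Time.iso8601(time) }

  success(result)
rescue ArgumentError
  error('Invalid start or end time format')
end

def pod_logs(result)
  client = cluster.application_elastic_stack&.elasticsearch_client
  return error('Unable to connect to Elasticsearch') unless client

  result[:logs] = ::Gitlab::Elasticsearch::Logs.new(client).pod_logs(
    namespace, result[:pod_name], result[:container_name],
    result[:search], result[:start], result[:end]
  )

  success(result)
rescue Elasticsearch::Transport::Transport::ServerError => e
  # e.g. Errors::ServiceUnavailable => "ServiceUnavailable"
  error("Elasticsearch returned status code: #{e.class.name.demodulize}")
end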
# frozen_string_literal: true
require 'spec_helper'
describe ::PodLogs::KubernetesService do
include KubernetesHelpers
let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
let(:namespace) { 'autodevops-deploy-9-production' }
let(:pod_name) { 'pod-1' }
let(:container_name) { 'container-1' }
let(:params) { {} }
let(:raw_logs) do
"2019-12-13T14:04:22.123456Z Log 1\n2019-12-13T14:04:23.123456Z Log 2\n" \
"2019-12-13T14:04:24.123456Z Log 3"
end
subject { described_class.new(cluster, namespace, params: params) }
describe '#pod_logs' do
let(:result_arg) do
{
pod_name: pod_name,
container_name: container_name
}
end
let(:expected_logs) { raw_logs }
let(:service) { create(:cluster_platform_kubernetes, :configured) }
it 'returns the logs' do
stub_kubeclient_logs(pod_name, namespace, container: container_name)
result = subject.send(:pod_logs, result_arg)
expect(result[:status]).to eq(:success)
expect(result[:logs]).to eq(expected_logs)
end
it 'handles Not Found errors from k8s' do
allow_any_instance_of(Gitlab::Kubernetes::KubeClient)
.to receive(:get_pod_log)
.with(any_args)
.and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not Found', {}))
result = subject.send(:pod_logs, result_arg)
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Pod not found')
end
it 'handles HTTP errors from k8s' do
allow_any_instance_of(Gitlab::Kubernetes::KubeClient)
.to receive(:get_pod_log)
.with(any_args)
.and_raise(Kubeclient::HttpError.new(500, 'Error', {}))
result = subject.send(:pod_logs, result_arg)
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Kubernetes API returned status code: 500')
end
end
describe '#encode_logs_to_utf8', :aggregate_failures do
let(:service) { create(:cluster_platform_kubernetes, :configured) }
let(:expected_logs) { '2019-12-13T14:04:22.123456Z ✔ Started logging errors to Sentry' }
let(:raw_logs) { expected_logs.dup.force_encoding(Encoding::ASCII_8BIT) }
let(:result) { subject.send(:encode_logs_to_utf8, result_arg) }
let(:result_arg) do
{
pod_name: pod_name,
container_name: container_name,
logs: raw_logs
}
end
it 'converts logs to utf-8' do
expect(result[:status]).to eq(:success)
expect(result[:logs]).to eq(expected_logs)
end
it 'returns error if output of encoding helper is blank' do
allow(Gitlab::EncodingHelper).to receive(:encode_utf8).and_return('')
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
end
it 'returns error if output of encoding helper is nil' do
allow(Gitlab::EncodingHelper).to receive(:encode_utf8).and_return(nil)
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
end
it 'returns error if output of encoding helper is not UTF-8' do
allow(Gitlab::EncodingHelper).to receive(:encode_utf8)
.and_return(expected_logs.encode(Encoding::UTF_16BE))
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
end
context 'when logs are nil' do
let(:raw_logs) { nil }
let(:expected_logs) { nil }
it 'returns nil' do
expect(result[:status]).to eq(:success)
expect(result[:logs]).to eq(expected_logs)
end
end
context 'when logs are blank' do
let(:raw_logs) { (+'').force_encoding(Encoding::ASCII_8BIT) }
let(:expected_logs) { '' }
it 'returns blank string' do
expect(result[:status]).to eq(:success)
expect(result[:logs]).to eq(expected_logs)
end
end
context 'when logs are already in utf-8' do
let(:raw_logs) { expected_logs }
it 'does not fail' do
expect(result[:status]).to eq(:success)
expect(result[:logs]).to eq(expected_logs)
end
end
end
describe '#split_logs' do
let(:service) { create(:cluster_platform_kubernetes, :configured) }
let(:expected_logs) do
[
{ message: "Log 1", timestamp: "2019-12-13T14:04:22.123456Z" },
{ message: "Log 2", timestamp: "2019-12-13T14:04:23.123456Z" },
{ message: "Log 3", timestamp: "2019-12-13T14:04:24.123456Z" }
]
end
let(:result_arg) do
{
pod_name: pod_name,
container_name: container_name,
logs: raw_logs
}
end
it 'returns the logs' do
result = subject.send(:split_logs, result_arg)
aggregate_failures do
expect(result[:status]).to eq(:success)
expect(result[:logs]).to eq(expected_logs)
end
end
end
end
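# --- Illustration only: a minimal sketch, based solely on the expectations above, of
# how the Kubernetes service could post-process raw pod logs. The exact regular
# expression and the success/error helpers (sketched after the first spec) are
# assumptions, not the shipped implementation.
TIMESTAMPED_LINE = /\A(?<timestamp>\S+) (?<message>.*)\z/.freeze

def encode_logs_to_utf8(result)
  # nil and empty logs pass straight through unchanged.
  return success(result) if result[:logs].nil? || result[:logs] == ''

  encoded = Gitlab::EncodingHelper.encode_utf8(result[:logs])

  unless encoded.is_a?(String) && !encoded.empty? && encoded.encoding == Encoding::UTF_8
    return error('Unable to convert Kubernetes logs encoding to UTF-8')
  end

  result[:logs] = encoded
  success(result)
end

def split_logs(result)
  # "2019-12-13T14:04:22.123456Z Log 1" => { message: "Log 1", timestamp: "2019-12-13T14:04:22.123456Z" }
  result[:logs] = result[:logs].lines.map do |line|
    match = TIMESTAMPED_LINE.match(line.chomp)
    { message: match[:message], timestamp: match[:timestamp] }
  end

  success(result)
end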
@@ -82,7 +82,7 @@ Capybara.enable_aria_label = true
Capybara::Screenshot.append_timestamp = false
 
Capybara::Screenshot.register_filename_prefix_formatter(:rspec) do |example|
::File.join(QA::Runtime::Namespace.name, example.full_description.downcase.parameterize(separator: "_")[0..99])
example.full_description.downcase.parameterize(separator: "_")[0..99]
end
# Keep only the screenshots generated from the last failing test suite
Capybara::Screenshot.prune_strategy = :keep_last_run
@@ -801,10 +801,10 @@
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.110.0.tgz#3c4f5f0e78fcf616ec63a265754158b84ed80af8"
integrity sha512-bLVUW9Hj6j7zTdeoQELO3Bls5xDKr6AoSEU8gZbEZKLK9PV81hxRl/lJPJUo1qt4E7eJGapCTlH73tTIL4OZ3A==
 
"@gitlab/ui@^9.23.0":
version "9.23.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-9.23.0.tgz#0ad0232c529d1f8a386c8e86159e273111a55686"
integrity sha512-1VOob5tNPB3zjLHeTuMbQBMG3q6LF36iCq6XqH5eeYzpAI42zj/WhY5T47RKrfvlkflWRSUPTarGo97pQqIKzg==
"@gitlab/ui@^9.23.1":
version "9.23.1"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-9.23.1.tgz#791d0c8a6762b1dd73ed686326c1dfb3f0c7b987"
integrity sha512-7bGcV2W6qh/KK423W/vasv+S6myWJMD1tyMr5MBz1WQRg/B3eUlpr4HbjQXmtALRWiWkag8GMI/HSy0rby4WrA==
dependencies:
"@babel/standalone" "^7.0.0"
"@gitlab/vue-toasted" "^1.3.0"