Skip to content
Snippets Groups Projects
Commit 05f4b2fb authored by GitLab Bot's avatar GitLab Bot
Browse files

Add latest changes from gitlab-org/gitlab@master

parent 9e27f0d9
No related branches found
No related tags found
No related merge requests found
Showing
with 194 additions and 67 deletions
Loading
Loading
@@ -8,6 +8,7 @@ stages:
- review
- qa
- post-test
- notification
- pages
 
variables:
Loading
Loading
@@ -27,11 +28,12 @@ after_script:
- date
 
include:
- local: .gitlab/ci/global.gitlab-ci.yml
- local: .gitlab/ci/cng.gitlab-ci.yml
- local: .gitlab/ci/docs.gitlab-ci.yml
- local: .gitlab/ci/frontend.gitlab-ci.yml
- local: .gitlab/ci/global.gitlab-ci.yml
- local: .gitlab/ci/memory.gitlab-ci.yml
- local: .gitlab/ci/notifications.gitlab-ci.yml
- local: .gitlab/ci/pages.gitlab-ci.yml
- local: .gitlab/ci/qa.gitlab-ci.yml
- local: .gitlab/ci/reports.gitlab-ci.yml
Loading
Loading
Loading
Loading
@@ -6,8 +6,8 @@
/doc/ @axil @marcia @eread @mikelewis
 
# Frontend maintainers should see everything in `app/assets/`
app/assets/ @ClemMakesApps @fatihacet @filipa @mikegreiling @timzallmann @kushalpandya @pslaughter
*.scss @annabeldunstone @ClemMakesApps @fatihacet @filipa @mikegreiling @timzallmann @kushalpandya @pslaughter
app/assets/ @ClemMakesApps @fatihacet @filipa @mikegreiling @timzallmann @kushalpandya @pslaughter @wortschi
*.scss @annabeldunstone @ClemMakesApps @fatihacet @filipa @mikegreiling @timzallmann @kushalpandya @pslaughter @wortschi
 
# Database maintainers should review changes in `db/`
db/ @gitlab-org/maintainers/database
Loading
Loading
Loading
Loading
@@ -110,6 +110,12 @@
- $CI_SERVER_HOST == "gitlab.com" && $CI_PROJECT_NAMESPACE == "gitlab-org"
kubernetes: active
 
# Hidden job template: limits a job to pipelines triggered by a schedule
# on the canonical projects (gitlab-org/gitlab and gitlab-org/gitlab-foss),
# so forks and non-scheduled pipelines never run it.
.only-canonical-schedules:
  only:
    refs:
      - schedules@gitlab-org/gitlab
      - schedules@gitlab-org/gitlab-foss
.use-pg9:
services:
- name: postgres:9.6
Loading
Loading
# Hidden base template for Slack notification jobs: small alpine image,
# no artifacts from earlier jobs (dependencies: []) and no cache, and a
# before_script that installs the tools the notify script needs.
.notify:
  image: alpine
  stage: notification
  dependencies: []
  cache: {}
  before_script:
    - apk update && apk add git curl bash
# Posts a success message to the qa-master Slack channel when the
# scheduled package-and-qa job passes (when: on_success) on the
# canonical scheduled pipelines.
schedule:package-and-qa:notify-success:
  extends:
    - .only-canonical-schedules
    - .notify
  script:
    - 'scripts/notify-slack qa-master ":tada: Scheduled QA against `master` passed! :tada: See $CI_PIPELINE_URL." ci_passing'
  needs: ["schedule:package-and-qa"]
  when: on_success
# Counterpart of notify-success: posts a failure message to the
# qa-master Slack channel when the scheduled package-and-qa job
# fails (when: on_failure).
schedule:package-and-qa:notify-failure:
  extends:
    - .only-canonical-schedules
    - .notify
  script:
    - 'scripts/notify-slack qa-master ":skull_and_crossbones: Scheduled QA against `master` failed! :skull_and_crossbones: See $CI_PIPELINE_URL." ci_failing'
  needs: ["schedule:package-and-qa"]
  when: on_failure
Loading
Loading
@@ -38,9 +38,5 @@ schedule:package-and-qa:
extends:
- .package-and-qa-base
- .only-code-qa-changes
only:
refs:
- schedules@gitlab-org/gitlab
- schedules@gitlab-org/gitlab-foss
- .only-canonical-schedules
needs: ["build-qa-image", "gitlab:assets:compile"]
allow_failure: true
Loading
Loading
@@ -26,8 +26,7 @@ export default {
 
if (log.append) {
if (isNewJobLogActive()) {
state.originalTrace = state.originalTrace.concat(log.trace);
state.trace = updateIncrementalTrace(state.originalTrace, state.trace, log.lines);
state.trace = updateIncrementalTrace(log.lines, state.trace);
} else {
state.trace += log.html;
}
Loading
Loading
@@ -38,7 +37,6 @@ export default {
// html or size. We keep the old value otherwise these
// will be set to `undefined`
if (isNewJobLogActive()) {
state.originalTrace = log.lines || state.trace;
state.trace = logLinesParser(log.lines) || state.trace;
} else {
state.trace = log.html || state.trace;
Loading
Loading
Loading
Loading
@@ -19,7 +19,6 @@ export default () => ({
isScrolledToBottomBeforeReceivingTrace: true,
 
trace: isNewJobLogActive() ? [] : '',
originalTrace: [],
isTraceComplete: false,
traceSize: 0,
isTraceSizeVisible: false,
Loading
Loading
Loading
Loading
@@ -63,6 +63,30 @@ export const isCollapsibleSection = (acc = [], last = {}, section = {}) =>
!section.section_duration &&
section.section === last.line.section;
 
/**
 * Computes the lineNumber the next incremental log chunk should start at,
 * by looking at the last entry of an already-parsed log.
 *
 * The last entry may be a header whose most recent line lives in its
 * nested `lines` array, a header with no nested lines yet (its own
 * `line`), or a plain parsed line.
 *
 * @param Array acc parsed log lines (must be non-empty)
 * @returns Number one past the last used lineNumber (minimum 1)
 */
export const getIncrementalLineNumber = acc => {
  const tail = acc[acc.length - 1];
  const children = tail.lines;

  let previous;
  if (tail.isHeader && children.length) {
    // Header with nested lines: continue after the last nested line.
    previous = children[children.length - 1].lineNumber;
  } else if (tail.isHeader && tail.line) {
    // Header with no nested lines yet: continue after the header itself.
    previous = tail.line.lineNumber;
  } else {
    // Plain line at the top level.
    previous = tail.lineNumber;
  }

  return previous === 0 ? 1 : previous + 1;
};
/**
* Parses the job log content into a structure usable by the template
*
Loading
Loading
@@ -75,32 +99,35 @@ export const isCollapsibleSection = (acc = [], last = {}, section = {}) =>
* - adds the index as lineNumber
*
* @param Array lines
* @param Number lineNumberStart
* @param Array accumulator
* @returns Array parsed log lines
*/
export const logLinesParser = (lines = [], lineNumberStart, accumulator = []) =>
lines.reduce((acc, line, index) => {
const lineNumber = lineNumberStart ? lineNumberStart + index : index;
const last = acc[acc.length - 1];
// If the object is an header, we parse it into another structure
if (line.section_header) {
acc.push(parseHeaderLine(line, lineNumber));
} else if (isCollapsibleSection(acc, last, line)) {
// if the object belongs to a nested section, we append it to the new `lines` array of the
// previously formated header
last.lines.push(parseLine(line, lineNumber));
} else if (line.section_duration) {
// if the line has section_duration, we look for the correct header to add it
addDurationToHeader(acc, line);
} else {
// otherwise it's a regular line
acc.push(parseLine(line, lineNumber));
}
export const logLinesParser = (lines = [], accumulator = []) =>
lines.reduce(
(acc, line, index) => {
const lineNumber = accumulator.length > 0 ? getIncrementalLineNumber(acc) : index;
const last = acc[acc.length - 1];
// If the object is an header, we parse it into another structure
if (line.section_header) {
acc.push(parseHeaderLine(line, lineNumber));
} else if (isCollapsibleSection(acc, last, line)) {
// if the object belongs to a nested section, we append it to the new `lines` array of the
// previously formated header
last.lines.push(parseLine(line, lineNumber));
} else if (line.section_duration) {
// if the line has section_duration, we look for the correct header to add it
addDurationToHeader(acc, line);
} else {
// otherwise it's a regular line
acc.push(parseLine(line, lineNumber));
}
 
return acc;
}, accumulator);
return acc;
},
[...accumulator],
);
 
/**
* Finds the repeated offset, removes the old one
Loading
Loading
@@ -113,7 +140,7 @@ export const logLinesParser = (lines = [], lineNumberStart, accumulator = []) =>
* @returns Array
*
*/
export const findOffsetAndRemove = (newLog, oldParsed) => {
export const findOffsetAndRemove = (newLog = [], oldParsed = []) => {
const cloneOldLog = [...oldParsed];
const lastIndex = cloneOldLog.length - 1;
const last = cloneOldLog[lastIndex];
Loading
Loading
@@ -140,40 +167,13 @@ export const findOffsetAndRemove = (newLog, oldParsed) => {
* We need to check if that is the case by looking for the offset property
* before parsing the incremental part
*
* @param array originalTrace
* @param array oldLog
* @param array newLog
*/
export const updateIncrementalTrace = (originalTrace = [], oldLog = [], newLog = []) => {
const firstLine = newLog[0];
const firstLineOffset = firstLine.offset;
export const updateIncrementalTrace = (newLog, oldParsed = []) => {
const parsedLog = findOffsetAndRemove(newLog, oldParsed);
 
// We are going to return a new array,
// let's make a shallow copy to make sure we
// are not updating the state outside of a mutation first.
const cloneOldLog = [...oldLog];
const lastIndex = cloneOldLog.length - 1;
const lastLine = cloneOldLog[lastIndex];
// The last line may be inside a collpasible section
// If it is, we use the not parsed saved log, remove the last element
// and parse the first received part togheter with the incremental log
if (
lastLine.isHeader &&
(lastLine.line.offset === firstLineOffset ||
(lastLine.lines.length &&
lastLine.lines[lastLine.lines.length - 1].offset === firstLineOffset))
) {
const cloneOriginal = [...originalTrace];
cloneOriginal.splice(cloneOriginal.length - 1);
return logLinesParser(cloneOriginal.concat(newLog));
} else if (lastLine.offset === firstLineOffset) {
cloneOldLog.splice(lastIndex);
return cloneOldLog.concat(logLinesParser(newLog, cloneOldLog.length));
}
// there are no matches, let's parse the new log and return them together
return cloneOldLog.concat(logLinesParser(newLog, cloneOldLog.length));
return logLinesParser(newLog, parsedLog);
};
 
// Feature-flag check for the JSON job log: truthy when `gon.features.jobLogJson`
// is set on the global `gon` object. NOTE(review): returns the raw flag value
// (possibly undefined), not a strict Boolean — callers use it only in boolean context.
export const isNewJobLogActive = () => gon && gon.features && gon.features.jobLogJson;
Loading
Loading
@@ -5,6 +5,7 @@ import tooltip from '~/vue_shared/directives/tooltip';
import Icon from '~/vue_shared/components/icon.vue';
import eventHub from '~/sidebar/event_hub';
import editForm from './edit_form.vue';
import recaptchaModalImplementor from '~/vue_shared/mixins/recaptcha_modal_implementor';
 
export default {
components: {
Loading
Loading
@@ -14,6 +15,7 @@ export default {
directives: {
tooltip,
},
mixins: [recaptchaModalImplementor],
props: {
isConfidential: {
required: true,
Loading
Loading
@@ -54,9 +56,14 @@ export default {
updateConfidentialAttribute(confidential) {
this.service
.update('issue', { confidential })
.then(({ data }) => this.checkForSpam(data))
.then(() => window.location.reload())
.catch(() => {
Flash(__('Something went wrong trying to change the confidentiality of this issue'));
.catch(error => {
if (error.name === 'SpamError') {
this.openRecaptcha();
} else {
Flash(__('Something went wrong trying to change the confidentiality of this issue'));
}
});
},
},
Loading
Loading
@@ -112,5 +119,7 @@ export default {
{{ __('This issue is confidential') }}
</div>
</div>
<recaptcha-modal v-if="showRecaptcha" :html="recaptchaHTML" @close="closeRecaptcha" />
</div>
</template>
Loading
Loading
@@ -32,6 +32,10 @@ export default {
 
mounted() {
eventHub.$on('submit', this.submit);
if (this.html) {
this.appendRecaptchaScript();
}
},
 
beforeDestroy() {
Loading
Loading
# frozen_string_literal: true
module Evidences
  # Serializes an author for release-evidence JSON.
  # Exposes only id, name and email.
  class AuthorEntity < Grape::Entity
    expose :id
    expose :name
    expose :email
  end
end
# frozen_string_literal: true
module Evidences
  # Serializes an issue for release-evidence JSON.
  # Nests the issue's author via AuthorEntity.
  class IssueEntity < Grape::Entity
    expose :id
    expose :title
    expose :description
    expose :author, using: AuthorEntity
    expose :state
    expose :iid
    expose :confidential
    expose :created_at
    expose :due_date
  end
end
# frozen_string_literal: true
module Evidences
  # Serializes a milestone for release-evidence JSON.
  # Nests the milestone's issues via IssueEntity.
  class MilestoneEntity < Grape::Entity
    expose :id
    expose :title
    expose :description
    expose :state
    expose :iid
    expose :created_at
    expose :due_date
    expose :issues, using: IssueEntity
  end
end
# frozen_string_literal: true
module Evidences
  # Serializes a project for release-evidence JSON.
  # Exposes only basic identifying attributes.
  class ProjectEntity < Grape::Entity
    expose :id
    expose :name
    expose :description
    expose :created_at
  end
end
# frozen_string_literal: true
module Evidences
  # Serializes a release for release-evidence JSON: the root entity of the
  # evidence payload. Renames `tag` to `tag_name` in the output and nests
  # the project and milestones via their entities.
  class ReleaseEntity < Grape::Entity
    expose :id
    expose :tag, as: :tag_name
    expose :name
    expose :description
    expose :created_at
    expose :project, using: ProjectEntity
    expose :milestones, using: MilestoneEntity
  end
end
# frozen_string_literal: true
module Evidences
  # Serializer entry point for release evidence: renders a release
  # (with its nested project, milestones and issues) using ReleaseEntity.
  class ReleaseSerializer < BaseSerializer
    entity ReleaseEntity
  end
end
---
title: Fix routing bugs in security dashboards
merge_request: 16738
author:
type: fixed
---
title: 'Geo: Invalidate cache after refreshing foreign tables'
merge_request: 17885
author:
type: fixed
---
title: Display reCAPTCHA modal when making issue public
merge_request: 17553
author:
type: fixed
Loading
Loading
@@ -850,3 +850,8 @@ To remove the proxy setting, run the following commands (depending on which vari
unset http_proxy
unset https_proxy
```
### Praefect
Praefect is an experimental daemon that allows replication of Git data.
It can be set up with Omnibus, [as explained here](./praefect.md).
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment