Commit b452bdea authored by Tim Zallmann

Actual MR diff displayed

parent f62359c2
Showing 195 additions and 339 deletions
@@ -13,6 +13,10 @@ const Api = {
mergeRequestPath: '/api/:version/projects/:id/merge_requests/:mrid',
mergeRequestChangesPath:
'/api/:version/projects/:id/merge_requests/:mrid/changes',
mergeRequestVersionsPath:
'/api/:version/projects/:id/merge_requests/:mrid/versions',
mergeRequestVersionPath:
'/api/:version/projects/:id/merge_requests/:mrid/version/:versionid',
groupLabelsPath: '/groups/:namespace_path/-/labels',
licensePath: '/api/:version/templates/licenses/:key',
gitignorePath: '/api/:version/templates/gitignores/:key',
@@ -109,7 +113,6 @@ const Api = {
return axios.get(url);
},
 
// Return Merge Request Changes
mergeRequestChanges(projectPath, mergeRequestId) {
const url = Api.buildUrl(Api.mergeRequestChangesPath)
.replace(':id', encodeURIComponent(projectPath))
@@ -118,6 +121,23 @@ const Api = {
return axios.get(url);
},
 
mergeRequestVersions(projectPath, mergeRequestId) {
const url = Api.buildUrl(Api.mergeRequestVersionsPath)
.replace(':id', encodeURIComponent(projectPath))
.replace(':mrid', mergeRequestId);
return axios.get(url);
},
mergeRequestVersion(projectPath, mergeRequestId, versionId) {
const url = Api.buildUrl(Api.mergeRequestVersionPath)
.replace(':id', encodeURIComponent(projectPath))
.replace(':mrid', mergeRequestId)
.replace(':versionid', versionId);
return axios.get(url);
},
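For context, this is how the two new endpoint templates resolve once their placeholders are filled in. The API version ('v4') and the example project and merge request identifiers below are illustrative assumptions, not values taken from this diff:

```javascript
// Hypothetical values; only the substitution pattern mirrors the methods above.
const template = '/api/:version/projects/:id/merge_requests/:mrid/versions';

const url = template
  .replace(':version', 'v4')
  .replace(':id', encodeURIComponent('gitlab-org/gitlab-ce')) // namespaced paths must be URL-encoded
  .replace(':mrid', '42');

// => '/api/v4/projects/gitlab-org%2Fgitlab-ce/merge_requests/42/versions'
```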
newLabel(namespacePath, projectPath, data, callback) {
let url;
 
 <script>
 import icon from '~/vue_shared/components/icon.vue';
 
 export default {
   components: {
     icon,
   },
   props: {
     file: {
       type: Object,
       required: true,
     },
   },
   computed: {
     changedIcon() {
-      if (this.file.tempFile) return 'file-addition';
-      return this.file.changed ? 'file-modified' : 'git-merge';
+      return this.file.tempFile ? 'file-addition' : 'file-modified';
     },
     changedIconClass() {
       return `multi-${this.changedIcon}`;
     },
   },
 };
 </script>
 
 <template>
 <script>
 /* global monaco */
-import { mapState, mapActions } from 'vuex';
+import { mapState, mapGetters, mapActions } from 'vuex';
 import flash from '~/flash';
 import monacoLoader from '../monaco_loader';
 import Editor from '../lib/editor';
@@ -19,6 +19,7 @@ export default {
'viewer',
'delayViewerUpdated',
]),
...mapGetters(['currentMergeRequest']),
shouldHideEditor() {
return this.file && this.file.binary && !this.file.raw;
},
@@ -68,7 +69,12 @@ export default {
 
       this.editor.clearEditor();
 
-      this.getRawFileData(this.file)
+      this.getRawFileData({
+        path: this.file.path,
+        baseSha: this.currentMergeRequest
+          ? this.currentMergeRequest.baseCommitSha
+          : '',
+      })
         .then(() => {
           const viewerPromise = this.delayViewerUpdated
             ? this.updateViewer('editor')
@@ -104,7 +104,7 @@ export default {
       </span>
       <changed-file-icon
         :file="file"
-        v-if="file.changed || file.tempFile"
+        v-if="file.changed || file.tempFile || file.mrDiff"
         class="prepend-top-5 pull-right"
       />
       <new-dropdown
@@ -123,40 +123,37 @@ router.beforeEach((to, from, next) => {
         mergeRequestId: to.params.mrid,
       })
       .then(mrChanges => {
-        if (mrChanges.changes.length > 0) {
-        }
-        mrChanges.changes.forEach((change, ind) => {
-          console.log(`CHANGE : ${ind} : `, change);
-          const changeTreeEntry =
-            store.state.entries[change.new_path];
-          console.log(
-            'Tree Entry for the change ',
-            changeTreeEntry,
-            change.diff,
-          );
-          if (changeTreeEntry) {
-            store.dispatch('setFileMrDiff', {
-              file: changeTreeEntry,
-              mrDiff: change.diff,
-            });
-            store.dispatch('setFileTargetBranch', {
-              file: changeTreeEntry,
-              targetBranch: mrChanges.target_branch,
-            });
-            if (ind === 0) {
-              store.dispatch('getFileData', change.new_path);
-            } else {
-              // TODO : Implement Tab reloading
-              store.dispatch('preloadFileTab', changeTreeEntry);
-            }
-          } else {
-            console.warn(`No Tree Entry for ${change.new_path}`);
-          }
-        });
+        store
+          .dispatch('getMergeRequestVersions', {
+            projectId: fullProjectId,
+            mergeRequestId: to.params.mrid,
+          })
+          .then(() => {
+            mrChanges.changes.forEach((change, ind) => {
+              const changeTreeEntry =
+                store.state.entries[change.new_path];
+              if (changeTreeEntry) {
+                store.dispatch('setFileMrDiff', {
+                  file: changeTreeEntry,
+                  mrDiff: change.diff,
+                });
+                if (ind < 5) {
+                  store.dispatch('getFileData', {
+                    path: change.new_path,
+                    makeFileActive: ind === 0,
+                  });
+                }
+              }
+            });
+          })
+          .catch(e => {
+            flash(
+              'Error while loading the merge request versions. Please try again.',
+            );
+            throw e;
+          });
       })
       .catch(e => {
         flash(
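Condensed, the new routing flow chains the versions request onto the changes request before touching the tree entries. The sketch below paraphrases the added lines; it assumes `mrChanges` resolves from the `getMergeRequestChanges` dispatch above this hunk and omits error handling:

```javascript
store
  .dispatch('getMergeRequestVersions', {
    projectId: fullProjectId,
    mergeRequestId: to.params.mrid,
  })
  .then(() => {
    mrChanges.changes.forEach((change, ind) => {
      const entry = store.state.entries[change.new_path];
      if (!entry) return;

      // Attach the MR diff to the matching tree entry...
      store.dispatch('setFileMrDiff', { file: entry, mrDiff: change.diff });

      // ...and eagerly fetch only the first five changed files,
      // making the very first one the active tab.
      if (ind < 5) {
        store.dispatch('getFileData', {
          path: change.new_path,
          makeFileActive: ind === 0,
        });
      }
    });
  });
```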
@@ -22,10 +22,10 @@ export default class Model {
       )),
     );
 
-    if (this.file.targetBranch) {
+    if (this.file.baseRaw) {
       this.disposable.add(
-        (this.targetModel = this.monaco.editor.createModel(
-          this.file.targetRaw,
+        (this.baseModel = this.monaco.editor.createModel(
+          this.file.baseRaw,
           undefined,
           new this.monaco.Uri(null, null, `target/${this.file.path}`),
         )),
@@ -68,8 +68,8 @@ export default class Model {
     return this.originalModel;
   }
 
-  getTargetModel() {
-    return this.targetModel;
+  getBaseModel() {
+    return this.baseModel;
   }
 
   setValue(value) {
export function revertPatch(source, uniDiff, options = {}) {
if (typeof uniDiff === 'string') {
uniDiff = parsePatch(uniDiff);
}
if (Array.isArray(uniDiff)) {
if (uniDiff.length > 1) {
throw new Error('applyPatch only works with a single input.');
}
uniDiff = uniDiff[0];
}
// Apply the diff to the input
let lines = source.split(/\r\n|[\n\v\f\r\x85]/),
delimiters = source.match(/\r\n|[\n\v\f\r\x85]/g) || [],
hunks = uniDiff.hunks,
compareLine =
options.compareLine ||
((lineNumber, line, operation, patchContent) => line === patchContent),
errorCount = 0,
fuzzFactor = options.fuzzFactor || 0,
minLine = 0,
offset = 0,
removeEOFNL,
addEOFNL;
/**
* Checks if the hunk exactly fits on the provided location
*/
function hunkFits(hunk, toPos) {
for (let j = 0; j < hunk.lines.length; j++) {
let line = hunk.lines[j],
operation = line[0],
content = line.substr(1);
if (operation === ' ' || operation === '-') {
// Context sanity check
if (!compareLine(toPos + 1, lines[toPos], operation, content)) {
errorCount++;
if (errorCount > fuzzFactor) {
return false;
}
}
toPos++;
}
}
return true;
}
// Search best fit offsets for each hunk based on the previous ones
for (let i = 0; i < hunks.length; i++) {
let hunk = hunks[i],
maxLine = lines.length - hunk.oldLines,
localOffset = 0,
toPos = offset + hunk.oldStart - 1;
const iterator = distanceIterator(toPos, minLine, maxLine);
for (; localOffset !== undefined; localOffset = iterator()) {
if (hunkFits(hunk, toPos + localOffset)) {
hunk.offset = offset += localOffset;
break;
}
}
if (localOffset === undefined) {
return false;
}
// Set lower text limit to end of the current hunk, so next ones don't try
// to fit over already patched text
minLine = hunk.offset + hunk.oldStart + hunk.oldLines;
}
// Apply patch hunks
let diffOffset = 0;
for (let i = 0; i < hunks.length; i++) {
let hunk = hunks[i],
toPos = hunk.oldStart + hunk.offset + diffOffset - 1;
diffOffset += hunk.newLines - hunk.oldLines;
if (toPos < 0) {
// Creating a new file
toPos = 0;
}
for (let j = 0; j < hunk.lines.length; j++) {
let line = hunk.lines[j],
operation = line[0],
content = line.substr(1),
delimiter = hunk.linedelimiters[j];
// Turned around the commands to revert the applying
if (operation === ' ') {
toPos++;
} else if (operation === '+') {
lines.splice(toPos, 1);
delimiters.splice(toPos, 1);
/* istanbul ignore else */
} else if (operation === '-') {
lines.splice(toPos, 0, content);
delimiters.splice(toPos, 0, delimiter);
toPos++;
} else if (operation === '\\') {
const previousOperation = hunk.lines[j - 1]
? hunk.lines[j - 1][0]
: null;
if (previousOperation === '+') {
removeEOFNL = true;
} else if (previousOperation === '-') {
addEOFNL = true;
}
}
}
}
// Handle EOFNL insertion/removal
if (removeEOFNL) {
while (!lines[lines.length - 1]) {
lines.pop();
delimiters.pop();
}
} else if (addEOFNL) {
lines.push('');
delimiters.push('\n');
}
for (let _k = 0; _k < lines.length - 1; _k++) {
lines[_k] = lines[_k] + delimiters[_k];
}
return lines.join('');
}
/**
* Utility Function
* @param {*} start
* @param {*} minLine
* @param {*} maxLine
*/
const distanceIterator = function(start, minLine, maxLine) {
let wantForward = true,
backwardExhausted = false,
forwardExhausted = false,
localOffset = 1;
return function iterator() {
if (wantForward && !forwardExhausted) {
if (backwardExhausted) {
localOffset++;
} else {
wantForward = false;
}
// Check if trying to fit beyond text length, and if not, check it fits
// after offset location (or desired location on first iteration)
if (start + localOffset <= maxLine) {
return localOffset;
}
forwardExhausted = true;
}
if (!backwardExhausted) {
if (!forwardExhausted) {
wantForward = true;
}
// Check if trying to fit before text beginning, and if not, check it fits
// before offset location
if (minLine <= start - localOffset) {
return -localOffset++;
}
backwardExhausted = true;
return iterator();
}
// We tried to fit hunk before text beginning and beyond text length, then
// hunk can't fit on the text. Return undefined
};
};
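A minimal usage sketch of the helper above, assuming it is imported alongside `parsePatch` from the `diff` package; the file name, relative path and contents are invented. Note that with the default `compareLine`, hunks containing removals will likely fail the context check (the '-' lines hold pre-image content while the source passed in is the post-image), which is why a caller-supplied `compareLine`, as seen in the `getRawFileData` action elsewhere in this commit, may be needed. The addition-only case below works with the defaults:

```javascript
import { parsePatch } from 'diff';
import { revertPatch } from './revert_patch'; // hypothetical relative path

// A patch that only adds one line after 'hello'.
const patch = `--- a/greeting.txt
+++ b/greeting.txt
@@ -1,1 +1,2 @@
 hello
+world
`;

// Given the *patched* content, revertPatch reconstructs the original content.
const patched = 'hello\nworld\n';
const original = revertPatch(patched, parsePatch(patch));
// original === 'hello\n'; a non-fitting hunk would make revertPatch return false.
```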
@@ -111,7 +111,7 @@ export default class Editor {
 
   attachMergeRequestModel(model) {
     this.instance.setModel({
-      original: model.getTargetModel(),
+      original: model.getBaseModel(),
       modified: model.getModel(),
     });
   }
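For readers unfamiliar with the Monaco side of this, the `{ original, modified }` pair passed to `setModel` above is the standard input of a Monaco diff editor. A standalone sketch, where the container element and file contents are made up for illustration:

```javascript
/* global monaco */
const original = monaco.editor.createModel('const a = 1;\n', 'javascript');
const modified = monaco.editor.createModel('const a = 2;\n', 'javascript');

// A diff editor renders the original (base) model on the left
// and the modified (working copy) model on the right.
const diffEditor = monaco.editor.createDiffEditor(
  document.getElementById('ide-diff-container'), // hypothetical container id
);

diffEditor.setModel({ original, modified });
```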
import Vue from 'vue';
import VueResource from 'vue-resource';
import Api from '~/api';
import { version } from 'punycode';
 
Vue.use(VueResource);
 
@@ -24,6 +25,21 @@ export default {
.get(file.rawPath, { params: { format: 'json' } })
.then(res => res.text());
},
getBaseRawFileData(file, sha) {
if (file.tempFile) {
return Promise.resolve(file.baseRaw);
}
if (file.baseRaw) {
return Promise.resolve(file.baseRaw);
}
return Vue.http
.get(file.rawPath.replace(file.branchId, sha), {
params: { format: 'json' },
})
.then(res => res.text());
},
getProjectData(namespace, project) {
return Api.project(`${namespace}/${project}`);
},
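The branch-to-SHA substitution in `getBaseRawFileData` is easiest to see with concrete values; the paths below are invented for illustration:

```javascript
// Invented example values.
const file = {
  rawPath: '/group/project/raw/feature-branch/src/index.js',
  branchId: 'feature-branch',
};
const baseSha = '1a2b3c4d';

// Same expression as in getBaseRawFileData above:
const baseRawPath = file.rawPath.replace(file.branchId, baseSha);
// => '/group/project/raw/1a2b3c4d/src/index.js'
```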
@@ -33,6 +49,12 @@ export default {
getProjectMergeRequestChanges(projectId, mergeRequestId) {
return Api.mergeRequestChanges(projectId, mergeRequestId);
},
getProjectMergeRequestVersions(projectId, mergeRequestId) {
return Api.mergeRequestVersions(projectId, mergeRequestId);
},
getProjectMergeRequestVersion(projectId, mergeRequestId, versionId) {
return Api.mergeRequestVersion(projectId, mergeRequestId, versionId);
},
getBranchData(projectId, currentBranchId) {
return Api.branchSingle(projectId, currentBranchId);
},
import { normalizeHeaders } from '~/lib/utils/common_utils';
import { parsePatch, applyPatches } from 'diff';
import { revertPatch } from '../../lib/diff/revert_patch';
import flash from '~/flash';
import eventHub from '../../eventhub';
import service from '../../services';
@@ -48,7 +47,10 @@ export const setFileActive = ({ commit, state, getters, dispatch }, path) => {
   commit(types.SET_CURRENT_BRANCH, file.branchId);
 };
 
-export const getFileData = ({ state, commit, dispatch }, path) => {
+export const getFileData = (
+  { state, commit, dispatch },
+  { path, makeFileActive = true },
+) => {
   const file = state.entries[path];
   return new Promise((resolve, reject) => {
     commit(types.TOGGLE_LOADING, { entry: file });
@@ -66,7 +68,7 @@ export const getFileData = ({ state, commit, dispatch }, path) => {
       .then(data => {
         commit(types.SET_FILE_DATA, { data, file });
         commit(types.TOGGLE_FILE_OPEN, path);
-        dispatch('setFileActive', file.path);
+        if (makeFileActive) dispatch('setFileActive', file.path);
         commit(types.TOGGLE_LOADING, { entry: file });
       })
       .catch(err => {
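Since `getFileData` now takes an options object instead of a bare path, callers look like this (the paths are hypothetical):

```javascript
// Open a file and make it the active tab (the default behaviour).
store.dispatch('getFileData', { path: 'src/index.js' });

// Fetch a file's metadata without switching the active tab.
store.dispatch('getFileData', { path: 'src/other.js', makeFileActive: false });
```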
@@ -80,98 +82,38 @@ export const getFileData = ({ state, commit, dispatch }, path) => {
           false,
           true,
         );
         reject(err);
       });
   });
 };
 
-export const preloadFileTab = ({ state, commit, dispatch }, file) => {
-  return new Promise((resolve, reject) => {
-    commit(types.TOGGLE_LOADING, { entry: file });
-    service
-      .getFileData(file.url)
-      .then(data => {
-        commit(types.SET_FILE_DATA, { data, file });
-        commit(types.TOGGLE_FILE_OPEN, file);
-        commit(types.TOGGLE_LOADING, { entry: file });
-      })
-      .catch(() => {
-        commit(types.TOGGLE_LOADING, { entry: file });
-        flash(
-          'Error loading file data. Please try again.',
-          'alert',
-          document,
-          null,
-          false,
-          true,
-        );
-      });
-  });
-};
-
-export const setFileTargetBranch = (
-  { state, commit },
-  { file, targetBranch },
-) => {
-  commit(types.SET_FILE_TARGET_BRANCH, {
-    file,
-    targetBranch,
-    targetRawPath: file.rawPath.replace(file.branchId, targetBranch),
-  });
-};
-
-export const processFileMrDiff = ({ state, commit }, file) => {
-  const patchObj = parsePatch(file.mrDiff);
-  const transformedContent = applyPatch(file.raw, file.mrDiff);
-  debugger;
-};
-
 export const setFileMrDiff = ({ state, commit }, { file, mrDiff }) => {
   commit(types.SET_FILE_MR_DIFF, { file, mrDiff });
 };
 
-export const getRawFileData = ({ commit, dispatch }, file) => {
+export const getRawFileData = (
+  { state, commit, dispatch },
+  { path, baseSha },
+) => {
+  const file = state.entries[path];
   return new Promise((resolve, reject) => {
     service
       .getRawFileData(file)
       .then(raw => {
         commit(types.SET_FILE_RAW_DATA, { file, raw });
         if (file.mrDiff) {
-          const patchObj = parsePatch(file.mrDiff);
-          patchObj[0].hunks.forEach(hunk => {
-            console.log('H ', hunk);
-            /*hunk.lines.forEach((line) => {
-              if (line.substr(0, 1) === '+') {
-                line = '-' + line.substr(1);
-              } else if (line.substr(0, 1) === '-') {
-                line = '+' + line.substr(1);
-              }
-            })*/
-          });
-          console.log('PATCH OBJ : ' + JSON.stringify(patchObj));
-          const transformedContent = revertPatch(raw, patchObj, {
-            compareLine: (lineNumber, line, operation, patchContent) => {
-              const tempLine = line;
-              //line = patchContent;
-              //patchContent = tempLine;
-              if (operation === '-') {
-                operation = '+';
-              } else if (operation === '+') {
-                operation = '-';
-              }
-              console.log(
-                'COMPARE : ' + line + ' - ' + operation + ' - ' + patchContent,
-              );
-              return true;
-            },
-          });
-          console.log('TRANSFORMED : ', transformedContent);
-          commit(types.SET_FILE_TARGET_RAW_DATA, {
-            file,
-            raw: transformedContent,
-          });
-          resolve(raw);
+          service
+            .getBaseRawFileData(file, baseSha)
+            .then(baseRaw => {
+              commit(types.SET_FILE_BASE_RAW_DATA, {
+                file,
+                baseRaw,
+              });
+              resolve(raw);
+            })
+            .catch(e => {
+              reject(e);
+            });
         } else {
           resolve(raw);
         }
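The rewritten `getRawFileData` boils down to this control flow: the working-copy raw content is always fetched, and for files carrying an MR diff the base revision is fetched as well, keyed by the `baseSha` passed in from the component. A condensed, promise-returning paraphrase (not the literal code above):

```javascript
service.getRawFileData(file).then(raw => {
  commit(types.SET_FILE_RAW_DATA, { file, raw });

  if (!file.mrDiff) return raw;

  // Only MR-changed files need the base revision for the diff view.
  return service.getBaseRawFileData(file, baseSha).then(baseRaw => {
    commit(types.SET_FILE_BASE_RAW_DATA, { file, baseRaw });
    return raw;
  });
});
```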
@@ -19,10 +19,7 @@ export const getMergeRequestData = (
           mergeRequest: data,
         });
         if (!state.currentMergeRequestId) {
-          commit(
-            types.SET_CURRENT_MERGE_REQUEST,
-            `${projectId}/${mergeRequestId}`,
-          );
+          commit(types.SET_CURRENT_MERGE_REQUEST, mergeRequestId);
         }
         resolve(data);
       })
@@ -42,7 +39,7 @@ export const getMergeRequestChanges = (
 ) =>
   new Promise((resolve, reject) => {
     if (
-      !state.projects[projectId].mergeRequests[mergeRequestId].changes ||
+      !state.projects[projectId].mergeRequests[mergeRequestId].changes.length ||
       force
     ) {
       service
@@ -65,6 +62,36 @@ export const getMergeRequestChanges = (
}
});
 
export const getMergeRequestVersions = (
{ commit, state, dispatch },
{ projectId, mergeRequestId, force = false } = {},
) =>
new Promise((resolve, reject) => {
if (
!state.projects[projectId].mergeRequests[mergeRequestId].versions
.length ||
force
) {
service
.getProjectMergeRequestVersions(projectId, mergeRequestId)
.then(res => res.data)
.then(data => {
commit(types.SET_MERGE_REQUEST_VERSIONS, {
projectPath: projectId,
mergeRequestId,
versions: data,
});
resolve(data);
})
.catch(() => {
flash('Error loading merge request versions. Please try again.');
reject(new Error(`Merge Request Versions not loaded ${projectId}`));
});
} else {
resolve(state.projects[projectId].mergeRequests[mergeRequestId].versions);
}
});
// eslint-disable-next-line import/prefer-default-export
export const getMergeRequestNotes = (
{ commit, state, dispatch },
@@ -19,7 +19,7 @@ export const handleTreeEntryAction = ({ commit, dispatch }, row) => {
 
     dispatch('setFileActive', row.path);
   } else {
-    dispatch('getFileData', row.path);
+    dispatch('getFileData', { path: row.path });
   }
 };
 
@@ -23,6 +23,11 @@ export const projectsWithTrees = state =>
};
});
 
export const currentMergeRequest = state =>
state.projects[state.currentProjectId].mergeRequests[
state.currentMergeRequestId
];
// eslint-disable-next-line no-confusing-arrow
export const currentIcon = state =>
state.rightPanelCollapsed ? 'angle-double-left' : 'angle-double-right';
@@ -15,6 +15,8 @@ export const TOGGLE_PROJECT_OPEN = 'TOGGLE_PROJECT_OPEN';
export const SET_MERGE_REQUEST = 'SET_MERGE_REQUEST';
export const SET_CURRENT_MERGE_REQUEST = 'SET_CURRENT_MERGE_REQUEST';
export const SET_MERGE_REQUEST_CHANGES = 'SET_MERGE_REQUEST_CHANGES';
export const SET_MERGE_REQUEST_VERSIONS = 'SET_MERGE_REQUEST_VERSIONS';
export const SET_MERGE_REQUEST_VERSION = 'SET_MERGE_REQUEST_VERSION';
export const SET_MERGE_REQUEST_NOTES = 'SET_MERGE_REQUEST_NOTES';
 
// Branch Mutation Types
@@ -34,7 +36,7 @@ export const SET_FILE_DATA = 'SET_FILE_DATA';
 export const TOGGLE_FILE_OPEN = 'TOGGLE_FILE_OPEN';
 export const SET_FILE_ACTIVE = 'SET_FILE_ACTIVE';
 export const SET_FILE_RAW_DATA = 'SET_FILE_RAW_DATA';
-export const SET_FILE_TARGET_RAW_DATA = 'SET_FILE_TARGET_RAW_DATA';
+export const SET_FILE_BASE_RAW_DATA = 'SET_FILE_BASE_RAW_DATA';
 export const UPDATE_FILE_CONTENT = 'UPDATE_FILE_CONTENT';
 export const SET_FILE_LANGUAGE = 'SET_FILE_LANGUAGE';
 export const SET_FILE_POSITION = 'SET_FILE_POSITION';
@@ -28,6 +28,8 @@ export default {
rawPath: data.raw_path,
binary: data.binary,
renderError: data.render_error,
raw: null,
baseRaw: null,
});
},
[types.SET_FILE_RAW_DATA](state, { file, raw }) {
@@ -35,9 +37,9 @@ export default {
       raw,
     });
   },
-  [types.SET_FILE_TARGET_RAW_DATA](state, { file, raw }) {
-    Object.assign(file, {
-      targetRaw: raw,
+  [types.SET_FILE_BASE_RAW_DATA](state, { file, baseRaw }) {
+    Object.assign(state.entries[file.path], {
+      baseRaw,
     });
   },
   [types.UPDATE_FILE_CONTENT](state, { path, content }) {
@@ -69,11 +71,6 @@ export default {
       mrDiff,
     });
   },
-  [types.SET_FILE_TARGET_BRANCH](state, { file, targetBranch }) {
-    Object.assign(file, {
-      targetBranch,
-    });
-  },
   [types.DISCARD_FILE_CHANGES](state, path) {
     Object.assign(state.entries[path], {
       content: state.entries[path].raw,
@@ -13,6 +13,9 @@ export default {
// Add client side properties
Object.assign(mergeRequest, {
active: true,
changes: [],
versions: [],
baseCommitSha: null,
});
 
Object.assign(state.projects[projectPath], {
@@ -29,6 +32,23 @@ export default {
changes,
});
},
[types.SET_MERGE_REQUEST_VERSIONS](
state,
{ projectPath, mergeRequestId, versions },
) {
Object.assign(state.projects[projectPath].mergeRequests[mergeRequestId], {
versions,
baseCommitSha: versions.length ? versions[0].base_commit_sha : null,
});
},
[types.SET_MERGE_REQUEST_VERSION](
state,
{ projectPath, mergeRequestId, changes },
) {
Object.assign(state.projects[projectPath].mergeRequests[mergeRequestId], {
changes,
});
},
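The `SET_MERGE_REQUEST_VERSIONS` mutation stores the version list and lifts the base commit SHA of the first (most recent) entry onto the merge request. The payload shape below is an assumption based on the field name used here, not data from this diff:

```javascript
// Assumed response shape; only base_commit_sha is consumed by the mutation.
const versions = [
  { id: 110, head_commit_sha: 'aaa111', base_commit_sha: 'bbb222' },
  { id: 108, head_commit_sha: 'ccc333', base_commit_sha: 'bbb222' },
];

const mergeRequest = { versions: [], baseCommitSha: null };

Object.assign(mergeRequest, {
  versions,
  baseCommitSha: versions.length ? versions[0].base_commit_sha : null,
});
// mergeRequest.baseCommitSha === 'bbb222'
```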
[types.SET_MERGE_REQUEST_NOTES](
state,
{ projectPath, mergeRequestId, notes },