Commit 7aada820 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent b5ad0617
Showing 445 additions and 124 deletions
@@ -264,7 +264,7 @@ gem 'licensee', '~> 8.9'
gem 'ace-rails-ap', '~> 4.1.0'
 
# Detect and convert string character encoding
gem 'charlock_holmes', '~> 0.7.5'
gem 'charlock_holmes', '~> 0.7.7'
 
# Detect mime content type from content
gem 'mimemagic', '~> 0.3.2'
@@ -142,7 +142,7 @@ GEM
mime-types (>= 1.16)
cause (0.1)
character_set (1.1.2)
charlock_holmes (0.7.6)
charlock_holmes (0.7.7)
childprocess (0.9.0)
ffi (~> 1.0, >= 1.0.11)
chronic (0.10.2)
@@ -1143,7 +1143,7 @@ DEPENDENCIES
capybara (~> 3.22.0)
capybara-screenshot (~> 1.0.22)
carrierwave (~> 1.3)
charlock_holmes (~> 0.7.5)
charlock_holmes (~> 0.7.7)
chronic (~> 0.10.2)
commonmarker (~> 0.20)
concurrent-ruby (~> 1.1)
@@ -93,7 +93,7 @@ class List {
entityType = 'milestone_id';
}
 
return gl.boardService
return boardsStore
.createList(entity.id, entityType)
.then(res => res.data)
.then(data => {
@@ -111,14 +111,14 @@ class List {
boardsStore.state.lists.splice(index, 1);
boardsStore.updateNewListDropdown(this.id);
 
gl.boardService.destroyList(this.id).catch(() => {
boardsStore.destroyList(this.id).catch(() => {
// TODO: handle request error
});
}
 
update() {
const collapsed = !this.isExpanded;
return gl.boardService.updateList(this.id, this.position, collapsed).catch(() => {
return boardsStore.updateList(this.id, this.position, collapsed).catch(() => {
// TODO: handle request error
});
}
@@ -147,7 +147,7 @@ class List {
this.loading = true;
}
 
return gl.boardService
return boardsStore
.getIssuesForList(this.id, data)
.then(res => res.data)
.then(data => {
@@ -168,7 +168,7 @@ class List {
this.addIssue(issue, null, 0);
this.issuesSize += 1;
 
return gl.boardService
return boardsStore
.newIssue(this.id, issue)
.then(res => res.data)
.then(data => this.onNewIssueResponse(issue, data));
@@ -276,7 +276,7 @@ class List {
this.issues.splice(oldIndex, 1);
this.issues.splice(newIndex, 0, issue);
 
gl.boardService.moveIssue(issue.id, null, null, moveBeforeId, moveAfterId).catch(() => {
boardsStore.moveIssue(issue.id, null, null, moveBeforeId, moveAfterId).catch(() => {
// TODO: handle request error
});
}
@@ -287,7 +287,7 @@ class List {
});
this.issues.splice(newIndex, 0, ...issues);
 
gl.boardService
boardsStore
.moveMultipleIssues({
ids: issues.map(issue => issue.id),
fromListId: null,
@@ -299,15 +299,13 @@ class List {
}
 
updateIssueLabel(issue, listFrom, moveBeforeId, moveAfterId) {
gl.boardService
.moveIssue(issue.id, listFrom.id, this.id, moveBeforeId, moveAfterId)
.catch(() => {
// TODO: handle request error
});
boardsStore.moveIssue(issue.id, listFrom.id, this.id, moveBeforeId, moveAfterId).catch(() => {
// TODO: handle request error
});
}
 
updateMultipleIssues(issues, listFrom, moveBeforeId, moveAfterId) {
gl.boardService
boardsStore
.moveMultipleIssues({
ids: issues.map(issue => issue.id),
fromListId: listFrom.id,
@@ -359,7 +357,7 @@ class List {
 
if (this.issuesSize > 1) {
const moveBeforeId = this.issues[1].id;
gl.boardService.moveIssue(issue.id, null, null, null, moveBeforeId);
boardsStore.moveIssue(issue.id, null, null, null, moveBeforeId);
}
}
}
@@ -9,6 +9,12 @@ module Mutations
GitlabSchema.object_from_id(id)
end
 
def map_to_global_ids(ids)
  return [] if ids.blank?

  ids.map { |id| to_global_id(id) }
end

def to_global_id(id)
  ::URI::GID.build(app: GlobalID.app, model_name: Todo.name, model_id: id, params: nil).to_s
end

# frozen_string_literal: true

module Mutations
  module Todos
    class MarkAllDone < ::Mutations::Todos::Base
      graphql_name 'TodosMarkAllDone'

      authorize :update_user

      field :updated_ids,
            [GraphQL::ID_TYPE],
            null: false,
            description: 'Ids of the updated todos'

      def resolve
        authorize!(current_user)

        updated_ids = mark_all_todos_done

        {
          updated_ids: map_to_global_ids(updated_ids),
          errors: []
        }
      end

      private

      def mark_all_todos_done
        return [] unless current_user

        TodoService.new.mark_all_todos_as_done_by_user(current_user)
      end
    end
  end
end
@@ -22,6 +22,7 @@ module Types
mount_mutation Mutations::Notes::Destroy
mount_mutation Mutations::Todos::MarkDone
mount_mutation Mutations::Todos::Restore
mount_mutation Mutations::Todos::MarkAllDone
end
end
 
@@ -174,6 +174,11 @@ class TodoService
mark_todos_as_done(todos, current_user)
end
 
def mark_all_todos_as_done_by_user(current_user)
todos = TodosFinder.new(current_user).execute
mark_todos_as_done(todos, current_user)
end

# When user marks some todos as pending
def mark_todos_as_pending(todos, current_user)
update_todos_state(todos, current_user, :pending)
---
title: Add GraphQL mutation to mark all todos done for a user
merge_request: 19482
author:
type: added
---
title: Removes references of BoardService in list file
merge_request: 20145
author: nuwe1
type: other
@@ -3520,6 +3520,7 @@ type Mutation {
removeAwardEmoji(input: RemoveAwardEmojiInput!): RemoveAwardEmojiPayload
todoMarkDone(input: TodoMarkDoneInput!): TodoMarkDonePayload
todoRestore(input: TodoRestoreInput!): TodoRestorePayload
todosMarkAllDone(input: TodosMarkAllDoneInput!): TodosMarkAllDonePayload
toggleAwardEmoji(input: ToggleAwardEmojiInput!): ToggleAwardEmojiPayload
updateEpic(input: UpdateEpicInput!): UpdateEpicPayload
updateNote(input: UpdateNoteInput!): UpdateNotePayload
@@ -5060,6 +5061,36 @@ enum TodoTargetEnum {
MERGEREQUEST
}
 
"""
Autogenerated input type of TodosMarkAllDone
"""
input TodosMarkAllDoneInput {
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
}
"""
Autogenerated return type of TodosMarkAllDone
"""
type TodosMarkAllDonePayload {
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
Reasons why the mutation failed.
"""
errors: [String!]!
"""
Ids of the updated todos
"""
updatedIds: [ID!]!
}
"""
Autogenerated input type of ToggleAwardEmoji
"""
@@ -14355,6 +14355,33 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "todosMarkAllDone",
"description": null,
"args": [
{
"name": "input",
"description": null,
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "INPUT_OBJECT",
"name": "TodosMarkAllDoneInput",
"ofType": null
}
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "TodosMarkAllDonePayload",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "toggleAwardEmoji",
"description": null,
@@ -16825,6 +16852,106 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "TodosMarkAllDonePayload",
"description": "Autogenerated return type of TodosMarkAllDone",
"fields": [
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "errors",
"description": "Reasons why the mutation failed.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "updatedIds",
"description": "Ids of the updated todos",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ID",
"ofType": null
}
}
}
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "TodosMarkAllDoneInput",
"description": "Autogenerated input type of TodosMarkAllDone",
"fields": null,
"inputFields": [
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
}
],
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "DesignManagementUploadPayload",
@@ -785,6 +785,14 @@ The API can be explored interactively using the [GraphiQL IDE](../index.md#graph
| `errors` | String! => Array | Reasons why the mutation failed. |
| `todo` | Todo! | The requested todo |
 
### TodosMarkAllDonePayload

| Name | Type | Description |
| --- | ---- | ---------- |
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
| `errors` | String! => Array | Reasons why the mutation failed. |
| `updatedIds` | ID! => Array | Ids of the updated todos |
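
A short usage sketch, assuming the schema above: the new mutation can be run from GraphiQL (or any GraphQL client), selecting whichever payload fields are needed:

```graphql
mutation {
  todosMarkAllDone(input: {}) {
    updatedIds
    errors
  }
}
```
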
### ToggleAwardEmojiPayload
 
| Name | Type | Description |
@@ -1539,9 +1539,14 @@ cache:
 
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/18986) in GitLab v12.5.
 
If `cache:key:files` is added, one or two files must be defined with it. The cache `key`
will be a SHA computed from the most recent commits (one or two) that changed the
given files. If neither file was changed in any commits, the key will be `default`.

The `cache:key:files` keyword extends the `cache:key` functionality by making it easier
to reuse some caches, and rebuild them less often, which will speed up subsequent pipeline
runs.
When you include `cache:key:files`, you must also list the project files that will be used to generate the key, up to a maximum of two files.
The cache `key` will be a SHA checksum computed from the most recent commits (up to two, if two files are listed)
that changed the given files. If neither file was changed in any commits,
the fallback key will be `default`.
 
```yaml
cache:
@@ -1554,20 +1559,26 @@ cache:
- node_modules
```
 
In this example we are creating a cache for Ruby and Node.js dependencies that
is tied to current versions of the `Gemfile.lock` and `package.json` files. Whenever one of
these files changes, a new cache key is computed and a new cache is created. Any future
job runs using the same `Gemfile.lock` and `package.json` with `cache:key:files` will
use the new cache, instead of rebuilding the dependencies.

##### `cache:key:prefix`

> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/18986) in GitLab v12.5.

The `prefix` parameter adds extra functionality to `key:files` by allowing the key to
be composed of the given `prefix` combined with the SHA computed for `cache:key:files`.
For example, adding a `prefix` of `rspec`, will
cause keys to look like: `rspec-feef9576d21ee9b6a32e30c5c79d0a0ceb68d1e5`. If neither
file was changed in any commits, the prefix is added to `default`, so the key in the
example would be `rspec-default`.

For example, adding a `prefix` of `test` will cause keys to look like: `test-feef9576d21ee9b6a32e30c5c79d0a0ceb68d1e5`.
If neither file was changed in any commits, the prefix is added to `default`, so the
key in the example would be `test-default`.
 
`prefix` follows the same restrictions as `key`, so it can use any of the
[predefined variables](../variables/README.md). Similarly, the `/` character or the
equivalent URI-encoded `%2F`, or a value made only of `.` or `%2E`, is not allowed.

Like `cache:key`, `prefix` can use any of the [predefined variables](../variables/README.md),
but the following are not allowed:

- the `/` character (or the equivalent URI-encoded `%2F`)
- a value made only of `.` (or the equivalent URI-encoded `%2E`)
 
```yaml
cache:
@@ -1577,8 +1588,20 @@ cache:
    prefix: ${CI_JOB_NAME}
  paths:
    - vendor/ruby

rspec:
  script:
    - bundle exec rspec
```
 
For example, adding a `prefix` of `$CI_JOB_NAME` will
cause the key to look like: `rspec-feef9576d21ee9b6a32e30c5c79d0a0ceb68d1e5` and
the job cache is shared across different branches. If a branch changes
`Gemfile.lock`, that branch will have a new SHA checksum for `cache:key:files`. A new cache key
will be generated, and a new cache will be created for that key.
If `Gemfile.lock` is not found, the prefix is added to
`default`, so the key in the example would be `rspec-default`.

#### `cache:untracked`
 
Set `untracked: true` to cache all files that are untracked in your Git
@@ -7,7 +7,7 @@ module Quality
class HelmClient
CommandFailedError = Class.new(StandardError)
 
attr_reader :namespace
attr_reader :tiller_namespace, :namespace
 
RELEASE_JSON_ATTRIBUTES = %w[Name Revision Updated Status Chart AppVersion Namespace].freeze
 
@@ -24,7 +24,8 @@ module Quality
# A single page of data and the corresponding page number.
Page = Struct.new(:releases, :number)
 
def initialize(namespace:)
def initialize(tiller_namespace:, namespace:)
@tiller_namespace = tiller_namespace
@namespace = namespace
end
 
@@ -35,7 +36,7 @@ module Quality
def delete(release_name:)
run_command([
'delete',
%(--tiller-namespace "#{namespace}"),
%(--tiller-namespace "#{tiller_namespace}"),
'--purge',
release_name
])
@@ -60,7 +61,7 @@ module Quality
command = [
'list',
%(--namespace "#{namespace}"),
%(--tiller-namespace "#{namespace}" --output json),
%(--tiller-namespace "#{tiller_namespace}" --output json),
*args
]
json = JSON.parse(run_command(command))
@@ -25,7 +25,6 @@ class AutomatedCleanup
def initialize(project_path: ENV['CI_PROJECT_PATH'], gitlab_token: ENV['GITLAB_BOT_REVIEW_APPS_CLEANUP_TOKEN'])
@project_path = project_path
@gitlab_token = gitlab_token
ENV['TILLER_NAMESPACE'] ||= review_apps_namespace
end
 
def gitlab
@@ -45,7 +44,9 @@ class AutomatedCleanup
end
 
def helm
@helm ||= Quality::HelmClient.new(namespace: review_apps_namespace)
@helm ||= Quality::HelmClient.new(
tiller_namespace: review_apps_namespace,
namespace: review_apps_namespace)
end
 
def kubernetes
[[ "$TRACE" ]] && set -x
export TILLER_NAMESPACE="$KUBE_NAMESPACE"
 
function deploy_exists() {
local namespace="${1}"
local deploy="${2}"
echoinfo "Checking if ${deploy} exists in the ${namespace} namespace..." true
local release="${2}"
local deploy_exists
 
helm status --tiller-namespace "${namespace}" "${deploy}" >/dev/null 2>&1
local deploy_exists=$?
echoinfo "Checking if ${release} exists in the ${namespace} namespace..." true
 
echoinfo "Deployment status for ${deploy} is ${deploy_exists}"
helm status --tiller-namespace "${namespace}" "${release}" >/dev/null 2>&1
deploy_exists=$?
echoinfo "Deployment status for ${release} is ${deploy_exists}"
return $deploy_exists
}
 
function previous_deploy_failed() {
local deploy="${1}"
echoinfo "Checking for previous deployment of ${deploy}" true
local namespace="${1}"
local release="${2}"
echoinfo "Checking for previous deployment of ${release}" true
 
helm status "${deploy}" >/dev/null 2>&1
helm status --tiller-namespace "${namespace}" "${release}" >/dev/null 2>&1
local status=$?
 
# if `status` is `0`, deployment exists, has a status
if [ $status -eq 0 ]; then
echoinfo "Previous deployment found, checking status..."
deployment_status=$(helm status "${deploy}" | grep ^STATUS | cut -d' ' -f2)
deployment_status=$(helm status --tiller-namespace "${namespace}" "${release}" | grep ^STATUS | cut -d' ' -f2)
echoinfo "Previous deployment state: ${deployment_status}"
if [[ "$deployment_status" == "FAILED" || "$deployment_status" == "PENDING_UPGRADE" || "$deployment_status" == "PENDING_INSTALL" ]]; then
status=0;
@@ -37,30 +40,34 @@ function previous_deploy_failed() {
}
 
function delete_release() {
if [ -z "$CI_ENVIRONMENT_SLUG" ]; then
local namespace="${KUBE_NAMESPACE}"
local release="${CI_ENVIRONMENT_SLUG}"
if [ -z "${release}" ]; then
echoerr "No release given, aborting the delete!"
return
fi
 
local name="$CI_ENVIRONMENT_SLUG"
echoinfo "Deleting release '$name'..." true
echoinfo "Deleting release '${release}'..." true
 
helm delete --purge "$name"
helm delete --tiller-namespace "${namespace}" --purge "${release}"
}
 
function delete_failed_release() {
if [ -z "$CI_ENVIRONMENT_SLUG" ]; then
local namespace="${KUBE_NAMESPACE}"
local release="${CI_ENVIRONMENT_SLUG}"
if [ -z "${release}" ]; then
echoerr "No release given, aborting the delete!"
return
fi
 
if ! deploy_exists "${KUBE_NAMESPACE}" "${CI_ENVIRONMENT_SLUG}"; then
echoinfo "No Review App with ${CI_ENVIRONMENT_SLUG} is currently deployed."
if ! deploy_exists "${namespace}" "${release}"; then
echoinfo "No Review App with ${release} is currently deployed."
else
# Cleanup and previous installs, as FAILED and PENDING_UPGRADE will cause errors with `upgrade`
if previous_deploy_failed "$CI_ENVIRONMENT_SLUG" ; then
echoinfo "Review App deployment in bad state, cleaning up $CI_ENVIRONMENT_SLUG"
if previous_deploy_failed "${namespace}" "${release}" ; then
echoinfo "Review App deployment in bad state, cleaning up ${release}"
delete_release
else
echoinfo "Review App deployment in good state"
@@ -70,9 +77,12 @@ function delete_failed_release() {
 
 
function get_pod() {
local namespace="${KUBE_NAMESPACE}"
local release="${CI_ENVIRONMENT_SLUG}"
local app_name="${1}"
local status="${2-Running}"
get_pod_cmd="kubectl get pods -n ${KUBE_NAMESPACE} --field-selector=status.phase=${status} -lapp=${app_name},release=${CI_ENVIRONMENT_SLUG} --no-headers -o=custom-columns=NAME:.metadata.name | tail -n 1"
get_pod_cmd="kubectl get pods --namespace ${namespace} --field-selector=status.phase=${status} -lapp=${app_name},release=${release} --no-headers -o=custom-columns=NAME:.metadata.name | tail -n 1"
echoinfo "Waiting till '${app_name}' pod is ready" true
echoinfo "Running '${get_pod_cmd}'"
 
@@ -111,19 +121,24 @@ function check_kube_domain() {
}
 
function ensure_namespace() {
echoinfo "Ensuring the ${KUBE_NAMESPACE} namespace exists..." true
local namespace="${KUBE_NAMESPACE}"
echoinfo "Ensuring the ${namespace} namespace exists..." true
 
kubectl describe namespace "$KUBE_NAMESPACE" || kubectl create namespace "$KUBE_NAMESPACE"
kubectl describe namespace "${namespace}" || kubectl create namespace "${namespace}"
}
 
function install_tiller() {
echoinfo "Checking deployment/tiller-deploy status in the ${TILLER_NAMESPACE} namespace..." true
local namespace="${KUBE_NAMESPACE}"
echoinfo "Checking deployment/tiller-deploy status in the ${namespace} namespace..." true
 
echoinfo "Initiating the Helm client..."
helm init --client-only
 
# Set toleration for Tiller to be installed on a specific node pool
helm init \
--tiller-namespace "${namespace}" \
--wait \
--upgrade \
--node-selectors "app=helm" \
@@ -133,34 +148,38 @@ function install_tiller() {
--override "spec.template.spec.tolerations[0].value"="helm" \
--override "spec.template.spec.tolerations[0].effect"="NoSchedule"
 
kubectl rollout status -n "$TILLER_NAMESPACE" -w "deployment/tiller-deploy"
kubectl rollout status --namespace "${namespace}" --watch "deployment/tiller-deploy"
 
if ! helm version --debug; then
if ! helm version --tiller-namespace "${namespace}" --debug; then
echo "Failed to init Tiller."
return 1
fi
}
 
function install_external_dns() {
local release_name="dns-gitlab-review-app"
local namespace="${KUBE_NAMESPACE}"
local release="dns-gitlab-review-app"
local domain
domain=$(echo "${REVIEW_APPS_DOMAIN}" | awk -F. '{printf "%s.%s", $(NF-1), $NF}')
echoinfo "Installing external DNS for domain ${domain}..." true
 
if ! deploy_exists "${KUBE_NAMESPACE}" "${release_name}" || previous_deploy_failed "${release_name}" ; then
if ! deploy_exists "${namespace}" "${release}" || previous_deploy_failed "${namespace}" "${release}" ; then
echoinfo "Installing external-dns Helm chart"
helm repo update
helm repo update --tiller-namespace "${namespace}"
# Default requested: CPU => 0, memory => 0
helm install stable/external-dns --version '^2.2.1' \
-n "${release_name}" \
--namespace "${KUBE_NAMESPACE}" \
helm install stable/external-dns \
--tiller-namespace "${namespace}" \
--namespace "${namespace}" \
--version '^2.2.1' \
--name "${release}" \
--set provider="aws" \
--set aws.credentials.secretKey="${REVIEW_APPS_AWS_SECRET_KEY}" \
--set aws.credentials.accessKey="${REVIEW_APPS_AWS_ACCESS_KEY}" \
--set aws.zoneType="public" \
--set aws.batchChangeSize=400 \
--set domainFilters[0]="${domain}" \
--set txtOwnerId="${KUBE_NAMESPACE}" \
--set txtOwnerId="${namespace}" \
--set rbac.create="true" \
--set policy="sync" \
--set resources.requests.cpu=50m \
@@ -173,21 +192,24 @@ function install_external_dns() {
}
 
function create_application_secret() {
echoinfo "Creating the ${CI_ENVIRONMENT_SLUG}-gitlab-initial-root-password secret in the ${KUBE_NAMESPACE} namespace..." true
local namespace="${KUBE_NAMESPACE}"
local release="${CI_ENVIRONMENT_SLUG}"
echoinfo "Creating the ${release}-gitlab-initial-root-password secret in the ${namespace} namespace..." true
 
kubectl create secret generic -n "$KUBE_NAMESPACE" \
"${CI_ENVIRONMENT_SLUG}-gitlab-initial-root-password" \
kubectl create secret generic --namespace "${namespace}" \
"${release}-gitlab-initial-root-password" \
--from-literal="password=${REVIEW_APPS_ROOT_PASSWORD}" \
--dry-run -o json | kubectl apply -f -
 
if [ -z "${REVIEW_APPS_EE_LICENSE}" ]; then echo "License not found" && return; fi
 
echoinfo "Creating the ${CI_ENVIRONMENT_SLUG}-gitlab-license secret in the ${KUBE_NAMESPACE} namespace..." true
echoinfo "Creating the ${release}-gitlab-license secret in the ${namespace} namespace..." true
 
echo "${REVIEW_APPS_EE_LICENSE}" > /tmp/license.gitlab
 
kubectl create secret generic -n "$KUBE_NAMESPACE" \
"${CI_ENVIRONMENT_SLUG}-gitlab-license" \
kubectl create secret generic --namespace "${namespace}" \
"${release}-gitlab-license" \
--from-file=license=/tmp/license.gitlab \
--dry-run -o json | kubectl apply -f -
}
@@ -213,13 +235,14 @@ function base_config_changed() {
}
 
function deploy() {
local name="$CI_ENVIRONMENT_SLUG"
local namespace="${KUBE_NAMESPACE}"
local release="${CI_ENVIRONMENT_SLUG}"
local edition="${GITLAB_EDITION-ce}"
local base_config_file_ref="master"
if [[ "$(base_config_changed)" == "true" ]]; then base_config_file_ref="$CI_COMMIT_SHA"; fi
if [[ "$(base_config_changed)" == "true" ]]; then base_config_file_ref="${CI_COMMIT_SHA}"; fi
local base_config_file="https://gitlab.com/gitlab-org/gitlab/raw/${base_config_file_ref}/scripts/review_apps/base-config.yaml"
 
echoinfo "Deploying ${name}..." true
echoinfo "Deploying ${release}..." true
 
IMAGE_REPOSITORY="registry.gitlab.com/gitlab-org/build/cng-mirror"
gitlab_migrations_image_repository="${IMAGE_REPOSITORY}/gitlab-rails-${edition}"
@@ -233,47 +256,49 @@ function deploy() {
create_application_secret
 
HELM_CMD=$(cat << EOF
helm upgrade --install \
helm upgrade \
--tiller-namespace="${namespace}" \
--namespace="${namespace}" \
--install \
--wait \
--timeout 900 \
--set ci.branch="$CI_COMMIT_REF_NAME" \
--set ci.commit.sha="$CI_COMMIT_SHORT_SHA" \
--set ci.job.url="$CI_JOB_URL" \
--set ci.pipeline.url="$CI_PIPELINE_URL" \
--set releaseOverride="$CI_ENVIRONMENT_SLUG" \
--set global.hosts.hostSuffix="$HOST_SUFFIX" \
--set global.hosts.domain="$REVIEW_APPS_DOMAIN" \
--set gitlab.migrations.image.repository="$gitlab_migrations_image_repository" \
--set gitlab.migrations.image.tag="$CI_COMMIT_REF_SLUG" \
--set gitlab.gitaly.image.repository="$gitlab_gitaly_image_repository" \
--set gitlab.gitaly.image.tag="v$GITALY_VERSION" \
--set gitlab.gitlab-shell.image.repository="$gitlab_shell_image_repository" \
--set gitlab.gitlab-shell.image.tag="v$GITLAB_SHELL_VERSION" \
--set gitlab.sidekiq.image.repository="$gitlab_sidekiq_image_repository" \
--set gitlab.sidekiq.image.tag="$CI_COMMIT_REF_SLUG" \
--set gitlab.unicorn.image.repository="$gitlab_unicorn_image_repository" \
--set gitlab.unicorn.image.tag="$CI_COMMIT_REF_SLUG" \
--set gitlab.unicorn.workhorse.image="$gitlab_workhorse_image_repository" \
--set gitlab.unicorn.workhorse.tag="$CI_COMMIT_REF_SLUG" \
--set gitlab.task-runner.image.repository="$gitlab_task_runner_image_repository" \
--set gitlab.task-runner.image.tag="$CI_COMMIT_REF_SLUG"
--set ci.branch="${CI_COMMIT_REF_NAME}" \
--set ci.commit.sha="${CI_COMMIT_SHORT_SHA}" \
--set ci.job.url="${CI_JOB_URL}" \
--set ci.pipeline.url="${CI_PIPELINE_URL}" \
--set releaseOverride="${release}" \
--set global.hosts.hostSuffix="${HOST_SUFFIX}" \
--set global.hosts.domain="${REVIEW_APPS_DOMAIN}" \
--set gitlab.migrations.image.repository="${gitlab_migrations_image_repository}" \
--set gitlab.migrations.image.tag="${CI_COMMIT_REF_SLUG}" \
--set gitlab.gitaly.image.repository="${gitlab_gitaly_image_repository}" \
--set gitlab.gitaly.image.tag="v${GITALY_VERSION}" \
--set gitlab.gitlab-shell.image.repository="${gitlab_shell_image_repository}" \
--set gitlab.gitlab-shell.image.tag="v${GITLAB_SHELL_VERSION}" \
--set gitlab.sidekiq.image.repository="${gitlab_sidekiq_image_repository}" \
--set gitlab.sidekiq.image.tag="${CI_COMMIT_REF_SLUG}" \
--set gitlab.unicorn.image.repository="${gitlab_unicorn_image_repository}" \
--set gitlab.unicorn.image.tag="${CI_COMMIT_REF_SLUG}" \
--set gitlab.unicorn.workhorse.image="${gitlab_workhorse_image_repository}" \
--set gitlab.unicorn.workhorse.tag="${CI_COMMIT_REF_SLUG}" \
--set gitlab.task-runner.image.repository="${gitlab_task_runner_image_repository}" \
--set gitlab.task-runner.image.tag="${CI_COMMIT_REF_SLUG}"
EOF
)
 
if [ -n "${REVIEW_APPS_EE_LICENSE}" ]; then
HELM_CMD=$(cat << EOF
${HELM_CMD} \
--set global.gitlab.license.secret="${CI_ENVIRONMENT_SLUG}-gitlab-license"
--set global.gitlab.license.secret="${release}-gitlab-license"
EOF
)
fi
 
HELM_CMD=$(cat << EOF
${HELM_CMD} \
--namespace="$KUBE_NAMESPACE" \
--version="${CI_PIPELINE_ID}-${CI_JOB_ID}" \
-f "${base_config_file}" \
"${name}" .
"${release}" .
EOF
)
 
@@ -284,11 +309,14 @@ EOF
}
 
function display_deployment_debug() {
local namespace="${KUBE_NAMESPACE}"
local release="${CI_ENVIRONMENT_SLUG}"
# Get all pods for this release
echoinfo "Pods for release ${CI_ENVIRONMENT_SLUG}"
kubectl get pods -n "$KUBE_NAMESPACE" -lrelease=${CI_ENVIRONMENT_SLUG}
echoinfo "Pods for release ${release}"
kubectl get pods --namespace "${namespace}" -lrelease=${release}
 
# Get all non-completed jobs
echoinfo "Unsuccessful Jobs for release ${CI_ENVIRONMENT_SLUG}"
kubectl get jobs -n "$KUBE_NAMESPACE" -lrelease=${CI_ENVIRONMENT_SLUG} --field-selector=status.successful!=1
echoinfo "Unsuccessful Jobs for release ${release}"
kubectl get jobs --namespace "${namespace}" -lrelease=${release} --field-selector=status.successful!=1
}

# frozen_string_literal: true

require 'spec_helper'

describe Mutations::Todos::MarkAllDone do
  include GraphqlHelpers

  let_it_be(:current_user) { create(:user) }
  let_it_be(:author) { create(:user) }
  let_it_be(:other_user) { create(:user) }

  let_it_be(:todo1) { create(:todo, user: current_user, author: author, state: :pending) }
  let_it_be(:todo2) { create(:todo, user: current_user, author: author, state: :done) }
  let_it_be(:todo3) { create(:todo, user: current_user, author: author, state: :pending) }

  let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :pending) }

  let_it_be(:user3) { create(:user) }

  describe '#resolve' do
    it 'marks all pending todos as done' do
      updated_todo_ids = mutation_for(current_user).resolve.dig(:updated_ids)

      expect(todo1.reload.state).to eq('done')
      expect(todo2.reload.state).to eq('done')
      expect(todo3.reload.state).to eq('done')
      expect(other_user_todo.reload.state).to eq('pending')
      expect(updated_todo_ids).to contain_exactly(global_id_of(todo1), global_id_of(todo3))
    end

    it 'behaves as expected if there are no todos for the requesting user' do
      updated_todo_ids = mutation_for(user3).resolve.dig(:updated_ids)

      expect(todo1.reload.state).to eq('pending')
      expect(todo2.reload.state).to eq('done')
      expect(todo3.reload.state).to eq('pending')
      expect(other_user_todo.reload.state).to eq('pending')
      expect(updated_todo_ids).to be_empty
    end

    context 'when user is not logged in' do
      it 'fails with the expected error' do
        expect { mutation_for(nil).resolve }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
      end
    end
  end

  def mutation_for(user)
    described_class.new(object: nil, context: { current_user: user })
  end
end
@@ -3,6 +3,8 @@
require 'spec_helper'
 
describe Mutations::Todos::MarkDone do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:other_user) { create(:user) }
@@ -59,8 +61,4 @@ describe Mutations::Todos::MarkDone do
def mark_done_mutation(todo)
mutation.resolve(id: global_id_of(todo))
end
def global_id_of(todo)
todo.to_global_id.to_s
end
end
@@ -11,7 +11,7 @@ import '~/boards/models/list';
import '~/boards/services/board_service';
import boardsStore from '~/boards/stores/boards_store';
import eventHub from '~/boards/eventhub';
import { listObj, listObjDuplicate, boardsMockInterceptor, mockBoardService } from './mock_data';
import { listObj, listObjDuplicate, boardsMockInterceptor } from './mock_data';
import waitForPromises from '../../frontend/helpers/wait_for_promises';
 
describe('Store', () => {
@@ -20,17 +20,16 @@ describe('Store', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
mock.onAny().reply(boardsMockInterceptor);
gl.boardService = mockBoardService();
boardsStore.create();
 
spyOn(gl.boardService, 'moveIssue').and.callFake(
spyOn(boardsStore, 'moveIssue').and.callFake(
() =>
new Promise(resolve => {
resolve();
}),
);
 
spyOn(gl.boardService, 'moveMultipleIssues').and.callFake(
spyOn(boardsStore, 'moveMultipleIssues').and.callFake(
() =>
new Promise(resolve => {
resolve();
@@ -263,7 +262,7 @@ describe('Store', () => {
expect(listOne.issues.length).toBe(0);
expect(listTwo.issues.length).toBe(2);
expect(listTwo.issues[0].id).toBe(2);
expect(gl.boardService.moveIssue).toHaveBeenCalledWith(2, listOne.id, listTwo.id, null, 1);
expect(boardsStore.moveIssue).toHaveBeenCalledWith(2, listOne.id, listTwo.id, null, 1);
 
done();
}, 0);
@@ -286,7 +285,7 @@ describe('Store', () => {
expect(listOne.issues.length).toBe(0);
expect(listTwo.issues.length).toBe(2);
expect(listTwo.issues[1].id).toBe(2);
expect(gl.boardService.moveIssue).toHaveBeenCalledWith(2, listOne.id, listTwo.id, 1, null);
expect(boardsStore.moveIssue).toHaveBeenCalledWith(2, listOne.id, listTwo.id, 1, null);
 
done();
}, 0);
@@ -311,7 +310,7 @@ describe('Store', () => {
boardsStore.moveIssueInList(list, issue, 0, 1, [1, 2]);
 
expect(list.issues[0].id).toBe(2);
expect(gl.boardService.moveIssue).toHaveBeenCalledWith(2, null, null, 1, null);
expect(boardsStore.moveIssue).toHaveBeenCalledWith(2, null, null, 1, null);
 
done();
});
@@ -495,7 +494,7 @@ describe('Store', () => {
 
expect(list.issues[0].id).toBe(issue1.id);
 
expect(gl.boardService.moveMultipleIssues).toHaveBeenCalledWith({
expect(boardsStore.moveMultipleIssues).toHaveBeenCalledWith({
ids: [issue1.id, issue2.id],
fromListId: null,
toListId: null,
@@ -12,7 +12,7 @@ import '~/boards/models/issue';
import '~/boards/models/list';
import '~/boards/services/board_service';
import boardsStore from '~/boards/stores/boards_store';
import { listObj, listObjDuplicate, boardsMockInterceptor, mockBoardService } from './mock_data';
import { listObj, listObjDuplicate, boardsMockInterceptor } from './mock_data';
 
describe('List model', () => {
let list;
@@ -21,9 +21,6 @@ describe('List model', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
mock.onAny().reply(boardsMockInterceptor);
gl.boardService = mockBoardService({
bulkUpdatePath: '/test/issue-boards/board/1/lists',
});
boardsStore.create();
 
list = new List(listObj);
@@ -110,11 +107,11 @@ describe('List model', () => {
list.issues.push(issue);
listDup.issues.push(issue);
 
spyOn(gl.boardService, 'moveIssue').and.callThrough();
spyOn(boardsStore, 'moveIssue').and.callThrough();
 
listDup.updateIssueLabel(issue, list);
 
expect(gl.boardService.moveIssue).toHaveBeenCalledWith(
expect(boardsStore.moveIssue).toHaveBeenCalledWith(
issue.id,
list.id,
listDup.id,
@@ -172,7 +169,7 @@ describe('List model', () => {
 
describe('newIssue', () => {
beforeEach(() => {
spyOn(gl.boardService, 'newIssue').and.returnValue(
spyOn(boardsStore, 'newIssue').and.returnValue(
Promise.resolve({
data: {
id: 42,