......@@ -12,6 +12,24 @@ stages:
- post-qa
- pages
workflow:
rules:
# If `$FORCE_GITLAB_CI` is set, create a pipeline.
- if: '$FORCE_GITLAB_CI'
# For merge requests, create a pipeline.
- if: '$CI_MERGE_REQUEST_IID'
# For `master` branch, create a pipeline (this includes schedules, pushes, merges, etc.).
- if: '$CI_COMMIT_BRANCH == "master"'
# For tags, create a pipeline.
- if: '$CI_COMMIT_TAG'
# If `$GITLAB_INTERNAL` isn't set, don't create a pipeline.
- if: '$GITLAB_INTERNAL == null'
when: never
# For stable, auto-deploy, and security branches, create a pipeline.
- if: '$CI_COMMIT_BRANCH =~ /^[\d-]+-stable(-ee)?$/'
- if: '$CI_COMMIT_BRANCH =~ /^\d+-\d+-auto-deploy-\d+$/'
- if: '$CI_COMMIT_BRANCH =~ /^security\//'
variables:
RAILS_ENV: "test"
NODE_ENV: "test"
......
......
......@@ -45,10 +45,13 @@ export default {
:class="{ active: isActive }"
class="stage-nav-item d-flex pl-4 pr-4 m-0 mb-1 ml-2 rounded border-color-default border-style-solid border-width-1px"
>
<div class="stage-nav-item-cell stage-name p-0" :class="{ 'font-weight-bold': isActive }">
<div
class="stage-nav-item-cell stage-name w-50 pr-2"
:class="{ 'font-weight-bold': isActive }"
>
{{ title }}
</div>
<div class="stage-nav-item-cell stage-median mr-4">
<div class="stage-nav-item-cell stage-median w-50">
<template v-if="isUserAllowed">
<span v-if="hasValue">{{ value }}</span>
<span v-else class="stage-empty">{{ __('Not enough data') }}</span>
......
......
......@@ -51,11 +51,11 @@
}
.stage-header {
width: 18.5%;
width: 20.5%;
}
.median-header {
width: 21.5%;
width: 19.5%;
}
.event-header {
......
......
......@@ -28,4 +28,11 @@ module SnippetsActions
def convert_line_endings(content)
params[:line_ending] == 'raw' ? content : content.gsub(/\r\n/, "\n")
end
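# If the snippet update failed with a Git repository error, surface it as a flash
# alert and fall back to re-rendering the edit form instead of redirecting.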
def check_repository_error
repository_error = snippet.errors.delete(:repository)
flash.now[:alert] = repository_error if repository_error
recaptcha_check_with_fallback(repository_error.nil?) { render :edit }
end
end
......@@ -62,7 +62,7 @@ class Projects::SnippetsController < Projects::ApplicationController
service_response = Snippets::UpdateService.new(project, current_user, update_params).execute(@snippet)
@snippet = service_response.payload[:snippet]
recaptcha_check_with_fallback { render :edit }
check_repository_error
end
def show
......
......
......@@ -12,6 +12,8 @@ module Repositories
rescue_from Gitlab::GitAccess::ProjectCreationError, with: :render_422_with_exception
rescue_from Gitlab::GitAccess::TimeoutError, with: :render_503_with_exception
before_action :snippet_request_allowed?
# GET /foo/bar.git/info/refs?service=git-upload-pack (git pull)
# GET /foo/bar.git/info/refs?service=git-receive-pack (git push)
def info_refs
......@@ -116,6 +118,12 @@ module Repositories
def log_user_activity
Users::ActivityService.new(user).execute
end
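# Snippet repositories are only served over Git HTTP(S) while the :version_snippets
# feature flag is enabled for the user; otherwise respond with 404.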
def snippet_request_allowed?
if repo_type.snippet? && Feature.disabled?(:version_snippets, user)
render plain: 'The project you were looking for could not be found.', status: :not_found
end
end
end
end
......
......
......@@ -64,7 +64,7 @@ class SnippetsController < ApplicationController
service_response = Snippets::UpdateService.new(nil, current_user, update_params).execute(@snippet)
@snippet = service_response.payload[:snippet]
recaptcha_check_with_fallback { render :edit }
check_repository_error
end
def show
......
......
......@@ -260,12 +260,14 @@ module Issuable
highest_priority = highest_label_priority(params).to_sql
select_columns = [
"#{table_name}.*",
"(#{highest_priority}) AS highest_priority"
] + extra_select_columns
# When using a CTE, make sure to select the same columns that are in the GROUP BY clause.
# This prevents errors when ignored columns are present in the database.
issuable_columns = with_cte ? issue_grouping_columns(use_cte: with_cte) : "#{table_name}.*"
select(select_columns.join(', '))
extra_select_columns = extra_select_columns.unshift("(#{highest_priority}) AS highest_priority")
select(issuable_columns)
.select(extra_select_columns)
.group(issue_grouping_columns(use_cte: with_cte))
.reorder(Gitlab::Database.nulls_last_order('highest_priority', direction))
end
......@@ -301,7 +303,7 @@ module Issuable
# Returns an array of arel columns
def issue_grouping_columns(use_cte: false)
if use_cte
[arel_table[:state]] + attribute_names.map { |attr| arel_table[attr.to_sym] }
attribute_names.map { |attr| arel_table[attr.to_sym] }
else
arel_table[:id]
end
......
......
......@@ -56,8 +56,13 @@ class PipelineSerializer < BaseSerializer
:manual_actions,
:scheduled_actions,
:artifacts,
:merge_request,
:user,
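# Preload the routes of each merge request's source and target projects (and their
# namespaces) to avoid N+1 queries when rendering merge request paths.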
{
merge_request: {
source_project: [:route, { namespace: :route }],
target_project: [:route, { namespace: :route }]
}
},
{
pending_builds: :project,
project: [:route, { namespace: :route }],
......
......
......@@ -52,7 +52,7 @@ module Snippets
create_commit(snippet) if snippet.repository_exists?
end
rescue
snippet.errors.add(:base, 'Error updating the snippet')
snippet.errors.add(:repository, 'Error updating the snippet')
false
end
......
......
---
title: Support finding namespace by ID or path on fork API
merge_request: 20603
author: leoleoasd
type: fixed
---
title: Show git error message updating snippet
merge_request: 26570
author:
type: fixed
---
title: Fix N+1 queries for PipelinesController#index.json
merge_request: 26643
author:
type: performance
......@@ -35,10 +35,14 @@ the SSH configuration of your server by adding the line below to the `/etc/ssh/s
AcceptEnv GIT_PROTOCOL
```
Once configured, restart the SSH daemon. In Ubuntu, run:
Once configured, restart the SSH daemon for the change to take effect:
```shell
sudo service ssh restart
# CentOS 6 / RHEL 6
sudo service sshd restart
# All other supported distributions
sudo systemctl restart ssh
```
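To confirm that the server now serves Git protocol v2 over SSH, you can inspect the packet trace from a client running Git 2.18 or later (a quick check, not part of the official steps; replace the host and project path with your own):

```shell
GIT_TRACE_PACKET=1 git -c protocol.version=2 \
  ls-remote ssh://git@gitlab.example.com/group/project.git 2>&1 | grep "version 2"
```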
## Instructions
......
......
......@@ -202,14 +202,8 @@ On different cloud vendors a best effort like for like can be used.
and another for the Queues and Shared State classes respectively. We also recommend
that you run the Redis Sentinel clusters separately as well for each Redis Cluster.
[^4]: For data objects such as LFS, Uploads, Artifacts, etc... We recommend a Cloud Object Storage
where possible over NFS due to better performance and availability. Several types of objects
are supported for S3 storage - [Job artifacts](../job_artifacts.md#using-object-storage),
[LFS](../lfs/lfs_administration.md#storing-lfs-objects-in-remote-object-storage),
[Uploads](../uploads.md#using-object-storage-core-only),
[Merge Request Diffs](../merge_request_diffs.md#using-object-storage),
[Packages](../packages/index.md#using-object-storage) (Optional Feature),
[Dependency Proxy](../packages/dependency_proxy.md#using-object-storage) (Optional Feature).
[^4]: For data objects such as LFS, Uploads, Artifacts, etc. We recommend a [Cloud Object Storage service](object_storage.md)
over NFS where possible, due to better performance and availability.
[^5]: NFS can be used as an alternative for both repository data (replacing Gitaly) and
object storage but this isn't typically recommended for performance reasons. Note however it is required for
......
......
......@@ -736,10 +736,14 @@ To enable the read-only mode:
This will set the Container Registry into read-only mode.
1. Next, trigger the garbage collect command:
1. Next, trigger one of the garbage collect commands:
```sh
# Remove layers not referenced by any manifest
sudo /opt/gitlab/embedded/bin/registry garbage-collect /var/opt/gitlab/registry/config.yml
# Also remove untagged manifests and the layers they reference
sudo /opt/gitlab/embedded/bin/registry garbage-collect -m /var/opt/gitlab/registry/config.yml
```
This will start the garbage collection, which might take some time to complete.
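If you want to preview what would be removed before deleting anything, the registry binary also accepts a dry-run flag (a sketch, assuming the bundled registry keeps the upstream `--dry-run` option):

```sh
# Print the blobs that would be deleted, without removing them
sudo /opt/gitlab/embedded/bin/registry garbage-collect --dry-run /var/opt/gitlab/registry/config.yml
```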
......
......
......@@ -1207,7 +1207,9 @@ POST /projects/:id/fork
| Attribute | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) |
| `namespace` | integer/string | yes | The ID or path of the namespace that the project will be forked to |
| `namespace` | integer/string | no | (deprecated) The ID or path of the namespace that the project will be forked to |
| `namespace_id` | integer | no | The ID of the namespace that the project will be forked to |
| `namespace_path` | string | no | The path of the namespace that the project will be forked to |
| `path` | string | no | The path that will be assigned to the resultant project after forking |
| `name` | string | no | The name that will be assigned to the resultant project after forking |
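For example, to fork project `5` into the namespace with ID `42` (a sketch; the token, host, and IDs are placeholders):

```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
     "https://gitlab.example.com/api/v4/projects/5/fork?namespace_id=42"
```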
......
......
......@@ -42,7 +42,7 @@ The default image is currently
`registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.6.5-golang-1.12-git-2.24-lfs-2.9-chrome-73.0-node-12.x-yarn-1.21-postgresql-9.6-graphicsmagick-1.3.34`.
It includes Ruby 2.6.5, Go 1.12, Git 2.24, Git LFS 2.9, Chrome 73, Node 12, Yarn 1.21,
PostgreSQL 9.6, and Graphics Magick 1.3.33.
PostgreSQL 9.6, and Graphics Magick 1.3.34.
The images used in our pipelines are configured in the
[`gitlab-org/gitlab-build-images`](https://gitlab.com/gitlab-org/gitlab-build-images)
......@@ -61,8 +61,8 @@ each pipeline includes default variables defined in
## Common job definitions
Most of the jobs [extend from a few CI definitions](../ci/yaml/README.md#extends)
that are scoped to a single
[configuration parameter](../ci/yaml/README.md#configuration-parameters).
defined in [`.gitlab/ci/global.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/blob/master/.gitlab/ci/global.gitlab-ci.yml)
that are scoped to a single [configuration parameter](../ci/yaml/README.md#configuration-parameters).
| Job definitions | Description |
|------------------|-------------|
......@@ -72,10 +72,27 @@ that are scoped to a single
| `.default-cache` | Allows a job to use a default `cache` definition suitable for Ruby/Rails and frontend tasks. |
| `.use-pg9` | Allows a job to use the `postgres:9.6.17` and `redis:alpine` services. |
| `.use-pg10` | Allows a job to use the `postgres:10.12` and `redis:alpine` services. |
| `.use-pg11` | Allows a job to use the `postgres:11.6` and `redis:alpine` services. |
| `.use-pg9-ee` | Same as `.use-pg9` but also use the `docker.elastic.co/elasticsearch/elasticsearch:6.4.2` services. |
| `.use-pg10-ee` | Same as `.use-pg10` but also use the `docker.elastic.co/elasticsearch/elasticsearch:6.4.2` services. |
| `.use-pg11-ee` | Same as `.use-pg11` but also use the `docker.elastic.co/elasticsearch/elasticsearch:6.4.2` services. |
| `.as-if-foss` | Simulate the FOSS project by setting the `FOSS_ONLY='1'` environment variable. |
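For illustration, a hypothetical job could combine several of these definitions through `extends` (the job name and script below are placeholders, not an actual job in our pipelines):

```yaml
example-rspec-job:
  extends:
    - .default-cache
    - .use-pg11
  script:
    - bundle exec rspec
```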
## `workflow:rules`
We're using the [`workflow:rules` keyword](../ci/yaml/README.md#workflowrules) to
define default rules to determine whether or not a pipeline is created.
These rules are defined in <https://gitlab.com/gitlab-org/gitlab/blob/master/.gitlab-ci.yml>
and are as follows:
1. If `$FORCE_GITLAB_CI` is set, create a pipeline.
1. For merge requests, create a pipeline.
1. For `master` branch, create a pipeline (this includes schedules, pushes, merges, etc.).
1. For tags, create a pipeline.
1. If `$GITLAB_INTERNAL` isn't set, don't create a pipeline.
1. For stable, auto-deploy, and security branches, create a pipeline.
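For instance, the first rule means a pipeline can be forced on an arbitrary branch by passing `FORCE_GITLAB_CI` as a pipeline variable, for example via the pipeline triggers API (a sketch; the trigger token, branch, and project ID are placeholders):

```shell
curl --request POST \
  --form "token=<trigger_token>" \
  --form "ref=<branch>" \
  --form "variables[FORCE_GITLAB_CI]=true" \
  "https://gitlab.com/api/v4/projects/<project_id>/trigger/pipeline"
```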
## `rules`, `if:` conditions and `changes:` patterns
We're using the [`rules` keyword](../ci/yaml/README.md#rules) extensively.
......
......
......@@ -331,7 +331,7 @@ metrics:
unit: "count"
```
This will render into:
This works by lowercasing the value of `label` and, if the value contains multiple words separated by spaces, replacing those spaces with underscores (`_`). The transformed value is then checked against the labels of the time series returned by the Prometheus query. If a matching time series label is found, its value is used and rendered in the legend like this:
![legend with label shorthand variable](img/prometheus_dashboard_label_variable_shorthand.png)
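For instance, with a hypothetical metric entry like the one below (the query and names are only illustrative), the label `Pod name` is transformed to `pod_name`; if the returned time series carry a `pod_name` label, that label's value is what appears in the legend:

```yaml
metrics:
  - id: pod_memory_usage
    label: "Pod name"
    query_range: 'sum(container_memory_usage_bytes) by (pod_name)'
    unit: "bytes"
```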
......
......
......@@ -12,6 +12,9 @@ module API
expose :last_successful_update_at
expose :last_error
expose :only_protected_branches
expose :keep_divergent_refs, if: -> (mirror, _options) do
::Feature.enabled?(:keep_divergent_refs, mirror.project)
end
end
end
end