@@ -40,7 +40,10 @@ Attach the screenshot and HTML snapshot of the page from the job's artifacts:
/due in 2 weeks

<!-- Base labels. -->
/label ~Quality ~QA ~test

<!-- Test failure type label, please use just one.-->
/label ~"failure::broken-test" ~"failure::flaky-test" ~"failure::stale-test" ~"failure::test-environment" ~"failure::investigating"

<!--
Choose the stage that appears in the test path, e.g. ~"devops::create" for
...
import Vue from 'vue';
import PdfViewer from './pdf_viewer.vue';

export default () => {
  const el = document.getElementById('js-pdf-viewer');

  return new Vue({
    el,
    render(createElement) {
      return createElement(PdfViewer, {
        props: {
          pdf: el.dataset.endpoint,
        },
      });
    },
  });
};
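For reference, this entry point expects a mount element along the lines of `<div id="js-pdf-viewer" data-endpoint="/path/to/file.pdf"></div>` (the endpoint value here is illustrative); whatever `data-endpoint` holds is passed to the viewer component as its `pdf` prop.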
<script>
import PdfLab from '../../pdf/index.vue';
import { GlLoadingIcon } from '@gitlab/ui';

export default {
  components: {
    PdfLab,
    GlLoadingIcon,
  },
  props: {
    pdf: {
      type: String,
      required: true,
    },
  },
  data() {
    return {
      error: false,
      loadError: false,
      loading: true,
    };
  },
  methods: {
    onLoad() {
      this.loading = false;
    },
    onError(error) {
      this.loading = false;
      this.loadError = true;
      this.error = error;
    },
  },
};
</script>

<template>
  <div class="js-pdf-viewer container-fluid md prepend-top-default append-bottom-default">
    <div v-if="loading && !error" class="text-center loading">
      <gl-loading-icon class="mt-5" size="lg" />
    </div>
    <pdf-lab v-if="!loadError" :pdf="pdf" @pdflabload="onLoad" @pdflaberror="onError" />
    <p v-if="error" class="text-center">
      <span v-if="loadError" ref="loadError">
        {{ __('An error occurred while loading the file. Please try again later.') }}
      </span>
      <span v-else>{{ __('An error occurred while decoding the file.') }}</span>
    </p>
  </div>
</template>
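For context, a minimal spec sketch of how the component above could be exercised with Jest and @vue/test-utils; the import paths, the sample endpoint, and the stubbed `__` translation helper are assumptions for illustration, not part of this change:

import { shallowMount } from '@vue/test-utils';
import PdfViewer from './pdf_viewer.vue'; // hypothetical path, relative to the spec
import PdfLab from '../../pdf/index.vue'; // hypothetical path

describe('PdfViewer', () => {
  it('forwards the endpoint to PdfLab as the `pdf` prop', () => {
    const wrapper = shallowMount(PdfViewer, {
      propsData: { pdf: '/sample/file.pdf' }, // illustrative endpoint
      mocks: { __: text => text }, // stub the translation helper used in the template
    });

    expect(wrapper.find(PdfLab).props('pdf')).toBe('/sample/file.pdf');
  });
});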
@@ -8,6 +8,7 @@ import axios from './axios_utils';
import { getLocationHash } from './url_utility';
import { convertToCamelCase, convertToSnakeCase } from './text_utility';
import { isObject } from './type_utility';
import { isFunction } from 'lodash';
export const getPagePath = (index = 0) => {
  const page = $('body').attr('data-page') || '';
@@ -667,30 +668,34 @@ export const spriteIcon = (icon, className = '') => {
};
/**
 * @callback ConversionFunction
 * @param {string} prop
 */

/**
 * This function takes a conversion function as the first parameter
 * and applies this function to each prop in the provided object.
 *
 * This method also supports additional params in `options` object
 *
 * @param {ConversionFunction} conversionFunction - Function to apply to each prop of the object.
 * @param {Object} obj - Object to be converted.
 * @param {Object} options - Object containing additional options.
 * @param {boolean} options.deep - Flag to allow deep object conversion
 * @param {Array[]} options.dropKeys - List of properties to discard while building new object
 * @param {Array[]} options.ignoreKeyNames - List of properties to leave intact (as snake_case) while building new object
 */
export const convertObjectProps = (conversionFunction, obj = {}, options = {}) => {
  if (!isFunction(conversionFunction) || obj === null) {
    return {};
  }

  const { deep = false, dropKeys = [], ignoreKeyNames = [] } = options;

  const isObjParameterArray = Array.isArray(obj);
  const initialValue = isObjParameterArray ? [] : {};

  return Object.keys(obj).reduce((acc, prop) => {
    const val = obj[prop];

    // Drop properties from new object if
@@ -702,34 +707,54 @@ export const convertObjectPropsToCamelCase = (obj = {}, options = {}) => {
    // Skip converting properties in new object
    // if there are any mentioned in options
    if (ignoreKeyNames.indexOf(prop) > -1) {
      acc[prop] = val;
      return acc;
    }

    if (deep && (isObject(val) || Array.isArray(val))) {
      if (isObjParameterArray) {
        acc[prop] = convertObjectProps(conversionFunction, val, options);
      } else {
        acc[conversionFunction(prop)] = convertObjectProps(conversionFunction, val, options);
      }
    } else {
      acc[conversionFunction(prop)] = val;
    }

    return acc;
  }, initialValue);
};

/**
 * This method takes in object with snake_case property names
 * and returns a new object with camelCase property names
 *
 * Reasoning for this method is to ensure consistent property
 * naming conventions across JS code.
 *
 * This method also supports additional params in `options` object
 *
 * @param {Object} obj - Object to be converted.
 * @param {Object} options - Object containing additional options.
 * @param {boolean} options.deep - Flag to allow deep object conversion
 * @param {Array[]} options.dropKeys - List of properties to discard while building new object
 * @param {Array[]} options.ignoreKeyNames - List of properties to leave intact (as snake_case) while building new object
 */
export const convertObjectPropsToCamelCase = (obj = {}, options = {}) =>
  convertObjectProps(convertToCamelCase, obj, options);

/**
 * Converts all the object keys to snake case
 *
 * This method also supports additional params in `options` object
 *
 * @param {Object} obj - Object to be converted.
 * @param {Object} options - Object containing additional options.
 * @param {boolean} options.deep - Flag to allow deep object conversion
 * @param {Array[]} options.dropKeys - List of properties to discard while building new object
 * @param {Array[]} options.ignoreKeyNames - List of properties to leave intact (as snake_case) while building new object
 */
export const convertObjectPropsToSnakeCase = (obj = {}, options = {}) =>
  convertObjectProps(convertToSnakeCase, obj, options);
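// A minimal usage sketch of the two wrappers above; the payload and option
// values are illustrative only, while `deep`, `dropKeys` and `ignoreKeyNames`
// are the options documented in the JSDoc:
//
//   const payload = { merge_request: { source_branch: 'feature', head_sha: 'abc123' }, noise: true };
//
//   convertObjectPropsToCamelCase(payload, {
//     deep: true,
//     dropKeys: ['noise'],
//     ignoreKeyNames: ['head_sha'],
//   });
//   // => { mergeRequest: { sourceBranch: 'feature', head_sha: 'abc123' } }
//
//   convertObjectPropsToSnakeCase({ mergeRequest: { sourceBranch: 'feature' } }, { deep: true });
//   // => { merge_request: { source_branch: 'feature' } }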
export const imagePath = imgUrl =>
  `${gon.asset_host || ''}${gon.relative_url_root || ''}/assets/${imgUrl}`;
...
@@ -7,7 +7,7 @@ module Groups
    before_action :authorize_admin_group!
    before_action :authorize_update_max_artifacts_size!, only: [:update]
    before_action do
      push_frontend_feature_flag(:new_variables_ui, @group, default_enabled: true)
    end

    before_action :define_variables, only: [:show, :create_deploy_token]
...
@@ -21,7 +21,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
  before_action only: [:show] do
    push_frontend_feature_flag(:diffs_batch_load, @project, default_enabled: true)
    push_frontend_feature_flag(:deploy_from_footer, @project, default_enabled: true)
    push_frontend_feature_flag(:single_mr_diff_view, @project, default_enabled: true)
    push_frontend_feature_flag(:suggest_pipeline) if experiment_enabled?(:suggest_pipeline)
  end
...
@@ -6,7 +6,7 @@ module Projects
    before_action :authorize_admin_pipeline!
    before_action :define_variables
    before_action do
      push_frontend_feature_flag(:new_variables_ui, @project, default_enabled: true)
    end

    def show
...
@@ -28,7 +28,8 @@ module Ci
      license_scanning: 'gl-license-scanning-report.json',
      performance: 'performance.json',
      metrics: 'metrics.txt',
      lsif: 'lsif.json',
      dotenv: '.env'
    }.freeze

    INTERNAL_TYPES = {
@@ -43,6 +44,7 @@ module Ci
      metrics_referee: :gzip,
      network_referee: :gzip,
      lsif: :gzip,
      dotenv: :gzip,

      # All these file formats use `raw` as we need to store them uncompressed
      # for Frontend to fetch the files and do analysis
@@ -118,7 +120,8 @@ module Ci
      metrics: 12, ## EE-specific
      metrics_referee: 13, ## runner referees
      network_referee: 14, ## runner referees
      lsif: 15, # LSIF data for code navigation
      dotenv: 16
    }

    enum file_format: {
...
@@ -4,11 +4,14 @@ module Ci
  class JobVariable < ApplicationRecord
    extend Gitlab::Ci::Model
    include NewHasVariable
    include BulkInsertSafe

    belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id

    alias_attribute :secret_value, :value

    validates :key, uniqueness: { scope: :job_id }, unless: :dotenv_source?

    enum source: { internal: 0, dotenv: 1 }, _suffix: true
  end
end
@@ -67,14 +67,14 @@ class DiffFileEntity < DiffFileBaseEntity
  private

  def parallel_diff_view?(options, diff_file)
    return true unless Feature.enabled?(:single_mr_diff_view, diff_file.repository.project, default_enabled: true)

    # If we're not rendering inline, we must be rendering parallel
    !inline_diff_view?(options, diff_file)
  end

  def inline_diff_view?(options, diff_file)
    return true unless Feature.enabled?(:single_mr_diff_view, diff_file.repository.project, default_enabled: true)

    # If nothing is present, inline will be the default.
    options.fetch(:diff_view, :inline).to_sym == :inline
...
@@ -10,10 +10,24 @@ module Ci
    ].freeze

    def execute(job, artifacts_file, params, metadata_file: nil)
      return success if sha256_matches_existing_artifact?(job, params['artifact_type'], artifacts_file)

      artifact, artifact_metadata = build_artifact(job, artifacts_file, params, metadata_file)

      result = parse_artifact(job, artifact)
      return result unless result[:status] == :success

      persist_artifact(job, artifact, artifact_metadata)
    end

    private

    def build_artifact(job, artifacts_file, params, metadata_file)
      expire_in = params['expire_in'] ||
        Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in

      artifact = Ci::JobArtifact.new(
        job_id: job.id,
        project: job.project,
        file: artifacts_file,
        file_type: params['artifact_type'],
@@ -21,8 +35,9 @@ module Ci
        file_sha256: artifacts_file.sha256,
        expire_in: expire_in)

      artifact_metadata = if metadata_file
        Ci::JobArtifact.new(
          job_id: job.id,
          project: job.project,
          file: metadata_file,
          file_type: :metadata,
@@ -31,24 +46,40 @@ module Ci
          expire_in: expire_in)
      end

      [artifact, artifact_metadata]
    end

    def parse_artifact(job, artifact)
      unless Feature.enabled?(:ci_synchronous_artifact_parsing, job.project, default_enabled: true)
        return success
      end

      case artifact.file_type
      when 'dotenv' then parse_dotenv_artifact(job, artifact)
      else success
      end
    end

    def persist_artifact(job, artifact, artifact_metadata)
      Ci::JobArtifact.transaction do
        artifact.save!
        artifact_metadata&.save!

        # NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
        job.update_column(:artifacts_expire_at, artifact.expire_at)
      end

      success
    rescue ActiveRecord::RecordNotUnique => error
      track_exception(error, job, params)
      error('another artifact of the same type already exists', :bad_request)
    rescue *OBJECT_STORAGE_ERRORS => error
      track_exception(error, job, params)
      error(error.message, :service_unavailable)
    rescue => error
      error(error.message, :bad_request)
    end

    def sha256_matches_existing_artifact?(job, artifact_type, artifacts_file)
      existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
      return false unless existing_artifact
@@ -63,5 +94,9 @@ module Ci
        uploading_type: params['artifact_type']
      )
    end

    def parse_dotenv_artifact(job, artifact)
      Ci::ParseDotenvArtifactService.new(job.project, current_user).execute(artifact)
    end
  end
end
# frozen_string_literal: true

module Ci
  class ParseDotenvArtifactService < ::BaseService
    MAX_ACCEPTABLE_DOTENV_SIZE = 5.kilobytes
    MAX_ACCEPTABLE_VARIABLES_COUNT = 10

    SizeLimitError = Class.new(StandardError)
    ParserError = Class.new(StandardError)

    def execute(artifact)
      validate!(artifact)

      variables = parse!(artifact)
      Ci::JobVariable.bulk_insert!(variables)

      success
    rescue SizeLimitError, ParserError, ActiveRecord::RecordInvalid => error
      Gitlab::ErrorTracking.track_exception(error, job_id: artifact.job_id)
      error(error.message, :bad_request)
    end

    private

    def validate!(artifact)
      unless artifact&.dotenv?
        raise ArgumentError, 'Artifact is not dotenv file type'
      end

      unless artifact.file.size < MAX_ACCEPTABLE_DOTENV_SIZE
        raise SizeLimitError,
          "Dotenv Artifact Too Big. Maximum Allowable Size: #{MAX_ACCEPTABLE_DOTENV_SIZE}"
      end
    end

    def parse!(artifact)
      variables = []

      artifact.each_blob do |blob|
        blob.each_line do |line|
          key, value = scan_line!(line)

          variables << Ci::JobVariable.new(job_id: artifact.job_id,
            source: :dotenv, key: key, value: value)
        end
      end

      if variables.size > MAX_ACCEPTABLE_VARIABLES_COUNT
        raise SizeLimitError,
          "Dotenv files cannot have more than #{MAX_ACCEPTABLE_VARIABLES_COUNT} variables"
      end

      variables
    end

    def scan_line!(line)
      result = line.scan(/^(.*)=(.*)$/).last

      raise ParserError, 'Invalid Format' if result.nil?

      result.each(&:strip!)
    end
  end
end
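For instance, a dotenv line such as `DYNAMIC_ENVIRONMENT_URL=https://example.com` is split by `scan_line!` into the pair `['DYNAMIC_ENVIRONMENT_URL', 'https://example.com']`, which `parse!` turns into a dotenv-sourced `Ci::JobVariable` attached to the job (the URL value here is illustrative).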
@@ -5,7 +5,7 @@
- link_start = '<a href="%{url}">'.html_safe % { url: help_page_path('ci/variables/README', anchor: 'protected-variables') }
= s_('Environment variables are configured by your administrator to be %{link_start}protected%{link_end} by default').html_safe % { link_start: link_start, link_end: '</a>'.html_safe }

- if Feature.enabled?(:new_variables_ui, @project || @group, default_enabled: true)
  - is_group = !@group.nil?
  #js-ci-project-variables{ data: { endpoint: save_endpoint, project_id: @project&.id || '', group: is_group.to_s, maskable_regex: ci_variable_maskable_regex} }
...
---
title: Support DotEnv Variables through report type artifact
merge_request: 26247
author:
type: added
---
title: Diffs load each view style separately, on demand
merge_request: 24821
author:
type: performance
---
title: Update UI for project and group settings CI variables
merge_request: 26901
author:
type: added
# frozen_string_literal: true

class AddRuntimeCreatedToCiJobVariables < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  DEFAULT_SOURCE = 0 # Equivalent to Ci::JobVariable.internal_source

  def up
    add_column_with_default(:ci_job_variables, :source, :integer, limit: 2, default: DEFAULT_SOURCE, allow_null: false)
  end

  def down
    remove_column(:ci_job_variables, :source)
  end
end
@@ -780,6 +780,7 @@ ActiveRecord::Schema.define(version: 2020_03_12_163407) do
    t.string "encrypted_value_iv"
    t.bigint "job_id", null: false
    t.integer "variable_type", limit: 2, default: 1, null: false
    t.integer "source", limit: 2, default: 0, null: false
    t.index ["job_id"], name: "index_ci_job_variables_on_job_id"
    t.index ["key", "job_id"], name: "index_ci_job_variables_on_key_and_job_id", unique: true
  end
...
@@ -156,6 +156,70 @@ Starting with GitLab 9.3, the environment URL is exposed to the Runner via
- `.gitlab-ci.yml`.
- The external URL from the environment if not defined in `.gitlab-ci.yml`.
#### Set dynamic environment URLs after a job finishes
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/17066) in GitLab 12.9.
In a job script, you can specify a static [environment URL](#using-the-environment-url).
However, there may be times when you want a dynamic URL. For example,
if you deploy a Review App to an external hosting
service that generates a random URL per deployment, like `https://94dd65b.amazonaws.com/qa-lambda-1234567`,
you don't know the URL before the deployment script finishes.
If you want to use the environment URL in GitLab, you would have to update it manually.
To address this problem, you can configure a deployment job to report back a set of
variables, including the URL that was dynamically generated by the external service.
GitLab supports the [dotenv](https://github.com/bkeepers/dotenv) file format, and expands
the `environment:url` value with the variables defined in the dotenv file.
To use this feature, specify the
[`artifacts:reports:dotenv`](yaml/README.md#artifactsreportsdotenv) keyword in `.gitlab-ci.yml`.
##### Example of setting dynamic environment URLs
The following example shows a Review App that creates a new environment
per merge request. The `review` job is triggered by every push, and
creates or updates an environment named `review/your-branch-name`.
The environment URL is set to `$DYNAMIC_ENVIRONMENT_URL`:
```yaml
review:
  script:
    - DYNAMIC_ENVIRONMENT_URL=$(deploy-script)                                 # In script, get the environment URL.
    - echo "DYNAMIC_ENVIRONMENT_URL=$DYNAMIC_ENVIRONMENT_URL" >> deploy.env    # Add the value to a dotenv file.
  artifacts:
    reports:
      dotenv: deploy.env                                                       # Report back dotenv file to rails.
  environment:
    name: review/$CI_COMMIT_REF_SLUG
    url: $DYNAMIC_ENVIRONMENT_URL                                              # and set the variable produced in script to `environment:url`
    on_stop: stop_review

stop_review:
  script:
    - ./teardown-environment
  when: manual
  environment:
    name: review/$CI_COMMIT_REF_SLUG
    action: stop
```
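With the example above, the `deploy.env` report artifact ends up containing a single line such as `DYNAMIC_ENVIRONMENT_URL=https://94dd65b.amazonaws.com/qa-lambda-1234567` (the exact value depends on whatever the deployment script prints), which GitLab reads back once the job finishes.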
As soon as the `review` job finishes, GitLab updates the `review/your-branch-name`
environment's URL. It parses the `deploy.env` report artifact, registers its variables
as runtime-created, uses them to expand `environment:url: $DYNAMIC_ENVIRONMENT_URL`,
and sets the result as the environment URL.
You can also specify a static part of the URL at `environment:url:`, such as
`https://$DYNAMIC_ENVIRONMENT_URL`. If the value of `DYNAMIC_ENVIRONMENT_URL` is
`123.awesome.com`, the final result will be `https://123.awesome.com`.
The assigned URL for the `review/your-branch-name` environment is visible in the UI.
[See where the environment URL is displayed](#using-the-environment-url).
> **Notes:**
>
> - `stop_review` doesn't generate a dotenv report artifact, so it won't recognize the `DYNAMIC_ENVIRONMENT_URL` variable. Therefore you should not set `environment:url:` in the `stop_review` job.
> - If the environment URL is not valid (for example, the URL is malformed), the system doesn't update the environment URL.
### Configuring manual deployments

Adding `when: manual` to an automatically executed job's configuration converts it to
...
---
disqus_identifier: 'https://docs.gitlab.com/ee/ci/pipelines.html'
type: reference
---
...