Commit f4186a75 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

上级 02211168
<script>
import { initEditorLite } from '~/blob/utils';
export default {
props: {
value: {
type: String,
required: true,
},
fileName: {
type: String,
required: false,
default: '',
},
},
data() {
return {
content: this.value,
editor: null,
};
},
watch: {
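// Keep the editor's syntax highlighting in sync when the file name (and thus the language) changes.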
fileName(newVal) {
this.editor.updateModelLanguage(newVal);
},
},
mounted() {
this.editor = initEditorLite({
el: this.$refs.editor,
blobPath: this.fileName,
blobContent: this.content,
});
},
methods: {
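// Read the current contents out of the editor and emit them so `v-model` on the parent stays in sync.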
triggerFileChange() {
const val = this.editor.getValue();
this.content = val;
this.$emit('input', val);
},
},
};
</script>
<template>
<div class="file-content code">
<pre id="editor" ref="editor" data-editor-loading @focusout="triggerFileChange">{{
content
}}</pre>
</div>
</template>
/* global ace */
import Editor from '~/editor/editor_lite';
export function initEditorLite({ el, blobPath, blobContent }) {
if (!el) {
throw new Error(`"el" parameter is required to initialize Editor`);
}
let editor;
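// Use the Monaco-based Editor Lite when the `monacoSnippets` feature flag is enabled; otherwise fall back to the legacy Ace editor.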
if (window?.gon?.features?.monacoSnippets) {
editor = new Editor();
editor.createInstance({
el,
blobPath,
blobContent,
});
} else {
editor = ace.edit(el);
}
return editor;
}
export default () => ({});
<script>
import _ from 'underscore';
import { mapState } from 'vuex';
import { GlTooltipDirective } from '@gitlab/ui';
import { GlLabel, GlTooltipDirective } from '@gitlab/ui';
import issueCardInner from 'ee_else_ce/boards/mixins/issue_card_inner';
import { sprintf, __ } from '~/locale';
import Icon from '~/vue_shared/components/icon.vue';
......@@ -10,18 +10,17 @@ import UserAvatarLink from '../../vue_shared/components/user_avatar/user_avatar_
import IssueDueDate from './issue_due_date.vue';
import IssueTimeEstimate from './issue_time_estimate.vue';
import boardsStore from '../stores/boards_store';
import IssueCardInnerScopedLabel from './issue_card_inner_scoped_label.vue';
import { isScopedLabel } from '~/lib/utils/common_utils';
export default {
components: {
GlLabel,
Icon,
UserAvatarLink,
TooltipOnTruncate,
IssueDueDate,
IssueTimeEstimate,
IssueCardWeight: () => import('ee_component/boards/components/issue_card_weight.vue'),
IssueCardInnerScopedLabel,
},
directives: {
GlTooltip: GlTooltipDirective,
......@@ -145,12 +144,6 @@ export default {
boardsStore.toggleFilter(filter);
},
labelStyle(label) {
return {
backgroundColor: label.color,
color: label.textColor,
};
},
showScopedLabel(label) {
return boardsStore.scopedLabels.enabled && isScopedLabel(label);
},
......@@ -184,27 +177,16 @@ export default {
</div>
<div v-if="showLabelFooter" class="board-card-labels prepend-top-4 d-flex flex-wrap">
<template v-for="label in orderedLabels">
<issue-card-inner-scoped-label
v-if="showScopedLabel(label)"
<gl-label
:key="label.id"
:label="label"
:label-style="labelStyle(label)"
:background-color="label.color"
:title="label.title"
:description="label.description"
size="sm"
:scoped="showScopedLabel(label)"
:scoped-labels-documentation-link="helpLink"
@scoped-label-click="filterByLabel($event)"
/>
<button
v-else
:key="label.id"
v-gl-tooltip
:style="labelStyle(label)"
:title="label.description"
class="badge color-label append-right-4 prepend-top-4"
type="button"
@click="filterByLabel(label)"
>
{{ label.title }}
</button>
/>
</template>
</div>
<div class="board-card-footer d-flex justify-content-between align-items-end">
......
<script>
import { GlLink, GlTooltip } from '@gitlab/ui';
export default {
components: {
GlTooltip,
GlLink,
},
props: {
label: {
type: Object,
required: true,
},
labelStyle: {
type: Object,
required: true,
},
scopedLabelsDocumentationLink: {
type: String,
required: true,
},
},
};
</script>
<template>
<span
class="d-inline-block position-relative scoped-label-wrapper append-right-4 prepend-top-4 board-label"
>
<a @click="$emit('scoped-label-click', label)">
<span :ref="'labelTitleRef'" :style="labelStyle" class="badge label color-label">
{{ label.title }}
</span>
<gl-tooltip :target="() => $refs.labelTitleRef" placement="top" boundary="viewport">
<span class="font-weight-bold scoped-label-tooltip-title">{{ __('Scoped label') }}</span
><br />
{{ label.description }}
</gl-tooltip>
</a>
<gl-link :href="scopedLabelsDocumentationLink" target="_blank" class="label scoped-label"
><i class="fa fa-question-circle" :style="labelStyle"></i
></gl-link>
</span>
</template>
<script>
import { mapState, mapActions } from 'vuex';
import { GlTable, GlLoadingIcon, GlBadge } from '@gitlab/ui';
import { CLUSTER_TYPES } from '../constants';
import { __ } from '~/locale';
import tooltip from '~/vue_shared/directives/tooltip';
import { CLUSTER_TYPES, STATUSES } from '../constants';
import { __, sprintf } from '~/locale';
export default {
components: {
......@@ -10,6 +11,9 @@ export default {
GlLoadingIcon,
GlBadge,
},
directives: {
tooltip,
},
fields: [
{
key: 'name',
......@@ -38,6 +42,13 @@ export default {
},
methods: {
...mapActions(['fetchClusters']),
statusClass(status) {
return STATUSES[status].className;
},
statusTitle(status) {
const { title } = STATUSES[status];
return sprintf(__('Status: %{title}'), { title }, false);
},
},
};
</script>
......@@ -52,6 +63,25 @@ export default {
variant="light"
class="qa-clusters-table"
>
<template #cell(name)="{ item }">
<div class="d-flex flex-row-reverse flex-md-row js-status">
{{ item.name }}
<gl-loading-icon
v-if="item.status === 'deleting'"
v-tooltip
:title="statusTitle(item.status)"
size="sm"
class="mr-2 ml-md-2"
/>
<div
v-else
v-tooltip
class="cluster-status-indicator rounded-circle align-self-center gl-w-8 gl-h-8 mr-2 ml-md-2"
:class="statusClass(item.status)"
:title="statusTitle(item.status)"
></div>
</div>
</template>
<template #cell(clusterType)="{value}">
<gl-badge variant="light">
{{ value }}
......
......@@ -6,6 +6,10 @@ export const CLUSTER_TYPES = {
instance_type: __('Instance'),
};
export default {
CLUSTER_TYPES,
export const STATUSES = {
disabled: { className: 'disabled', title: __('Disabled') },
connected: { className: 'bg-success', title: __('Connected') },
unreachable: { className: 'bg-danger', title: __('Unreachable') },
authentication_failure: { className: 'bg-warning', title: __('Authentication Failure') },
deleting: { title: __('Deleting') },
};
export default (initialState = {}) => ({
endpoint: initialState.endpoint,
loading: false, // TODO - set this to true once integrated with BE
clusters: [
// TODO - remove mock data once integrated with BE
// {
// name: 'My Cluster',
// environmentScope: '*',
// size: '3',
// clusterType: 'group_type',
// },
// {
// name: 'My other cluster',
// environmentScope: 'production',
// size: '12',
// clusterType: 'project_type',
// },
],
clusters: [],
});
/* global ace */
import Editor from '~/editor/editor_lite';
import { initEditorLite } from '~/blob/utils';
import setupCollapsibleInputs from './collapsible_input';
let editor;
const initAce = () => {
editor = ace.edit('editor');
const editorEl = document.getElementById('editor');
const form = document.querySelector('.snippet-form-holder form');
const content = document.querySelector('.snippet-file-content');
editor = initEditorLite({ el: editorEl });
form.addEventListener('submit', () => {
content.value = editor.getValue();
});
......@@ -20,8 +21,7 @@ const initMonaco = () => {
const fileNameEl = document.querySelector('.js-snippet-file-name');
const form = document.querySelector('.snippet-form-holder form');
editor = new Editor();
editor.createInstance({
editor = initEditorLite({
el: editorEl,
blobPath: fileNameEl.value,
blobContent: contentEl.value,
......
<script>
import BlobHeaderEdit from '~/blob/components/blob_edit_header.vue';
import BlobContentEdit from '~/blob/components/blob_edit_content.vue';
export default {
components: {
BlobHeaderEdit,
BlobContentEdit,
},
props: {
content: {
type: String,
required: true,
},
fileName: {
type: String,
required: true,
},
},
data() {
return {
name: this.fileName,
blobContent: this.content,
};
},
};
</script>
<template>
<div class="form-group file-editor">
<label>{{ s__('Snippets|File') }}</label>
<div class="file-holder snippet">
<blob-header-edit v-model="name" />
<blob-content-edit v-model="blobContent" :file-name="name" />
</div>
</div>
</template>
......@@ -111,4 +111,6 @@ export function initUserTracking() {
if (opts.linkClickTracking) window.snowplow('enableLinkClickTracking');
Tracking.bindDocument();
document.dispatchEvent(new Event('SnowplowInitialized'));
}
<script>
import DropdownValueScopedLabel from './dropdown_value_scoped_label.vue';
import DropdownValueRegularLabel from './dropdown_value_regular_label.vue';
import { GlLabel } from '@gitlab/ui';
import { isScopedLabel } from '~/lib/utils/common_utils';
export default {
components: {
DropdownValueScopedLabel,
DropdownValueRegularLabel,
GlLabel,
},
props: {
labels: {
......@@ -37,12 +35,6 @@ export default {
labelFilterUrl(label) {
return `${this.labelFilterBasePath}?label_name[]=${encodeURIComponent(label.title)}`;
},
labelStyle(label) {
return {
color: label.textColor,
backgroundColor: label.color,
};
},
scopedLabelsDescription({ description = '' }) {
return `<span class="font-weight-bold scoped-label-tooltip-title">Scoped label</span><br />${description}`;
},
......@@ -65,22 +57,15 @@ export default {
</span>
<template v-for="label in labels" v-else>
<dropdown-value-scoped-label
v-if="showScopedLabels(label)"
<gl-label
:key="label.id"
:label="label"
:label-filter-url="labelFilterUrl(label)"
:label-style="labelStyle(label)"
:target="labelFilterUrl(label)"
:background-color="label.color"
:title="label.title"
:description="label.description"
:scoped="showScopedLabels(label)"
:scoped-labels-documentation-link="scopedLabelsDocumentationLink"
/>
<dropdown-value-regular-label
v-else
:key="label.id"
:label="label"
:label-filter-url="labelFilterUrl(label)"
:label-style="labelStyle(label)"
/>
</template>
</div>
</template>
<script>
import { GlTooltip } from '@gitlab/ui';
export default {
components: {
GlTooltip,
},
props: {
label: {
type: Object,
required: true,
},
labelStyle: {
type: Object,
required: true,
},
labelFilterUrl: {
type: String,
required: true,
},
},
};
</script>
<template>
<a ref="regularLabelRef" :href="labelFilterUrl">
<span :style="labelStyle" class="badge color-label">
{{ label.title }}
</span>
<gl-tooltip
v-if="label.description"
:target="() => $refs.regularLabelRef"
placement="top"
boundary="viewport"
>
{{ label.description }}
</gl-tooltip>
</a>
</template>
<script>
import { GlLink, GlTooltip } from '@gitlab/ui';
export default {
components: {
GlTooltip,
GlLink,
},
props: {
label: {
type: Object,
required: true,
},
labelStyle: {
type: Object,
required: true,
},
scopedLabelsDocumentationLink: {
type: String,
required: true,
},
labelFilterUrl: {
type: String,
required: true,
},
},
};
</script>
<template>
<span class="d-inline-block position-relative scoped-label-wrapper">
<a :href="labelFilterUrl">
<span :ref="`labelTitleRef`" :style="labelStyle" class="badge color-label label">
{{ label.title }}
</span>
<gl-tooltip
v-if="label.description"
:target="() => $refs.labelTitleRef"
placement="top"
boundary="viewport"
>
<span class="font-weight-bold scoped-label-tooltip-title">{{ __('Scoped label') }}</span
><br />
{{ label.description }}
</gl-tooltip>
</a>
<gl-link :href="scopedLabelsDocumentationLink" target="_blank" class="label scoped-label"
><i class="fa fa-question-circle" :style="labelStyle"></i
></gl-link>
</span>
</template>
......@@ -266,20 +266,9 @@
background-color: $blue-50;
}
.badge {
border: 0;
outline: 0;
&:hover {
text-decoration: underline;
}
@include media-breakpoint-down(lg) {
font-size: $gl-font-size-xs;
padding-left: $gl-padding-4;
padding-right: $gl-padding-4;
font-weight: $gl-font-weight-bold;
}
.gl-label {
margin-top: 4px;
margin-right: 4px;
}
.confidential-icon {
......
......@@ -163,3 +163,9 @@
color: $black;
font-weight: $gl-font-weight-bold;
}
.cluster-status-indicator {
&.disabled {
background-color: $gray-600;
}
}
......@@ -158,6 +158,10 @@
a:not(.btn) {
color: inherit;
.gl-label-text:hover {
color: inherit;
}
&:hover {
color: $blue-800;
......
......@@ -54,8 +54,10 @@
.mh-50vh { max-height: 50vh; }
.font-size-inherit { font-size: inherit; }
.gl-w-8 { width: px-to-rem($grid-size); }
.gl-w-16 { width: px-to-rem($grid-size * 2); }
.gl-w-64 { width: px-to-rem($grid-size * 8); }
.gl-h-8 { height: px-to-rem($grid-size); }
.gl-h-32 { height: px-to-rem($grid-size * 4); }
.gl-h-64 { height: px-to-rem($grid-size * 8); }
......
# frozen_string_literal: true
module Ci
# A state object to centralize logic related to merge request pipelines
class PipelinesForMergeRequestFinder
include Gitlab::Utils::StrongMemoize
EVENT = 'merge_request_event'
def initialize(merge_request)
@merge_request = merge_request
end
attr_reader :merge_request
delegate :commit_shas, :source_project, :source_branch, to: :merge_request
def all
strong_memoize(:all_pipelines) do
next Ci::Pipeline.none unless source_project
pipelines =
if merge_request.persisted?
pipelines_using_cte
else
triggered_for_branch.for_sha(commit_shas)
end
sort(pipelines)
end
end
private
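# Use a CTE of the merge request's commit SHAs to collect the merge request pipelines
# and branch pipelines that ran for those commits.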
def pipelines_using_cte
cte = Gitlab::SQL::CTE.new(:shas, merge_request.all_commits.select(:sha))
source_pipelines_join = cte.table[:sha].eq(Ci::Pipeline.arel_table[:source_sha])
source_pipelines = filter_by(triggered_by_merge_request, cte, source_pipelines_join)
detached_pipelines = filter_by_sha(triggered_by_merge_request, cte)
pipelines_for_branch = filter_by_sha(triggered_for_branch, cte)
Ci::Pipeline.with(cte.to_arel) # rubocop: disable CodeReuse/ActiveRecord
.from_union([source_pipelines, detached_pipelines, pipelines_for_branch])
end
def filter_by_sha(pipelines, cte)
hex = Arel::Nodes::SqlLiteral.new("'hex'")
string_sha = Arel::Nodes::NamedFunction.new('encode', [cte.table[:sha], hex])
join_condition = string_sha.eq(Ci::Pipeline.arel_table[:sha])
filter_by(pipelines, cte, join_condition)
end
def filter_by(pipelines, cte, join_condition)
shas_table =
Ci::Pipeline.arel_table
.join(cte.table, Arel::Nodes::InnerJoin)
.on(join_condition)
.join_sources
pipelines.joins(shas_table) # rubocop: disable CodeReuse/ActiveRecord
end
# NOTE: this method returns only parent merge request pipelines.
# Child merge request pipelines have a different source.
def triggered_by_merge_request
source_project.ci_pipelines
.where(source: :merge_request_event, merge_request: merge_request) # rubocop: disable CodeReuse/ActiveRecord
end
def triggered_for_branch
source_project.ci_pipelines
.where(source: branch_pipeline_sources, ref: source_branch, tag: false) # rubocop: disable CodeReuse/ActiveRecord
end
def branch_pipeline_sources
strong_memoize(:branch_pipeline_sources) do
Ci::Pipeline.sources.reject { |source| source == EVENT }.values
end
end
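# List merge request pipelines first, then order all pipelines by descending ID.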
def sort(pipelines)
sql = 'CASE ci_pipelines.source WHEN (?) THEN 0 ELSE 1 END, ci_pipelines.id DESC'
query = ApplicationRecord.send(:sanitize_sql_array, [sql, Ci::Pipeline.sources[:merge_request_event]]) # rubocop:disable GitlabSecurity/PublicSend
pipelines.order(Arel.sql(query)) # rubocop: disable CodeReuse/ActiveRecord
end
end
end
......@@ -1251,7 +1251,7 @@ class MergeRequest < ApplicationRecord
def all_pipelines
strong_memoize(:all_pipelines) do
MergeRequest::Pipelines.new(self).all
Ci::PipelinesForMergeRequestFinder.new(self).all
end
end
......
# frozen_string_literal: true
# A state object to centralize logic related to merge request pipelines
class MergeRequest::Pipelines
include Gitlab::Utils::StrongMemoize
EVENT = 'merge_request_event'
def initialize(merge_request)
@merge_request = merge_request
end
attr_reader :merge_request
delegate :commit_shas, :source_project, :source_branch, to: :merge_request
def all
strong_memoize(:all_pipelines) do
next Ci::Pipeline.none unless source_project
pipelines =
if merge_request.persisted?
pipelines_using_cte
else
triggered_for_branch.for_sha(commit_shas)
end
sort(pipelines)
end
end
private
def pipelines_using_cte
cte = Gitlab::SQL::CTE.new(:shas, merge_request.all_commits.select(:sha))
source_pipelines_join = cte.table[:sha].eq(Ci::Pipeline.arel_table[:source_sha])
source_pipelines = filter_by(triggered_by_merge_request, cte, source_pipelines_join)
detached_pipelines = filter_by_sha(triggered_by_merge_request, cte)
pipelines_for_branch = filter_by_sha(triggered_for_branch, cte)
Ci::Pipeline.with(cte.to_arel)
.from_union([source_pipelines, detached_pipelines, pipelines_for_branch])
end
def filter_by_sha(pipelines, cte)
hex = Arel::Nodes::SqlLiteral.new("'hex'")
string_sha = Arel::Nodes::NamedFunction.new('encode', [cte.table[:sha], hex])
join_condition = string_sha.eq(Ci::Pipeline.arel_table[:sha])
filter_by(pipelines, cte, join_condition)
end
def filter_by(pipelines, cte, join_condition)
shas_table =
Ci::Pipeline.arel_table
.join(cte.table, Arel::Nodes::InnerJoin)
.on(join_condition)
.join_sources
pipelines.joins(shas_table)
end
# NOTE: this method returns only parent merge request pipelines.
# Child merge request pipelines have a different source.
def triggered_by_merge_request
source_project.ci_pipelines
.where(source: :merge_request_event, merge_request: merge_request)
end
def triggered_for_branch
source_project.ci_pipelines
.where(source: branch_pipeline_sources, ref: source_branch, tag: false)
end
def branch_pipeline_sources
strong_memoize(:branch_pipeline_sources) do
Ci::Pipeline.sources.reject { |source| source == EVENT }.values
end
end
def sort(pipelines)
sql = 'CASE ci_pipelines.source WHEN (?) THEN 0 ELSE 1 END, ci_pipelines.id DESC'
query = ApplicationRecord.send(:sanitize_sql_array, [sql, Ci::Pipeline.sources[:merge_request_event]]) # rubocop:disable GitlabSecurity/PublicSend
pipelines.order(Arel.sql(query))
end
end
---
title: Resolve Change link-icons on security configuration page to follow design system
merge_request: 26340
author:
type: other
---
title: Support more query variables in custom dashboards per project
merge_request: 25732
author:
type: added
---
title: Update labels in Vue with GlLabel component
merge_request: 21465
author:
type: changed
---
title: Show cluster status (FE)
merge_request: 26368
author:
type: added
......@@ -1006,6 +1006,12 @@ unset http_proxy
unset https_proxy
```
### Gitaly not listening on new address after reconfiguring
After you update the `gitaly['listen_addr']` or `gitaly['prometheus_listen_addr']` values, Gitaly may continue to listen on the old address after a `sudo gitlab-ctl reconfigure`.
When this occurs, performing a `sudo gitlab-ctl restart` will resolve the issue. This will no longer be necessary after [this issue](https://gitlab.com/gitlab-org/gitaly/issues/2521) is resolved.
### Praefect
Praefect is an experimental daemon that allows for replication of the Git data.
......
......@@ -84,6 +84,8 @@ with secure tokens as you complete the setup process.
Praefect cluster directly; that could lead to data loss.
1. `PRAEFECT_SQL_PASSWORD`: this password is used by Praefect to connect to
PostgreSQL.
1. `GRAFANA_PASSWORD`: this password is used to access the `admin`
account in the Grafana dashboards.
We will note in the instructions below where these secrets are required.
......@@ -184,6 +186,10 @@ application server, or a Gitaly node.
# Make Praefect accept connections on all network interfaces.
# Use firewalls to restrict access to this address/port.
praefect['listen_addr'] = '0.0.0.0:2305'
# Enable Prometheus metrics access to Praefect. You must use firewalls
# to restrict access to this address/port.
praefect['prometheus_listen_addr'] = '0.0.0.0:9652'
```
1. Configure a strong `auth_token` for **Praefect** by editing
......@@ -354,6 +360,10 @@ documentation](index.md#3-gitaly-server-configuration).
# Make Gitaly accept connections on all network interfaces.
# Use firewalls to restrict access to this address/port.
gitaly['listen_addr'] = '0.0.0.0:8075'
# Enable Prometheus metrics access to Gitaly. You must use firewalls
# to restrict access to this address/port.
gitaly['prometheus_listen_addr'] = '0.0.0.0:9236'
```
1. Configure a strong `auth_token` for **Gitaly** by editing
......@@ -453,7 +463,7 @@ Particular attention should be shown to:
You will need to replace:
- `PRAEFECT_URL_OR_IP` with the IP/host address of the Praefect node
- `PRAEFECT_HOST` with the IP address or hostname of the Praefect node
- `PRAEFECT_EXTERNAL_TOKEN` with the real secret
```ruby
......@@ -462,7 +472,7 @@ Particular attention should be shown to:
"path" => "/var/opt/gitlab/git-data"
},
"praefect" => {
"gitaly_address" => "tcp://PRAEFECT_URL_OR_IP:2305",
"gitaly_address" => "tcp://PRAEFECT_HOST:2305",
"gitaly_token" => 'PRAEFECT_EXTERNAL_TOKEN'
}
})
......@@ -478,6 +488,38 @@ Particular attention should be shown to:
gitlab_shell['secret_token'] = 'GITLAB_SHELL_SECRET_TOKEN'
```
1. Add Prometheus monitoring settings by editing `/etc/gitlab/gitlab.rb`.
You will need to replace:
- `PRAEFECT_HOST` with the IP address or hostname of the Praefect node
- `GITALY_HOST` with the IP address or hostname of each Gitaly node
```ruby
prometheus['scrape_configs'] = [
{
'job_name' => 'praefect',
'static_configs' => [
'targets' => [
'PRAEFECT_HOST:9652' # praefect
]
]
},
{
'job_name' => 'praefect-gitaly',
'static_configs' => [
'targets' => [
'GITALY_HOST:9236', # gitaly-1
'GITALY_HOST:9236', # gitaly-2
'GITALY_HOST:9236', # gitaly-3
]
]
}
]
grafana['disable_login_form'] = false
```
1. Save the changes to `/etc/gitlab/gitlab.rb` and [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure):
```shell
......@@ -490,6 +532,12 @@ Particular attention should be shown to:
sudo gitlab-rake gitlab:gitaly:check
```
1. Set the Grafana admin password. This command will prompt you to enter a new password:
```shell
sudo gitlab-ctl set-grafana-password
```
1. Update the **Repository storage** settings from **Admin Area > Settings >
Repository > Repository storage** to make the newly configured Praefect
cluster the storage location for new Git repositories.
......@@ -502,7 +550,12 @@ Particular attention should be shown to:
repository that viewed. If the project is created, and you can see the
README file, it works!
Congratulations! You have configured a highly available Praefect cluster, and
1. Inspect metrics by browsing to `/-/grafana` on your GitLab server.
Log in with `admin` / `GRAFANA_PASSWORD`. Go to 'Explore' and query
`gitlab_build_info` to verify that you are getting metrics from all your
machines.
Congratulations! You have configured a highly available Praefect cluster.
## Migrating existing repositories to Praefect
......
......@@ -58,6 +58,7 @@ Runs the following rake tasks:
- `gitlab:app:check`
It will check that each component was set up according to the installation guide and suggest fixes for issues found.
This command must be run from your app server and will not work correctly on component servers like [Gitaly](../gitaly/index.md#running-gitaly-on-its-own-server).
You may also have a look at our Troubleshooting Guides:
......
......@@ -23,7 +23,7 @@ The existing database model requires the following:
- A package can have one or more package files.
- The package model is based on storing information about the package and its version.
## API endpoints
### API endpoints
Package systems work with GitLab via API. For example `ee/lib/api/npm_packages.rb`
implements API endpoints to work with NPM clients. So, the first thing to do is to
......@@ -45,7 +45,7 @@ PUT https://gitlab.com/api/v4/projects/<your_project_id>/packages/npm/
Group-level and instance-level endpoints are good to have but are optional.
### Remote hierarchy
#### Remote hierarchy
Packages are scoped within various levels of access, which is generally configured by setting your remote. A
remote endpoint may be set at the project level, meaning when installing packages, only packages belonging to that
......@@ -68,7 +68,7 @@ NOTE: **Note:** NPM is currently a hybrid of the instance level and group level.
It is using the top-level group or namespace as the defining portion of the name
(for example, `@my-group-name/my-package-name`).
## Naming conventions
### Naming conventions
To avoid name conflict for instance-level endpoints you will need to define a package naming convention
that gives a way to identify the project that the package belongs to. This generally involves using the project
......@@ -82,36 +82,13 @@ a user from reusing an existing name within a given scope.
Otherwise, naming should follow the package manager's naming conventions and include a validation in the `package.rb`
model for that package type.
## File uploads
File uploads should be handled by GitLab Workhorse using object accelerated uploads. What this means is that
the workhorse proxy that checks all incoming requests to GitLab will intercept the upload request,
upload the file, and forward a request to the main GitLab codebase only containing the metadata
and file location rather than the file itself. An overview of this process can be found in the
[development documentation](uploads.md#direct-upload).
In terms of code, this means a route will need to be added to the
[GitLab Workhorse project](https://gitlab.com/gitlab-org/gitlab-workhorse) for each level of remote being added
(instance, group, project). [This merge request](https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/412/diffs)
demonstrates adding an instance-level endpoint for Conan to workhorse. You can also see the Maven project level endpoint
implemented in the same file.
Once the route has been added, you will need to add an additional `/authorize` version of the upload endpoint to your API file.
[Here is an example](https://gitlab.com/gitlab-org/gitlab/blob/398fef1ca26ae2b2c3dc89750f6b20455a1e5507/ee/lib/api/maven_packages.rb#L164)
of the additional endpoint added for Maven. The `/authorize` endpoint verifies and authorizes the request from workhorse,
then the normal upload endpoint is implemented below, consuming the metadata that workhorse provides in order to
create the package record. Workhorse provides a variety of file metadata such as type, size, and different checksum formats.
For testing purposes, you may want to [enable object storage](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/master/doc/howto/object_storage.md)
in your local development environment.
## Services and finders
### Services and finders
Logic for performing tasks such as creating package or package file records or finding packages should not live
within the API file, but should live in services and finders. Existing services and finders should be used or
extended when possible to keep the common package logic grouped as much as possible.
## Configuration
### Configuration
GitLab has a `packages` section in its configuration file (`gitlab.rb`).
It applies to all package systems supported by GitLab. Usually you don't need
......@@ -119,7 +96,96 @@ to add anything there.
Packages can be configured to use object storage, therefore your code must support it.
## Database and handling metadata
## MVC Approach
New package systems are integrated into GitLab as an [MVC](https://about.gitlab.com/handbook/values/#minimum-viable-change-mvc). Therefore, the first iteration should support the bare minimum of user actions:
- Authentication
- Uploading a package
- Pulling a package
- Required actions
Required actions are all the additional requests that GitLab must handle so the corresponding package manager CLI can work properly. These could include a search feature or an endpoint providing meta information about a package. For example:
- For NuGet, the search request was implemented during the first MVC iteration, to support Visual Studio.
- For NPM, there is a metadata endpoint used by `npm` to get the tarball URL.
For the first MVC iteration, it's recommended to stay at the project level of the [remote hierarchy](#remote-hierarchy). Other levels can be tackled with [future Merge Requests](#future-work).
There are usually 2 phases for the MVC:
- [Analysis](#analysis)
- [Implementation](#implementation)
### Keep iterations small
When implementing a new package manager, it is tempting to create one large merge request containing all of the
endpoints and services necessary to support basic usage. Instead, put the
API endpoints behind a [feature flag](feature_flags/development.md) and
submit each endpoint or behavior (download, upload, etc) in a different merge request to shorten the review
process.
### Analysis
During this phase, the idea is to collect as much information as possible about the API used by the package system. Here are some aspects that can be useful to include:
- **Authentication**: What authentication mechanisms are available (OAuth, Basic
Authorization, other). Keep in mind that GitLab users will often want to use their
[Personal Access Tokens](../user/profile/personal_access_tokens.md).
Although not needed for the MVC first iteration, the [CI job tokens](../user/project/new_ci_build_permissions_model.md#job-token)
have to be supported at some point in the future.
- **Requests**: Which requests are needed to have a working MVC. Ideally, produce
a list of all the requests needed for the MVC (including required actions). Further
investigation could provide an example for each request with the request and the response bodies.
- **Upload**: Carefully analyze how the upload process works. This will probably be the most
complex request to implement. A detailed analysis is desired here as uploads can be
encoded in different ways (body or multipart) and can even be in a totally different
format (for example, a JSON structure where the package file is a Base64 value of
a particular field). These different encodings lead to slightly different implementations
on GitLab and GitLab Workhorse. For more detailed information, review [file uploads](#file-uploads).
- **Endpoints**: Suggest a list of endpoint URLs that will be implemented in GitLab.
- **Split work**: Suggest a list of changes to do to incrementally build the MVC.
This will give a good idea of how much work there is to be done. Here is an example
list that would need to be adapted on a case by case basis:
1. Empty file structure (API file, base service for this package)
1. Authentication system for "logging in" to the package manager
1. Identify metadata and create applicable tables
1. Workhorse route for [object storage direct upload](uploads.md#direct-upload)
1. Endpoints required for upload/publish
1. Endpoints required for install/download
1. Endpoints required for required actions
The analysis usually takes a full milestone to complete, though it's not impossible to start the implementation in the same milestone.
In particular, the upload request can have some [requirements in the GitLab Workhorse project](#file-uploads). This project has a different release cycle than the Rails backend. It's **strongly** recommended that you open an issue there as soon as the upload request analysis is done. This way, GitLab Workhorse is ready when the upload request is implemented on the Rails backend.
### Implementation
The implementation of the different Merge Requests will vary between different package system integrations. Contributors should take into account some important aspects of the implementation phase.
#### Authentication
The MVC must support [Personal Access Tokens](../user/profile/personal_access_tokens.md) right from the start. We currently support two options for these tokens: OAuth and Basic Access.
OAuth authentication is already supported. You can see an example in the [npm API](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/api/npm_packages.rb).
[Basic Access authentication](https://developer.mozilla.org/en-US/docs/Web/HTTP/Authentication)
support is done by overriding a specific function in the API helpers, like
[this example in the Conan API](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/api/conan_packages.rb).
For this authentication mechanism, keep in mind that some clients can send an unauthenticated
request first, wait for the 401 Unauthorized response with the [`WWW-Authenticate`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/WWW-Authenticate)
field, then send an updated (authenticated) request. This case is more involved as
GitLab needs to handle the 401 Unauthorized response. The [NuGet API](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/api/nuget_packages.rb)
supports this case.
#### Authorization
There are project and group level permissions for `read_package`, `create_package`, and `destroy_package`. Each
endpoint should
[authorize the requesting user](https://gitlab.com/gitlab-org/gitlab/blob/398fef1ca26ae2b2c3dc89750f6b20455a1e5507/ee/lib/api/conan_packages.rb)
against the project or group before continuing.
#### Database and handling metadata
The current database model allows you to store a name and a version for each package.
Every time you upload a new package, you can either create a new record of `Package`
......@@ -137,44 +203,41 @@ delegate from the package model.
Note that the existing package UI only displays information within the `packages_packages` and `packages_package_files`
tables. If the data stored in the metadata tables need to be displayed, a ~frontend change will be required.
## Authorization
#### File uploads
There are project and group level permissions for `read_package`, `create_package`, and `destroy_package`. Each
endpoint should
[authorize the requesting user](https://gitlab.com/gitlab-org/gitlab/blob/398fef1ca26ae2b2c3dc89750f6b20455a1e5507/ee/lib/api/conan_packages.rb#L84)
against the project or group before continuing.
## Keep iterations small
When implementing a new package manager, it is easy to end up creating one large merge request containing all of the
necessary endpoints and services necessary to support basic usage. If this is the case, consider putting the
API endpoints behind a [feature flag](feature_flags/development.md) and
submitting each endpoint or behavior (download, upload, etc) in different merge requests to shorten the review
process.
File uploads should be handled by GitLab Workhorse using object accelerated uploads. What this means is that
the workhorse proxy that checks all incoming requests to GitLab will intercept the upload request,
upload the file, and forward a request to the main GitLab codebase only containing the metadata
and file location rather than the file itself. An overview of this process can be found in the
[development documentation](uploads.md#direct-upload).
### Potential MRs for any given package system
In terms of code, this means a route will need to be added to the
[GitLab Workhorse project](https://gitlab.com/gitlab-org/gitlab-workhorse) for each upload endpoint being added
(instance, group, project). [This merge request](https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/412/diffs)
demonstrates adding an instance-level endpoint for Conan to workhorse. You can also see the Maven project level endpoint
implemented in the same file.
#### MVC MRs
Once the route has been added, you will need to add an additional `/authorize` version of the upload endpoint to your API file.
[Here is an example](https://gitlab.com/gitlab-org/gitlab/blob/398fef1ca26ae2b2c3dc89750f6b20455a1e5507/ee/lib/api/maven_packages.rb#L164)
of the additional endpoint added for Maven. The `/authorize` endpoint verifies and authorizes the request from workhorse,
then the normal upload endpoint is implemented below, consuming the metadata that workhorse provides in order to
create the package record. Workhorse provides a variety of file metadata such as type, size, and different checksum formats.
These changes represent all that is needed to deliver a minimally usable package management system.
For testing purposes, you may want to [enable object storage](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/master/doc/howto/object_storage.md)
in your local development environment.
1. Empty file structure (API file, base service for this package)
1. Authentication system for 'logging in' to the package manager
1. Identify metadata and create applicable tables
1. Workhorse route for [object storage direct upload](uploads.md#direct-upload)
1. Endpoints required for upload/publish
1. Endpoints required for install/download
1. Endpoints required for remove/delete
### Future Work
#### Possible post-MVC MRs
While working on the MVC, contributors will probably find features that are not mandatory for the MVC but can provide a better user experience. It's generally a good idea to keep an eye on those and open issues.
These updates are not essential to be able to publish and consume packages, but may be desired as the system is
released for general use.
Here are some examples:
1. Endpoints required for search
1. Front end updates to display additional package information and metadata
1. Limits on file sizes
1. Tracking for metrics
1. Read more metadata fields from the package to make them available to the front end. For example, it's common to be able to tag a package. Those tags can be read and saved by the backend and then displayed in the packages UI.
1. Endpoints for the upper levels of the [remote hierarchy](#remote-hierarchy). This step might require creating a [naming convention](#naming-conventions).
## Exceptions
......
......@@ -11,7 +11,7 @@ type: reference, howto
The security configuration page displays the configuration state of each of the security
features and can be accessed through a project's sidebar nav.
![Screenshot of security configuration page](../img/security_configuration_page_v12_6.png)
![Screenshot of security configuration page](../img/security_configuration_page_v12_9.png)
The page uses the project's latest default branch [CI pipeline](../../../ci/pipelines.md) to determine the configuration
state of each feature. If a job with the expected security report artifact exists in the pipeline,
......
......@@ -38,14 +38,14 @@ If you follow the instructions you can publish `MyProject` by running
`npm publish` from the root directory.
Publishing `Foo` is almost exactly the same, you simply have to follow the steps
while in the `Foo` directory. `Foo` will need it's own `package.json` file,
which can be added manually or using `npm init`. And it will need it's own
while in the `Foo` directory. `Foo` will need its own `package.json` file,
which can be added manually or using `npm init`. And it will need its own
configuration settings. Since you are publishing to the same place, if you
used `npm config set` to set the registry for the parent project, then no
additional setup is necessary. If you used a `.npmrc` file, you will need an
additional `.npmrc` file in the `Foo` directory (be sure to add `.npmrc` files
to the `.gitignore` file or use environment variables in place of your access
tokens to preven them from being exposed). It can be identical to the
tokens to prevent them from being exposed). It can be identical to the
one you used in `MyProject`. You can now run `npm publish` from the `Foo`
directory and you will be able to publish `Foo` separately from `MyProject`
......
......@@ -4,13 +4,17 @@ GitLab allows users to easily deploy AWS Lambda functions and create rich server
GitLab supports deployment of functions to AWS Lambda using a combination of:
- [Serverless Framework with AWS](https://serverless.com/framework/docs/providers/aws/)
- [Serverless Framework with AWS](#serverless-framework)
- [AWS' Serverless Application Model (SAM)](#aws-serverless-application-model)
- GitLab CI/CD
## Serverless Framework
The [Serverless Framework can deploy to AWS](https://serverless.com/framework/docs/providers/aws/).
We have prepared an example with a step-by-step guide to create a simple function and deploy it on AWS.
Additionally, in the [How To section](#how-to), you can read about different use cases,
like:
Additionally, in the [How To section](#how-to), you can read about different use cases like:
- Running a function locally.
- Working with secrets.
......@@ -18,27 +22,27 @@ like:
Alternatively, you can quickly [create a new project with a template](../../../../gitlab-basics/create-project.md#project-templates). The [`Serverless Framework/JS` template](https://gitlab.com/gitlab-org/project-templates/serverless-framework/) already includes all parts described below.
## Example
### Example
In the following example, you will:
1. Create a basic AWS Lambda Node.js function.
1. Link the function to an API Gateway `GET` endpoint.
### Steps
#### Steps
The example consists of the following steps:
1. Creating a Lambda handler function
1. Creating a `serverless.yml` file
1. Crafting the `.gitlab-ci.yml` file
1. Setting up your AWS credentials with your GitLab account
1. Deploying your function
1. Testing the deployed function
1. Creating a Lambda handler function.
1. Creating a `serverless.yml` file.
1. Crafting the `.gitlab-ci.yml` file.
1. Setting up your AWS credentials with your GitLab account.
1. Deploying your function.
1. Testing the deployed function.
Let's take it step by step.
### Creating a Lambda handler function
#### Creating a Lambda handler function
Your Lambda function will be the primary handler of requests. In this case we will create a very simple Node.js `hello` function:
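The handler's source is collapsed in this diff; as a minimal sketch (the exact body in the example project may differ slightly), such a Node.js handler looks like:

```javascript
'use strict';

// Minimal Lambda handler: respond to the API Gateway `GET` request with a JSON body.
module.exports.hello = async event => {
  return {
    statusCode: 200,
    body: JSON.stringify({
      message: 'Your function executed successfully!',
    }),
  };
};
```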
......@@ -67,7 +71,7 @@ In our case, `module.exports.hello` defines the `hello` handler that will be ref
You can learn more about the AWS Lambda Node.js function handler and all its various options here: <https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-handler.html>
### Creating a `serverless.yml` file
#### Creating a `serverless.yml` file
In the root of your project, create a `serverless.yml` file that will contain configuration specifics for the Serverless Framework.
The `events` declaration will create an AWS API Gateway `GET` endpoint to receive
You can read more about the available properties and additional configuration possibilities of the Serverless Framework here: <https://serverless.com/framework/docs/providers/aws/guide/serverless.yml/>
### Crafting the `.gitlab-ci.yml` file
#### Crafting the `.gitlab-ci.yml` file
In a `.gitlab-ci.yml` file in the root of your project, place the following code:
......@@ -122,7 +126,7 @@ This example code does the following:
- Deploys the serverless function to your AWS account using the AWS credentials
defined above.
### Setting up your AWS credentials with your GitLab account
#### Setting up your AWS credentials with your GitLab account
In order to interact with your AWS account, the GitLab CI/CD pipelines require both `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` to be defined in your GitLab settings under **Settings > CI/CD > Variables**.
For more information please see: <https://docs.gitlab.com/ee/ci/variables/README.html#via-the-ui>
......@@ -130,7 +134,7 @@ For more information please see: <https://docs.gitlab.com/ee/ci/variables/README
NOTE: **Note:**
The AWS credentials you provide must include IAM policies that provision correct access control to AWS Lambda, API Gateway, CloudFormation, and IAM resources.
### Deploying your function
#### Deploying your function
`git push` the changes to your GitLab repository and the GitLab build pipeline will automatically deploy your function.
......@@ -142,7 +146,7 @@ endpoints:
GET - https://u768nzby1j.execute-api.us-east-1.amazonaws.com/production/hello
```
### Manually testing your function
#### Manually testing your function
Running the following `curl` command should trigger your function.
Hooray! You now have an AWS Lambda function deployed via GitLab CI.
Nice work!
## How To
### How To
In this section, we show you how to build on the basic example to:
......@@ -173,7 +177,7 @@ In this section, we show you how to build on the basic example to:
- Set up secret variables.
- Set up CORS.
### Running function locally
#### Running function locally
The `serverless-offline` plugin allows you to run your code locally. To run your code locally:
......@@ -204,7 +208,7 @@ It should output:
}
```
### Secret variables
#### Secret variables
Secrets are injected into your functions using environment variables.
......@@ -225,7 +229,7 @@ NOTE: **Note:**
Anyone with access to the AWS environment may be able to see the values of those
variables persisted in the lambda definition.
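Because the values arrive as ordinary environment variables, reading one inside the handler is a plain `process.env` lookup. A minimal sketch, assuming a hypothetical `MY_SECRET` variable declared under `provider.environment` in `serverless.yml`:

```javascript
'use strict';

// `MY_SECRET` is a hypothetical variable name used only for illustration.
module.exports.hello = async () => ({
  statusCode: 200,
  body: JSON.stringify({
    // Never echo the secret itself back to callers; only report whether it is set.
    secretIsSet: Boolean(process.env.MY_SECRET),
  }),
});
```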
### Setting up CORS
#### Setting up CORS
If you want to set up a web page that makes calls to your function, like we have done in the [template](https://gitlab.com/gitlab-org/project-templates/serverless-framework/), you need to deal with Cross-Origin Resource Sharing (CORS).
......@@ -269,13 +273,13 @@ module.exports.hello = async event => {
For more information, see the [Your CORS and API Gateway survival guide](https://serverless.com/blog/cors-api-gateway-survival-guide/)
blog post written by the Serverless Framework team.
### Writing automated tests
#### Writing automated tests
The [Serverless Framework](https://gitlab.com/gitlab-org/project-templates/serverless-framework/)
example project shows how to use Jest, Axios, and the `serverless-offline` plugin to do
automated testing of both local and deployed serverless functions.
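A minimal sketch of such a test, assuming the function is served locally by `serverless-offline` on `http://localhost:3000/hello` (the port and path are assumptions, not taken from the template):

```javascript
const axios = require('axios');

describe('hello function', () => {
  it('responds with HTTP 200 and a message', async () => {
    // Requires `serverless offline` to be running in another terminal.
    const response = await axios.get('http://localhost:3000/hello');

    expect(response.status).toBe(200);
    expect(response.data.message).toBeDefined();
  });
});
```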
## Examples and template
### Examples and template
The example code is available:
......@@ -285,3 +289,225 @@ The example code is available:
You can also use a [template](../../../../gitlab-basics/create-project.md#project-templates)
(based on the version with tests and secret variables) from within the GitLab UI (see
the `Serverless Framework/JS` template).
## AWS Serverless Application Model
AWS Serverless Application Model (AWS SAM) is an open source framework that makes it easier
to build and deploy serverless applications. For more details, see the AWS documentation on
[AWS Serverless Application Model](https://docs.aws.amazon.com/serverless-application-model/).
### Deploying AWS Lambda function using AWS SAM and GitLab CI/CD
GitLab allows developers to build and deploy serverless applications using the combination of:
- [AWS Serverless Application Model (AWS SAM)](https://aws.amazon.com/serverless/sam/).
- GitLab CI/CD.
### Example
In the following example, you will:
- Install SAM CLI.
- Create a sample SAM application including a Lambda function and API Gateway.
- Build and deploy the application to your AWS account using GitLab CI/CD.
### Steps
The example consists of the following steps:
1. Installing SAM CLI.
1. Creating an AWS SAM application using SAM CLI.
1. Crafting the `.gitlab-ci.yml` file.
1. Setting up your AWS credentials with your GitLab account.
1. Deploying your application.
1. Testing the deployed function.
### Installing SAM CLI
AWS SAM provides a CLI called AWS SAM CLI to make it easier to create and manage
applications.
Some steps in this documentation use SAM CLI. Follow the instructions for
[installing SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html)
to install and configure SAM CLI.
If you use [AWS Cloud9](https://aws.amazon.com/cloud9/) as your integrated development
environment (IDE), the following are installed for you:
- [AWS Command Line Interface](https://docs.aws.amazon.com/en_pv/cli/latest/userguide/cli-chap-install.html)
- [SAM CLI](https://docs.aws.amazon.com/en_pv/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html)
- [Docker](https://docs.docker.com/install/) and necessary Docker images.
### Creating an AWS SAM application using SAM CLI
To create a new AWS SAM application:
1. Create a new GitLab project.
1. `git clone` the project into your local environment.
1. Change to the newly cloned project and create a new SAM app using the following command:
```shell
sam init -r python3.8 -n gitlabpoc --app-template "hello-world"
```
1. `git push` the application back to the GitLab project.
This creates a SAM app named `gitlabpoc` using the default configuration, a single
Python 3.8 function invoked by an [Amazon API Gateway](https://aws.amazon.com/api-gateway/)
endpoint. To see additional runtimes supported by SAM and options for `sam init`, run:
```shell
sam init -h
```
### Setting up your AWS credentials with your GitLab account
In order to interact with your AWS account, the GitLab CI/CD pipelines require both
`AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` to be set in the project's CI/CD
variables.
To set these:
1. Navigate to the project's **{settings}** **Settings > CI / CD**.
1. Expand the **Variables** section and create entries for `AWS_ACCESS_KEY_ID` and
`AWS_SECRET_ACCESS_KEY`.
1. Mask the credentials so they do not show in logs using the **Masked** toggle.
NOTE: **Note:**
The AWS credentials you provide must include IAM policies that provision correct access
control to AWS Lambda, API Gateway, CloudFormation, and IAM resources.
### Crafting the `.gitlab-ci.yml` file
In a [`.gitlab-ci.yml`](../../../../ci/yaml/README.md) file in the root of your project,
add the following and replace `<S3_bucket_name>` with the name of the S3 bucket where you
want to store your package:
```yaml
image: python:latest
stages:
- deploy
production:
stage: deploy
before_script:
- pip3 install awscli --upgrade
- pip3 install aws-sam-cli --upgrade
script:
- sam build
- sam package --output-template-file packaged.yaml --s3-bucket <S3_bucket_name>
- sam deploy --template-file packaged.yaml --stack-name gitlabpoc --s3-bucket <S3_bucket_name> --capabilities CAPABILITY_IAM --region us-east-1
environment: production
```
Let’s examine the config file more closely:
- `image` specifies the Docker image to use for this build. This is the latest Python
image since the sample application is written in Python.
- AWS CLI and AWS SAM CLI are installed in the `before_script` section.
- SAM build, package, and deploy commands are used to build, package, and deploy the
application.
### Deploying your application
Push changes to your GitLab repository and the GitLab build pipeline will automatically
deploy your application. If your:
- Build and deploy are successful, [test your deployed application](#testing-the-deployed-application).
- Build fails, look at the build log to see why the build failed. Some common reasons
the build might fail are:
- Incompatible versions of software. For example, Python runtime version might be
different from the Python on the build machine. Address this by installing the
required versions of the software.
- You may not be able to access your AWS account from GitLab. Check the environment
variables you set up with AWS credentials.
- You may not have permission to deploy a serverless application. Make sure you
provide all required permissions to deploy a serverless application.
### Testing the deployed application
To test the application you deployed, go to the build log and follow these steps:
1. Click “Show complete raw” in the upper right-hand corner:
![sam-complete-raw](img/sam-complete-raw.png)
1. Look for the HelloWorldApi API Gateway endpoint, similar to the one shown below:
![sam-api-endpoint](img/sam-api-endpoint.png)
1. Use curl to test the API. For example:
```shell
curl https://py4rg7qtlg.execute-api.us-east-1.amazonaws.com/Prod/hello/
```
Output should be:
```json
{"message": "hello world"}
```
### Testing Locally
AWS SAM provides functionality to test your applications locally. You must have AWS SAM
CLI installed locally to test locally.
First, test the function.
SAM provides a default event in `events/event.json` that includes a message body of:
```json
{\"message\": \"hello world\"}
```
If you pass that event into the `HelloWorldFunction`, it should respond with the same
body.
Invoke the function by running:
```shell
sam local invoke HelloWorldFunction -e events/event.json
```
Output should be:
```json
{"message": "hello world"}
```
After you confirm that the Lambda function is working as expected, test the API Gateway
using the following steps.
Start the API locally by running:
```shell
sam local start-api
```
SAM again launches a Docker container, this time with a mocked Amazon API Gateway
listening on `localhost:3000`.
Call the `hello` API by running:
```shell
curl http://127.0.0.1:3000/hello
```
Output again should be:
```json
{"message": "hello world"}
```
......@@ -155,10 +155,17 @@ Multiple metrics can be displayed on the same chart if the fields **Name**, **Ty
#### Query Variables
GitLab supports a limited set of [CI variables](../../../ci/variables/README.md) in the Prometheus query. This is particularly useful for identifying a specific environment, for example with `CI_ENVIRONMENT_SLUG`. The supported variables are:
GitLab supports a limited set of [CI variables](../../../ci/variables/README.md) in the Prometheus query. This is particularly useful for identifying a specific environment, for example with `ci_environment_slug`. The supported variables are:
- CI_ENVIRONMENT_SLUG
- KUBE_NAMESPACE
- `ci_environment_slug`
- `kube_namespace`
- `ci_project_name`
- `ci_project_namespace`
- `ci_project_path`
- `ci_environment_name`
NOTE: **Note:**
Variables for Prometheus queries must be lowercase.
There are 2 methods to specify a variable in a query or dashboard:
......
......@@ -7,7 +7,11 @@ module Gitlab
{
ci_environment_slug: environment.slug,
kube_namespace: environment.deployment_namespace || '',
environment_filter: %{container_name!="POD",environment="#{environment.slug}"}
environment_filter: %{container_name!="POD",environment="#{environment.slug}"},
ci_project_name: environment.project.name,
ci_project_namespace: environment.project.namespace.name,
ci_project_path: environment.project.full_path,
ci_environment_name: environment.name
}
end
end
......
......@@ -2026,10 +2026,10 @@ msgstr ""
msgid "Analyze a review version of your web application."
msgstr ""
msgid "Analyze your dependencies for known vulnerabilities"
msgid "Analyze your dependencies for known vulnerabilities."
msgstr ""
msgid "Analyze your source code for known vulnerabilities"
msgid "Analyze your source code for known vulnerabilities."
msgstr ""
msgid "Ancestors"
......@@ -2509,6 +2509,9 @@ msgstr ""
msgid "Authenticating"
msgstr ""
msgid "Authentication Failure"
msgstr ""
msgid "Authentication Log"
msgstr ""
......@@ -3460,7 +3463,7 @@ msgstr ""
msgid "Check your .gitlab-ci.yml"
msgstr ""
msgid "Check your Docker images for known vulnerabilities"
msgid "Check your Docker images for known vulnerabilities."
msgstr ""
msgid "Checking %{text} availability…"
......@@ -5169,6 +5172,9 @@ msgstr ""
msgid "Connect your external repositories, and CI/CD pipelines will run for new commits. A GitLab project will be created with only CI/CD features enabled."
msgstr ""
msgid "Connected"
msgstr ""
msgid "Connecting"
msgstr ""
......@@ -6373,6 +6379,9 @@ msgstr ""
msgid "Deleted in this version"
msgstr ""
msgid "Deleting"
msgstr ""
msgid "Deleting the license failed."
msgstr ""
......@@ -16986,9 +16995,6 @@ msgstr ""
msgid "Scoped issue boards"
msgstr ""
msgid "Scoped label"
msgstr ""
msgid "Scopes"
msgstr ""
......@@ -17085,7 +17091,7 @@ msgstr ""
msgid "Search users or groups"
msgstr ""
msgid "Search your project dependencies for their licenses and apply policies"
msgid "Search your project dependencies for their licenses and apply policies."
msgstr ""
msgid "Search your projects"
......@@ -17311,7 +17317,7 @@ msgstr ""
msgid "SecurityConfiguration|Feature"
msgstr ""
msgid "SecurityConfiguration|Feature documentation"
msgid "SecurityConfiguration|Feature documentation for %{featureName}"
msgstr ""
msgid "SecurityConfiguration|Not yet configured"
......@@ -18720,6 +18726,9 @@ msgstr ""
msgid "Status:"
msgstr ""
msgid "Status: %{title}"
msgstr ""
msgid "Stay updated about the performance and health of your environment by configuring Prometheus to monitor your deployments."
msgstr ""
......@@ -21101,6 +21110,9 @@ msgstr ""
msgid "Unmarks this %{noun} as Work In Progress."
msgstr ""
msgid "Unreachable"
msgstr ""
msgid "Unresolve"
msgstr ""
......
......@@ -519,7 +519,7 @@ describe 'Issue Boards', :js do
page.within(find('.board:nth-child(2)')) do
expect(page).to have_selector('.board-card', count: 8)
expect(find('.board-card', match: :first)).to have_content(bug.title)
click_button(bug.title)
click_link(bug.title)
wait_for_requests
end
......@@ -536,7 +536,7 @@ describe 'Issue Boards', :js do
it 'removes label filter by clicking label button on issue' do
page.within(find('.board:nth-child(2)')) do
page.within(find('.board-card', match: :first)) do
click_button(bug.title)
click_link(bug.title)
end
wait_for_requests
......
......@@ -305,7 +305,7 @@ describe 'Issue Boards', :js do
end
# 'Development' label does not show since the card is in a 'Development' list label
expect(card).to have_selector('.badge', count: 2)
expect(card).to have_selector('.gl-label', count: 2)
expect(card).to have_content(bug.title)
end
......@@ -335,7 +335,7 @@ describe 'Issue Boards', :js do
end
# 'Development' label does not show since the card is in a 'Development' list label
expect(card).to have_selector('.badge', count: 3)
expect(card).to have_selector('.gl-label', count: 3)
expect(card).to have_content(bug.title)
expect(card).to have_content(regression.title)
end
......
......@@ -2,7 +2,7 @@
require 'spec_helper'
describe MergeRequest::Pipelines do
describe Ci::PipelinesForMergeRequestFinder do
describe '#all' do
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.source_project }
......
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Blob Header Editing rendering matches the snapshot 1`] = `
<div
class="file-content code"
>
<pre
data-editor-loading=""
id="editor"
>
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
</pre>
</div>
`;
import { shallowMount } from '@vue/test-utils';
import BlobEditContent from '~/blob/components/blob_edit_content.vue';
import { initEditorLite } from '~/blob/utils';
import { nextTick } from 'vue';
jest.mock('~/blob/utils', () => ({
initEditorLite: jest.fn(),
}));
describe('Blob Header Editing', () => {
let wrapper;
const value = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.';
const fileName = 'lorem.txt';
function createComponent() {
wrapper = shallowMount(BlobEditContent, {
propsData: {
value,
fileName,
},
});
}
beforeEach(() => {
createComponent();
});
afterEach(() => {
wrapper.destroy();
});
describe('rendering', () => {
it('matches the snapshot', () => {
expect(wrapper.element).toMatchSnapshot();
});
it('renders content', () => {
expect(wrapper.text()).toContain(value);
});
});
describe('functionality', () => {
it('initialises Editor Lite', () => {
const el = wrapper.find({ ref: 'editor' }).element;
expect(initEditorLite).toHaveBeenCalledWith({
el,
blobPath: fileName,
blobContent: value,
});
});
it('reacts to the changes in fileName', () => {
wrapper.vm.editor = {
updateModelLanguage: jest.fn(),
};
const newFileName = 'ipsum.txt';
wrapper.setProps({
fileName: newFileName,
});
return nextTick().then(() => {
expect(wrapper.vm.editor.updateModelLanguage).toHaveBeenCalledWith(newFileName);
});
});
it('emits input event when the blob content is changed', () => {
const editorEl = wrapper.find({ ref: 'editor' });
wrapper.vm.editor = {
getValue: jest.fn().mockReturnValue(value),
};
editorEl.trigger('focusout');
return nextTick().then(() => {
expect(wrapper.emitted().input[0]).toEqual([value]);
});
});
});
});
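Editor's note — a minimal, hypothetical usage sketch, not part of this commit: the spec above asserts that the component accepts a `value` prop and emits an `input` event with the editor's current value on focusout, which is exactly the contract v-model relies on. The parent component below (its name, data, and file name are assumptions for illustration only) shows how a consumer could bind it.

// Hypothetical parent component (assumption, not from this diff) demonstrating
// that the value prop + input event asserted in the spec support v-model.
import BlobEditContent from '~/blob/components/blob_edit_content.vue';

export default {
  name: 'BlobEditContentExample', // illustrative name, not in the codebase
  components: { BlobEditContent },
  data() {
    return {
      content: 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.',
      fileName: 'lorem.txt',
    };
  },
  // `content` stays in sync after each focusout, because the child emits
  // `input` with editor.getValue().
  template: `<blob-edit-content v-model="content" :file-name="fileName" />`,
};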
import Editor from '~/editor/editor_lite';
import * as utils from '~/blob/utils';
const mockCreateMonacoInstance = jest.fn();
jest.mock('~/editor/editor_lite', () => {
return jest.fn().mockImplementation(() => {
return { createInstance: mockCreateMonacoInstance };
});
});
const mockCreateAceInstance = jest.fn();
global.ace = {
edit: mockCreateAceInstance,
};
describe('Blob utilities', () => {
beforeEach(() => {
Editor.mockClear();
});
describe('initEditorLite', () => {
let editorEl;
const blobPath = 'foo.txt';
const blobContent = 'Foo bar';
beforeEach(() => {
setFixtures('<div id="editor"></div>');
editorEl = document.getElementById('editor');
});
describe('Monaco editor', () => {
let origProp;
beforeEach(() => {
origProp = window.gon;
window.gon = {
features: {
monacoSnippets: true,
},
};
});
afterEach(() => {
window.gon = origProp;
});
it('initializes the Editor Lite', () => {
utils.initEditorLite({ el: editorEl });
expect(Editor).toHaveBeenCalled();
});
it('creates the instance with the passed parameters', () => {
utils.initEditorLite({ el: editorEl });
expect(mockCreateMonacoInstance.mock.calls[0]).toEqual([
{
el: editorEl,
blobPath: undefined,
blobContent: undefined,
},
]);
utils.initEditorLite({ el: editorEl, blobPath, blobContent });
expect(mockCreateMonacoInstance.mock.calls[1]).toEqual([
{
el: editorEl,
blobPath,
blobContent,
},
]);
});
});
describe('ACE editor', () => {
let origProp;
beforeEach(() => {
origProp = window.gon;
window.gon = {
features: {
monacoSnippets: false,
},
};
});
afterEach(() => {
window.gon = origProp;
});
it('does not initialize the Editor Lite', () => {
utils.initEditorLite({ el: editorEl });
expect(Editor).not.toHaveBeenCalled();
expect(mockCreateAceInstance).toHaveBeenCalledWith(editorEl);
});
});
});
});
import { GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import IssueCardInnerScopedLabel from '~/boards/components/issue_card_inner_scoped_label.vue';
describe('IssueCardInnerScopedLabel Component', () => {
let wrapper;
beforeEach(() => {
wrapper = shallowMount(IssueCardInnerScopedLabel, {
propsData: {
label: { title: 'Foo::Bar', description: 'Some Random Description' },
labelStyle: { background: 'white', color: 'black' },
scopedLabelsDocumentationLink: '/docs-link',
},
});
});
afterEach(() => {
wrapper.destroy();
});
it('should render label title', () => {
expect(wrapper.find('.color-label').text()).toBe('Foo::Bar');
});
it('should render question mark symbol', () => {
expect(wrapper.find('.fa-question-circle').exists()).toBe(true);
});
it('should render label style provided', () => {
const label = wrapper.find('.color-label');
expect(label.attributes('style')).toContain('background: white;');
expect(label.attributes('style')).toContain('color: black;');
});
it('should render the docs link', () => {
expect(wrapper.find(GlLink).attributes('href')).toBe('/docs-link');
});
});
......@@ -8,6 +8,7 @@ import '~/boards/models/list';
import IssueCardInner from '~/boards/components/issue_card_inner.vue';
import { listObj } from '../../javascripts/boards/mock_data';
import store from '~/boards/stores';
import { GlLabel } from '@gitlab/ui';
describe('Issue card component', () => {
const user = new ListAssignee({
......@@ -20,7 +21,7 @@ describe('Issue card component', () => {
const label1 = new ListLabel({
id: 3,
title: 'testing 123',
color: 'blue',
color: '#000CFF',
text_color: 'white',
description: 'test',
});
......@@ -50,6 +51,9 @@ describe('Issue card component', () => {
rootPath: '/',
},
store,
stubs: {
GlLabel: true,
},
});
});
......@@ -290,25 +294,11 @@ describe('Issue card component', () => {
});
it('does not render list label but renders all other labels', () => {
expect(wrapper.findAll('.badge').length).toBe(1);
});
it('renders label', () => {
const nodes = wrapper.findAll('.badge').wrappers.map(label => label.attributes('title'));
expect(nodes.includes(label1.description)).toBe(true);
});
it('sets label description as title', () => {
expect(wrapper.find('.badge').attributes('title')).toContain(label1.description);
});
it('sets background color of button', () => {
const nodes = wrapper
.findAll('.badge')
.wrappers.map(label => label.element.style.backgroundColor);
expect(nodes.includes(label1.color)).toBe(true);
expect(wrapper.findAll(GlLabel).length).toBe(1);
const label = wrapper.find(GlLabel);
expect(label.props('title')).toEqual(label1.title);
expect(label.props('description')).toEqual(label1.description);
expect(label.props('backgroundColor')).toEqual(label1.color);
});
it('does not render label if label does not have an ID', done => {
......@@ -321,7 +311,7 @@ describe('Issue card component', () => {
wrapper.vm
.$nextTick()
.then(() => {
expect(wrapper.findAll('.badge').length).toBe(1);
expect(wrapper.findAll(GlLabel).length).toBe(1);
expect(wrapper.text()).not.toContain('closed');
done();
})
......
import Vuex from 'vuex';
import { createLocalVue, mount } from '@vue/test-utils';
import { GlTable, GlLoadingIcon } from '@gitlab/ui';
import Clusters from '~/clusters_list/components/clusters.vue';
import Vuex from 'vuex';
import mockData from '../mock_data';
const localVue = createLocalVue();
localVue.use(Vuex);
......@@ -11,9 +12,10 @@ describe('Clusters', () => {
const findTable = () => wrapper.find(GlTable);
const findLoader = () => wrapper.find(GlLoadingIcon);
const findStatuses = () => findTable().findAll('.js-status');
const mountComponent = _state => {
const state = { clusters: [], endpoint: 'some/endpoint', ..._state };
const state = { clusters: mockData, endpoint: 'some/endpoint', ..._state };
const store = new Vuex.Store({
state,
});
......@@ -52,4 +54,25 @@ describe('Clusters', () => {
expect(findTable().classes()).toContain('b-table-stacked-md');
});
});
describe('cluster status', () => {
it.each`
statusName | className | lineNumber
${'disabled'} | ${'disabled'} | ${0}
${'unreachable'} | ${'bg-danger'} | ${1}
${'authentication_failure'} | ${'bg-warning'} | ${2}
${'deleting'} | ${null} | ${3}
${'connected'} | ${'bg-success'} | ${4}
`('renders a status for each cluster', ({ statusName, className, lineNumber }) => {
const statuses = findStatuses();
const status = statuses.at(lineNumber);
if (statusName !== 'deleting') {
const statusIndicator = status.find('.cluster-status-indicator');
expect(statusIndicator.exists()).toBe(true);
expect(statusIndicator.classes()).toContain(className);
} else {
expect(status.find(GlLoadingIcon).exists()).toBe(true);
}
});
});
});
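Editor's note — the `it.each` table in the spec above implies a status-to-CSS-class mapping inside the cluster status cell. The sketch below only restates what the assertions check; the constant name and module shape are assumptions, not taken from this commit.

// Hypothetical mapping inferred from the spec's it.each table; the name and
// export are illustrative assumptions, not part of this diff.
const CLUSTER_STATUS_INDICATOR_CLASS = {
  disabled: 'disabled',
  unreachable: 'bg-danger',
  authentication_failure: 'bg-warning',
  connected: 'bg-success',
  // 'deleting' has no indicator class: per the spec, the cell renders a
  // GlLoadingIcon instead of a .cluster-status-indicator element.
};

export default CLUSTER_STATUS_INDICATOR_CLASS;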
export default [
{
name: 'My Cluster 1',
environmentScope: '*',
size: '3',
clusterType: 'group_type',
status: 'disabled',
},
{
name: 'My Cluster 2',
environmentScope: 'development',
size: '12',
clusterType: 'project_type',
status: 'unreachable',
},
{
name: 'My Cluster 3',
environmentScope: 'development',
size: '12',
clusterType: 'project_type',
status: 'authentication_failure',
},
{
name: 'My Cluster 4',
environmentScope: 'production',
size: '12',
clusterType: 'project_type',
status: 'deleting',
},
{
name: 'My Cluster 5',
environmentScope: 'development',
size: '12',
clusterType: 'project_type',
status: 'connected',
},
];
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Snippet Blob Edit component rendering matches the snapshot 1`] = `
<div
class="form-group file-editor"
>
<label>
File
</label>
<div
class="file-holder snippet"
>
<blob-header-edit-stub
value="lorem.txt"
/>
<blob-content-edit-stub
filename="lorem.txt"
value="Lorem ipsum dolor sit amet, consectetur adipiscing elit."
/>
</div>
</div>
`;
import SnippetBlobEdit from '~/snippets/components/snippet_blob_edit.vue';
import BlobHeaderEdit from '~/blob/components/blob_edit_header.vue';
import BlobContentEdit from '~/blob/components/blob_edit_content.vue';
import { shallowMount } from '@vue/test-utils';
jest.mock('~/blob/utils', () => jest.fn());
describe('Snippet Blob Edit component', () => {
let wrapper;
const content = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.';
const fileName = 'lorem.txt';
function createComponent() {
wrapper = shallowMount(SnippetBlobEdit, {
propsData: {
content,
fileName,
},
});
}
beforeEach(() => {
createComponent();
});
afterEach(() => {
wrapper.destroy();
});
describe('rendering', () => {
it('matches the snapshot', () => {
expect(wrapper.element).toMatchSnapshot();
});
it('renders required components', () => {
expect(wrapper.contains(BlobHeaderEdit)).toBe(true);
expect(wrapper.contains(BlobContentEdit)).toBe(true);
});
});
});
import { mount } from '@vue/test-utils';
import { hexToRgb } from '~/lib/utils/color_utils';
import DropdownValueComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_value.vue';
import DropdownValueScopedLabel from '~/vue_shared/components/sidebar/labels_select/dropdown_value_scoped_label.vue';
import { GlLabel } from '@gitlab/ui';
import {
mockConfig,
mockLabels,
} from '../../../../../javascripts/vue_shared/components/sidebar/labels_select/mock_data';
const labelStyles = {
textColor: '#FFFFFF',
color: '#BADA55',
};
const createComponent = (
labels = mockLabels,
labelFilterBasePath = mockConfig.labelFilterBasePath,
) => {
labels.forEach(label => Object.assign(label, labelStyles));
return mount(DropdownValueComponent, {
) =>
mount(DropdownValueComponent, {
propsData: {
labels,
labelFilterBasePath,
enableScopedLabels: true,
},
stubs: {
GlLabel: true,
},
});
};
describe('DropdownValueComponent', () => {
let vm;
......@@ -56,24 +51,17 @@ describe('DropdownValueComponent', () => {
describe('methods', () => {
describe('labelFilterUrl', () => {
it('returns URL string starting with labelFilterBasePath and encoded label.title', () => {
expect(vm.find(DropdownValueScopedLabel).props('labelFilterUrl')).toBe(
'/gitlab-org/my-project/issues?label_name[]=Foo%3A%3ABar',
expect(vm.find(GlLabel).props('target')).toBe(
'/gitlab-org/my-project/issues?label_name[]=Foo%20Label',
);
});
});
describe('labelStyle', () => {
it('returns object with `color` & `backgroundColor` properties from label.textColor & label.color', () => {
expect(vm.find(DropdownValueScopedLabel).props('labelStyle')).toEqual({
color: labelStyles.textColor,
backgroundColor: labelStyles.color,
});
});
});
describe('showScopedLabels', () => {
it('returns true if the label is scoped label', () => {
expect(vm.findAll(DropdownValueScopedLabel).length).toEqual(1);
const labels = vm.findAll(GlLabel);
expect(labels.length).toEqual(2);
expect(labels.at(1).props('scoped')).toBe(true);
});
});
});
......@@ -95,33 +83,10 @@ describe('DropdownValueComponent', () => {
vmEmptyLabels.destroy();
});
it('renders label element with filter URL', () => {
expect(vm.find('a').attributes('href')).toBe(
'/gitlab-org/my-project/issues?label_name[]=Foo%20Label',
);
});
it('renders label element and styles based on label details', () => {
const labelEl = vm.find('a span.badge.color-label');
it('renders DropdownValueComponent element', () => {
const labelEl = vm.find(GlLabel);
expect(labelEl.exists()).toBe(true);
expect(labelEl.attributes('style')).toContain(
`background-color: rgb(${hexToRgb(labelStyles.color).join(', ')});`,
);
expect(labelEl.text().trim()).toBe(mockLabels[0].title);
});
describe('label is of scoped-label type', () => {
it('renders a scoped-label-wrapper span to incorporate 2 anchors', () => {
expect(vm.find('span.scoped-label-wrapper').exists()).toBe(true);
});
it('renders anchor tag containing question icon', () => {
const anchor = vm.find('.scoped-label-wrapper a.scoped-label');
expect(anchor.exists()).toBe(true);
expect(anchor.find('i.fa-question-circle').exists()).toBe(true);
});
});
});
});
......@@ -32,7 +32,7 @@ describe('Board card', () => {
const label1 = new ListLabel({
id: 3,
title: 'testing 123',
color: 'blue',
color: '#000cff',
text_color: 'white',
description: 'test',
});
......@@ -155,12 +155,6 @@ describe('Board card', () => {
expect(boardsStore.detail.issue).toEqual({});
});
it('does not set detail issue if button is clicked', () => {
triggerEvent('mouseup', vm.$el.querySelector('button'));
expect(boardsStore.detail.issue).toEqual({});
});
it('does not set detail issue if img is clicked', done => {
vm.issue.assignees = [
new ListAssignee({
......
......@@ -11,6 +11,10 @@ describe Gitlab::Prometheus::QueryVariables do
subject { described_class.call(environment) }
it { is_expected.to include(ci_environment_slug: slug) }
it { is_expected.to include(ci_project_name: project.name) }
it { is_expected.to include(ci_project_namespace: project.namespace.name) }
it { is_expected.to include(ci_project_path: project.full_path) }
it { is_expected.to include(ci_environment_name: environment.name) }
it do
is_expected.to include(environment_filter:
......
......@@ -796,15 +796,15 @@
dependencies:
vue-eslint-parser "^7.0.0"
"@gitlab/svgs@^1.105.0":
version "1.105.0"
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.105.0.tgz#9686f8696594a5f22de11af2b81fdcceb715f4f2"
integrity sha512-2wzZXe2b7DnGyL7FTbPq0dSpk+gjkq4SBTNtMrqdwX2qaM+XJB50XaMm17kdY5V1bBkMgbc7JJ2vgbLxhS/CkQ==
"@gitlab/svgs@^1.110.0":
version "1.110.0"
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.110.0.tgz#3c4f5f0e78fcf616ec63a265754158b84ed80af8"
integrity sha512-bLVUW9Hj6j7zTdeoQELO3Bls5xDKr6AoSEU8gZbEZKLK9PV81hxRl/lJPJUo1qt4E7eJGapCTlH73tTIL4OZ3A==
"@gitlab/ui@^9.21.1":
version "9.21.1"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-9.21.1.tgz#76da9b86de959c2757a0c0a9389970f8d5afcc47"
integrity sha512-nJi2lFYq3WFXDNlH5vAg1Mb3Tf/PKnaVIm5W07I+hIWj/GALnwZHO3WHJuhwWIUTZOtLz7egIr4Wyh3EqBk+cg==
"@gitlab/ui@^9.23.0":
version "9.23.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-9.23.0.tgz#0ad0232c529d1f8a386c8e86159e273111a55686"
integrity sha512-1VOob5tNPB3zjLHeTuMbQBMG3q6LF36iCq6XqH5eeYzpAI42zj/WhY5T47RKrfvlkflWRSUPTarGo97pQqIKzg==
dependencies:
"@babel/standalone" "^7.0.0"
"@gitlab/vue-toasted" "^1.3.0"
......@@ -6872,10 +6872,10 @@ js-beautify@^1.6.12, js-beautify@^1.8.8:
mkdirp "~0.5.1"
nopt "~4.0.1"
js-cookie@^2.1.3:
version "2.1.3"
resolved "https://registry.yarnpkg.com/js-cookie/-/js-cookie-2.1.3.tgz#48071625217ac9ecfab8c343a13d42ec09ff0526"
integrity sha1-SAcWJSF6yez6uMNDoT1C7An/BSY=
js-cookie@^2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/js-cookie/-/js-cookie-2.2.1.tgz#69e106dc5d5806894562902aa5baec3744e9b2b8"
integrity sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ==
"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0:
version "4.0.0"
......