Commit d9aac35d authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

Parent ff430539
......@@ -303,6 +303,11 @@ Performance/Detect:
RSpec/ContextWording:
Enabled: false
# Offense count: 626
# Cop supports --auto-correct.
RSpec/EmptyLineAfterLetBlock:
Enabled: false
# Offense count: 1121
# Cop supports --auto-correct.
# Configuration parameters: EnforcedStyle.
......
1fae8b36711542e66b772164e159f29c14bb4ce4
a75309cec88ed34f594a4f6514bb0bb2aef7fcd5
......@@ -365,7 +365,7 @@ group :development, :test do
gem 'spring', '~> 2.0.0'
gem 'spring-commands-rspec', '~> 1.0.4'
gem 'gitlab-styles', '~> 4.2.0', require: false
gem 'gitlab-styles', '~> 4.3.0', require: false
# Pin these dependencies, otherwise a new rule could break the CI pipelines
gem 'rubocop', '~> 0.82.0'
gem 'rubocop-performance', '~> 1.5.2'
......
......@@ -414,7 +414,7 @@ GEM
gitlab-puma (>= 2.7, < 5)
gitlab-sidekiq-fetcher (0.5.2)
sidekiq (~> 5)
gitlab-styles (4.2.0)
gitlab-styles (4.3.0)
rubocop (~> 0.82.0)
rubocop-gitlab-security (~> 0.1.0)
rubocop-performance (~> 1.5.2)
......@@ -1262,7 +1262,7 @@ DEPENDENCIES
gitlab-puma (~> 4.3.3.gitlab.2)
gitlab-puma_worker_killer (~> 0.1.1.gitlab.1)
gitlab-sidekiq-fetcher (= 0.5.2)
gitlab-styles (~> 4.2.0)
gitlab-styles (~> 4.3.0)
gitlab_chronic_duration (~> 0.10.6.2)
gitlab_omniauth-ldap (~> 2.1.1)
gon (~> 6.2)
......
......@@ -44,6 +44,8 @@ const initialPaginationState = {
lastPageSize: null,
};
const TWELVE_HOURS_IN_MS = 12 * 60 * 60 * 1000;
export default {
i18n: {
noAlertsMsg: s__(
......@@ -149,9 +151,20 @@ export default {
update(data) {
const { alertManagementAlerts: { nodes: list = [], pageInfo = {} } = {} } =
data.project || {};
const now = new Date();
const listWithData = list.map(alert => {
const then = new Date(alert.startedAt);
const diff = now - then;
return {
...alert,
isNew: diff < TWELVE_HOURS_IN_MS,
};
});
return {
list,
list: listWithData,
pageInfo,
};
},
......@@ -207,9 +220,6 @@ export default {
hasAlerts() {
return this.alerts?.list?.length;
},
tbodyTrClass() {
return !this.loading && this.hasAlerts ? bodyTrClass : '';
},
showPaginationControls() {
return Boolean(this.prevPage || this.nextPage);
},
......@@ -290,6 +300,12 @@ export default {
resetPagination() {
this.pagination = initialPaginationState;
},
tbodyTrClass(item) {
return {
[bodyTrClass]: !this.loading && this.hasAlerts,
'new-alert': item?.isNew,
};
},
handleAlertError(errorMessage) {
this.errored = true;
this.errorMessage = errorMessage;
......
......@@ -14,7 +14,6 @@ import {
GlFormSelect,
} from '@gitlab/ui';
import { debounce } from 'lodash';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import ToggleButton from '~/vue_shared/components/toggle_button.vue';
import csrf from '~/lib/utils/csrf';
......@@ -42,7 +41,6 @@ export default {
directives: {
'gl-modal': GlModalDirective,
},
mixins: [glFeatureFlagsMixin()],
props: {
prometheus: {
type: Object,
......@@ -129,7 +127,7 @@ export default {
return !this.isGeneric ? this.$options.i18n.prometheusInfo : '';
},
prometheusFeatureEnabled() {
return !this.isGeneric && this.glFeatures.alertIntegrationsDropdown;
return !this.isGeneric;
},
jsonIsValid() {
return this.testAlert.error === null;
......@@ -147,13 +145,9 @@ export default {
}, JSON_VALIDATE_DELAY),
},
created() {
if (this.glFeatures.alertIntegrationsDropdown) {
this.selectedEndpoint = this.prometheus.prometheusIsActivated
? this.options[1].value
: this.options[0].value;
} else {
this.selectedEndpoint = this.options[0].value;
}
this.selectedEndpoint = this.prometheus.prometheusIsActivated
? this.options[1].value
: this.options[0].value;
},
methods: {
clearJson() {
......@@ -187,9 +181,6 @@ export default {
},
toggleService(value) {
this.canSaveForm = true;
if (!this.glFeatures.alertIntegrationsDropdown) {
this.toggleActivated(value);
}
if (this.isGeneric) {
this.activated.generic = value;
......@@ -334,7 +325,6 @@ export default {
</div>
<gl-form @submit.prevent="onSubmit" @reset.prevent="onReset">
<gl-form-group
v-if="glFeatures.alertIntegrationsDropdown"
:label="$options.i18n.integrationsLabel"
label-for="integrations"
label-class="label-bold"
......@@ -433,7 +423,6 @@ export default {
</gl-modal>
</gl-form-group>
<gl-form-group
v-if="glFeatures.alertIntegrationsDropdown"
:label="$options.i18n.alertJson"
label-for="alert-json"
label-class="label-bold"
......@@ -452,10 +441,7 @@ export default {
<gl-button :disabled="!canTestAlert" @click="validateTestAlert">{{
$options.i18n.testAlertInfo
}}</gl-button>
<div
v-if="glFeatures.alertIntegrationsDropdown"
class="footer-block row-content-block gl-display-flex gl-justify-content-space-between"
>
<div class="footer-block row-content-block gl-display-flex gl-justify-content-space-between">
<gl-button type="submit" variant="success" category="primary" :disabled="!canSaveConfig">
{{ __('Save changes') }}
</gl-button>
......
......@@ -281,7 +281,7 @@ export default {
</board-delete>
<div
v-if="showBoardListAndBoardInfo"
class="issue-count-badge gl-pr-0 no-drag text-secondary"
class="issue-count-badge gl-display-inline-flex gl-pr-0 no-drag text-secondary"
:class="{ 'gl-display-none': !list.isExpanded && isSwimlanesHeader }"
>
<span class="gl-display-inline-flex">
......
......@@ -5,8 +5,9 @@ import { n__ } from '~/locale';
import getIssuesListDetailsQuery from '../queries/get_issues_list_details.query.graphql';
import {
calculateJiraImportLabel,
isFinished,
isInProgress,
setFinishedAlertHideMap,
shouldShowFinishedAlert,
} from '~/jira_import/utils/jira_import_utils';
export default {
......@@ -35,8 +36,6 @@ export default {
},
data() {
return {
isFinishedAlertShowing: true,
isInProgressAlertShowing: true,
jiraImport: {},
};
},
......@@ -48,15 +47,18 @@ export default {
fullPath: this.projectPath,
};
},
update: ({ project }) => ({
importedIssuesCount: last(project.jiraImports.nodes)?.importedIssuesCount,
isInProgress: isInProgress(project.jiraImportStatus),
isFinished: isFinished(project.jiraImportStatus),
label: calculateJiraImportLabel(
update: ({ project }) => {
const label = calculateJiraImportLabel(
project.jiraImports.nodes,
project.issues.nodes.flatMap(({ labels }) => labels.nodes),
),
}),
);
return {
importedIssuesCount: last(project.jiraImports.nodes)?.importedIssuesCount,
label,
shouldShowFinishedAlert: shouldShowFinishedAlert(label.title, project.jiraImportStatus),
shouldShowInProgressAlert: isInProgress(project.jiraImportStatus),
};
},
skip() {
return !this.isJiraConfigured || !this.canEdit;
},
......@@ -73,19 +75,14 @@ export default {
labelTarget() {
return `${this.issuesPath}?label_name[]=${encodeURIComponent(this.jiraImport.label.title)}`;
},
shouldShowFinishedAlert() {
return this.isFinishedAlertShowing && this.jiraImport.isFinished;
},
shouldShowInProgressAlert() {
return this.isInProgressAlertShowing && this.jiraImport.isInProgress;
},
},
methods: {
hideFinishedAlert() {
this.isFinishedAlertShowing = false;
setFinishedAlertHideMap(this.jiraImport.label.title);
this.jiraImport.shouldShowFinishedAlert = false;
},
hideInProgressAlert() {
this.isInProgressAlertShowing = false;
this.jiraImport.shouldShowInProgressAlert = false;
},
},
};
......@@ -93,10 +90,15 @@ export default {
<template>
<div class="issuable-list-root">
<gl-alert v-if="shouldShowInProgressAlert" @dismiss="hideInProgressAlert">
<gl-alert v-if="jiraImport.shouldShowInProgressAlert" @dismiss="hideInProgressAlert">
{{ __('Import in progress. Refresh page to see newly added issues.') }}
</gl-alert>
<gl-alert v-if="shouldShowFinishedAlert" variant="success" @dismiss="hideFinishedAlert">
<gl-alert
v-if="jiraImport.shouldShowFinishedAlert"
variant="success"
@dismiss="hideFinishedAlert"
>
{{ finishedMessage }}
<gl-label
:background-color="jiraImport.label.color"
......
......@@ -52,3 +52,5 @@ export const availableSortOptionsJira = [
},
},
];
export const JIRA_IMPORT_SUCCESS_ALERT_HIDE_MAP_KEY = 'jira-import-success-alert-hide-map';
import { last } from 'lodash';
import { JIRA_IMPORT_SUCCESS_ALERT_HIDE_MAP_KEY } from '~/issuables_list/constants';
export const IMPORT_STATE = {
FAILED: 'failed',
......@@ -68,3 +69,36 @@ export const calculateJiraImportLabel = (jiraImports, labels) => {
title,
};
};
/**
* Calculates whether the Jira import success alert should be shown.
*
* @param {string} labelTitle - Jira import label, for checking localStorage
* @param {string} importStatus - Jira import status
* @returns {boolean} - A boolean indicating whether to show the success alert
*/
export const shouldShowFinishedAlert = (labelTitle, importStatus) => {
const finishedAlertHideMap =
JSON.parse(localStorage.getItem(JIRA_IMPORT_SUCCESS_ALERT_HIDE_MAP_KEY)) || {};
const shouldHide = finishedAlertHideMap[labelTitle];
return !shouldHide && isFinished(importStatus);
};
/**
* Updates the localStorage map to permanently hide the Jira import success alert
*
* @param {string} labelTitle - Jira import label, for checking localStorage
*/
export const setFinishedAlertHideMap = labelTitle => {
const finishedAlertHideMap =
JSON.parse(localStorage.getItem(JIRA_IMPORT_SUCCESS_ALERT_HIDE_MAP_KEY)) || {};
finishedAlertHideMap[labelTitle] = true;
localStorage.setItem(
JIRA_IMPORT_SUCCESS_ALERT_HIDE_MAP_KEY,
JSON.stringify(finishedAlertHideMap),
);
};
.alert-management-list {
.new-alert {
background-color: $issues-today-bg;
}
// these styles should be deleted once the GlTable component looks the same in GitLab as in @gitlab/ui
table {
color: $gray-700;
......
......@@ -81,34 +81,36 @@ module IssuableCollections
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def finder_options
params[:state] = default_state if params[:state].blank?
options = {
scope: params[:scope],
state: params[:state],
confidential: Gitlab::Utils.to_boolean(params[:confidential]),
sort: set_sort_order
}
# Used by view to highlight active option
@sort = options[:sort]
# When a user looks for an exact iid, we do not filter by search but only by iid
if params[:search] =~ /^#(?<iid>\d+)\z/
options[:iids] = Regexp.last_match[:iid]
params[:search] = nil
strong_memoize(:finder_options) do
params[:state] = default_state if params[:state].blank?
options = {
scope: params[:scope],
state: params[:state],
confidential: Gitlab::Utils.to_boolean(params[:confidential]),
sort: set_sort_order
}
# Used by view to highlight active option
@sort = options[:sort]
# When a user looks for an exact iid, we do not filter by search but only by iid
if params[:search] =~ /^#(?<iid>\d+)\z/
options[:iids] = Regexp.last_match[:iid]
params[:search] = nil
end
if @project
options[:project_id] = @project.id
options[:attempt_project_search_optimizations] = true
elsif @group
options[:group_id] = @group.id
options[:include_subgroups] = true
options[:attempt_group_search_optimizations] = true
end
params.permit(finder_type.valid_params).merge(options)
end
if @project
options[:project_id] = @project.id
options[:attempt_project_search_optimizations] = true
elsif @group
options[:group_id] = @group.id
options[:include_subgroups] = true
options[:attempt_group_search_optimizations] = true
end
params.permit(finder_type.valid_params).merge(options)
end
# rubocop:enable Gitlab/ModuleWithInstanceVariables
......
......@@ -11,11 +11,11 @@ class Projects::IssuesController < Projects::ApplicationController
include RecordUserLastActivity
def issue_except_actions
%i[index calendar new create bulk_update import_csv export_csv]
%i[index calendar new create bulk_update import_csv export_csv service_desk]
end
def set_issuables_index_only_actions
%i[index calendar]
%i[index calendar service_desk]
end
prepend_before_action(only: [:index]) { authenticate_sessionless_user!(:rss) }
......@@ -223,6 +223,11 @@ class Projects::IssuesController < Projects::ApplicationController
redirect_to project_issues_path(project)
end
def service_desk
@issues = @issuables # rubocop:disable Gitlab/ModuleWithInstanceVariables
@users.push(User.support_bot) # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
protected
def sorting_field
......@@ -320,6 +325,17 @@ class Projects::IssuesController < Projects::ApplicationController
private
def finder_options
options = super
return options unless service_desk?
options.reject! { |key| key == 'author_username' || key == 'author_id' }
options[:author_id] = User.support_bot
options
end
def branch_link(branch)
project_compare_path(project, from: project.default_branch, to: branch[:name])
end
......@@ -337,6 +353,10 @@ class Projects::IssuesController < Projects::ApplicationController
def rate_limiter
::Gitlab::ApplicationRateLimiter
end
def service_desk?
action_name == 'service_desk'
end
end
Projects::IssuesController.prepend_if_ee('EE::Projects::IssuesController')
# frozen_string_literal: true
class Projects::ServiceDeskController < Projects::ApplicationController
before_action :authorize_admin_project!
def show
json_response
end
def update
Projects::UpdateService.new(project, current_user, { service_desk_enabled: params[:service_desk_enabled] }).execute
result = ServiceDeskSettings::UpdateService.new(project, current_user, setting_params).execute
if result[:status] == :success
json_response
else
render json: { message: result[:message] }, status: :unprocessable_entity
end
end
private
def setting_params
params.permit(:issue_template_key, :outgoing_name, :project_key)
end
def json_response
respond_to do |format|
service_desk_settings = project.service_desk_setting
service_desk_attributes =
{
service_desk_address: project.service_desk_address,
service_desk_enabled: project.service_desk_enabled,
issue_template_key: service_desk_settings&.issue_template_key,
template_file_missing: service_desk_settings&.issue_template_missing?,
outgoing_name: service_desk_settings&.outgoing_name,
project_key: service_desk_settings&.project_key
}
format.json { render json: service_desk_attributes }
end
end
end
......@@ -5,10 +5,6 @@ module Projects
class OperationsController < Projects::ApplicationController
before_action :authorize_admin_operations!
before_action :authorize_read_prometheus_alerts!, only: [:reset_alerting_token]
before_action do
push_frontend_feature_flag(:alert_integrations_dropdown, project)
end
respond_to :json, only: [:reset_alerting_token]
helper_method :error_tracking_setting
......
......@@ -38,6 +38,7 @@ class ProjectsController < Projects::ApplicationController
before_action only: [:new, :create] do
frontend_experimentation_tracking_data(:new_create_project_ui, 'click_tab')
push_frontend_feature_flag(:new_create_project_ui) if experiment_enabled?(:new_create_project_ui)
push_frontend_feature_flag(:service_desk_custom_address, @project)
end
layout :determine_layout
......@@ -391,6 +392,7 @@ class ProjectsController < Projects::ApplicationController
:initialize_with_readme,
:autoclose_referenced_issues,
:suggestion_commit_message,
:service_desk_enabled,
project_feature_attributes: %i[
builds_access_level
......
# frozen_string_literal: true
module Ci
module RunnersHelper
def runner_status_icon(runner)
status = runner.status
case status
when :not_connected
content_tag :i, nil,
class: "fa fa-warning",
title: "New runner. Has not connected yet"
when :online, :offline, :paused
content_tag :i, nil,
class: "fa fa-circle runner-status-#{status}",
title: "Runner is #{status}, last contact was #{time_ago_in_words(runner.contacted_at)} ago"
end
end
def runner_link(runner)
display_name = truncate(runner.display_name, length: 15)
id = "\##{runner.id}"
if current_user && current_user.admin
link_to admin_runner_path(runner) do
display_name + id
end
else
display_name + id
end
end
# Because runners cannot be sorted by their cached "contacted_at" values, we have to show uncached values when sorting by "contacted_asc" is requested.
# Please refer to the following issue for more details: https://gitlab.com/gitlab-org/gitlab-foss/issues/55920
def runner_contacted_at(runner)
if params[:sort] == 'contacted_asc'
runner.uncached_contacted_at
else
runner.contacted_at
end
end
end
end
Ci::RunnersHelper.prepend_if_ee('EE::Ci::RunnersHelper')
# frozen_string_literal: true
module RunnersHelper
def runner_status_icon(runner)
status = runner.status
case status
when :not_connected
content_tag :i, nil,
class: "fa fa-warning",
title: "New runner. Has not connected yet"
when :online, :offline, :paused
content_tag :i, nil,
class: "fa fa-circle runner-status-#{status}",
title: "Runner is #{status}, last contact was #{time_ago_in_words(runner.contacted_at)} ago"
end
end
def runner_link(runner)
display_name = truncate(runner.display_name, length: 15)
id = "\##{runner.id}"
if current_user && current_user.admin
link_to admin_runner_path(runner) do
display_name + id
end
else
display_name + id
end
end
# Because runners cannot be sorted by their cached "contacted_at" values, we have to show uncached values when sorting by "contacted_asc" is requested.
# Please refer to the following issue for more details: https://gitlab.com/gitlab-org/gitlab-foss/issues/55920
def runner_contacted_at(runner)
if params[:sort] == 'contacted_asc'
runner.uncached_contacted_at
else
runner.contacted_at
end
end
end
RunnersHelper.prepend_if_ee('EE::RunnersHelper')
......@@ -19,11 +19,22 @@ module Issues
notify_participants
# Update the sent notifications of the old issue so that
# service desk emails are received on the new, moved issue.
update_service_desk_sent_notifications
new_entity
end
private
def update_service_desk_sent_notifications
return unless original_entity.from_service_desk?
original_entity
.sent_notifications.update_all(project_id: new_entity.project_id, noteable_id: new_entity.id)
end
def update_old_entity
super
......
......@@ -294,6 +294,7 @@ class NotificationService
return true if note.system_note_with_references?
send_new_note_notifications(note)
send_service_desk_notification(note)
end
def send_new_note_notifications(note)
......@@ -305,6 +306,21 @@ class NotificationService
end
end
def send_service_desk_notification(note)
return unless Gitlab::ServiceDesk.supported?
return unless note.noteable_type == 'Issue'
issue = note.noteable
support_bot = User.support_bot
return unless issue.service_desk_reply_to.present?
return unless issue.project.service_desk_enabled?
return if note.author == support_bot
return unless issue.subscribed?(support_bot, issue.project)
mailer.service_desk_new_note_email(issue.id, note.id).deliver_later
end
# Notify users when a new release is created
def send_new_release_notifications(release)
recipients = NotificationRecipients::BuildService.build_new_release_recipients(release)
......
# frozen_string_literal: true
module ServiceDeskSettings
class UpdateService < BaseService
def execute
settings = ServiceDeskSetting.safe_find_or_create_by!(project_id: project.id)
unless ::Feature.enabled?(:service_desk_custom_address, project)
params.delete(:project_key)
end
if settings.update(params)
success
else
error(settings.errors.full_messages.to_sentence)
end
end
end
end
......@@ -5,4 +5,4 @@
- authorization_key = @project.alerting_setting.try(:token)
- learn_more_url = help_page_path('user/project/integrations/prometheus', anchor: 'external-prometheus-instances')
#js-settings-prometheus-alerts{ data: { notify_url: notify_url, authorization_key: authorization_key, change_key_url: reset_alerting_token_project_settings_operations_path(@project), learn_more_url: learn_more_url, disabled: Feature.enabled?(:alert_integrations_dropdown, @service.project) && @service.manual_configuration? } }
#js-settings-prometheus-alerts{ data: { notify_url: notify_url, authorization_key: authorization_key, change_key_url: reset_alerting_token_project_settings_operations_path(@project), learn_more_url: learn_more_url, disabled: true } }
- return unless Feature.enabled?(:alert_integrations_dropdown, @service.project) && @service.manual_configuration?
- return unless @service.manual_configuration?
.row
.col-lg-12
......
---
title: Add a Rake task to fix incorrectly-recorded external diffs
merge_request: 36353
author:
type: fixed
---
title: Permanently close Jira import success alert
merge_request: 36571
author:
type: fixed
---
title: Fix positioning of mr/issue count
merge_request: 36621
author:
type: fixed
---
title: Exclude services relation from Project Import/Export
merge_request: 36569
author:
type: fixed
---
title: Enable Alerts dropdown in Operations Settings
merge_request: 36296
author:
type: added
---
title: Show when alert is new in the Alerts list
merge_request: 35708
author:
type: added
......@@ -17,6 +17,7 @@ resources :issues, concerns: :awardable, constraints: { id: /\d+/ } do
end
collection do
get :service_desk
post :bulk_update
post :import_csv
post :export_csv
......
......@@ -340,6 +340,12 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
# All new routes should go under /-/ scope.
# Look for scope '-' at the top of the file.
#
# Service Desk
#
get '/service_desk' => 'service_desk#show', as: :service_desk
put '/service_desk' => 'service_desk#update', as: :service_desk_refresh
#
# Templates
#
......
......@@ -94,6 +94,7 @@ From there, you can see the following actions:
- Permission to approve merge requests by committers was updated ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/7531) in GitLab 12.9)
- Permission to approve merge requests by authors was updated ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/7531) in GitLab 12.9)
- Number of required approvals was updated ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/7531) in GitLab 12.9)
- Added or removed users and groups from project approval groups ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/213603) in GitLab 13.2)
Project events can also be accessed via the [Project Audit Events API](../api/audit_events.md#project-audit-events-starter)
......
......@@ -186,3 +186,51 @@ conditions become true:
These rules strike a balance between space and performance by only storing
frequently-accessed diffs in the database. Diffs that are less likely to be
accessed are moved to external storage instead.
## Correcting incorrectly-migrated diffs
Versions of GitLab earlier than `v13.0.0` would incorrectly record the location
of some merge request diffs when [external diffs in object storage](#object-storage-settings)
were enabled. This mainly affected imported merge requests, and was resolved
with [this merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/31005).
If you are using object storage, have never used on-disk storage for external
diffs, the "changes" tab for some merge requests fails to load with a 500 error,
and the exception for that error is of this form:
```plain
Errno::ENOENT (No such file or directory @ rb_sysopen - /var/opt/gitlab/gitlab-rails/shared/external-diffs/merge_request_diffs/mr-6167082/diff-8199789)
```
Then you are affected by this issue. Since it's not possible to safely determine
all these conditions automatically, we've provided a Rake task in GitLab v13.2.0
that you can run manually to correct the data:
**In Omnibus installations:**
```shell
sudo gitlab-rake gitlab:external_diffs:force_object_storage
```
**In installations from source:**
```shell
sudo -u git -H bundle exec rake gitlab:external_diffs:force_object_storage RAILS_ENV=production
```
Environment variables can be provided to modify the behavior of the task. The
available variables are:
| Name | Default value | Purpose |
| ---- | ------------- | ------- |
| `ANSI` | `true` | Use ANSI escape codes to make output more understandable |
| `BATCH_SIZE` | `1000` | Iterate through the table in batches of this size |
| `START_ID` | `nil` | If set, begin scanning at this ID |
| `END_ID` | `nil` | If set, stop scanning at this ID |
| `UPDATE_DELAY` | `1` | Number of seconds to sleep between updates |
The `START_ID` and `END_ID` variables may be used to run the update in parallel,
by assigning different processes to different parts of the table. The `BATCH_SIZE`
and `UPDATE_DELAY` variables allow the speed of the migration to be traded off
against concurrent access to the table. The `ANSI` variable should be set to
`false` if your terminal does not support ANSI escape codes.
......@@ -5303,6 +5303,11 @@ type Group {
"""
subgroupCreationLevel: String
"""
Date until the temporary storage increase is active
"""
temporaryStorageIncreaseEndsOn: Time
"""
Time logged in issues by group members
"""
......@@ -8204,6 +8209,11 @@ type Namespace {
"""
storageSizeLimit: Float
"""
Date until the temporary storage increase is active
"""
temporaryStorageIncreaseEndsOn: Time
"""
Visibility of the namespace
"""
......
......@@ -14595,6 +14595,20 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "temporaryStorageIncreaseEndsOn",
"description": "Date until the temporary storage increase is active",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "timelogs",
"description": "Time logged in issues by group members",
......@@ -24437,6 +24451,20 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "temporaryStorageIncreaseEndsOn",
"description": "Date until the temporary storage increase is active",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "visibility",
"description": "Visibility of the namespace",
......@@ -818,6 +818,7 @@ Autogenerated return type of EpicTreeReorder
| `shareWithGroupLock` | Boolean | Indicates if sharing a project with another group within this group is prevented |
| `storageSizeLimit` | Float | Total storage limit of the root namespace in bytes |
| `subgroupCreationLevel` | String | The permission level required to create subgroups within the group |
| `temporaryStorageIncreaseEndsOn` | Time | Date until the temporary storage increase is active |
| `twoFactorGracePeriod` | Int | Time before two-factor authentication is enforced |
| `userPermissions` | GroupPermissions! | Permissions for the current user on the resource |
| `visibility` | String | Visibility of the namespace |
......@@ -1254,6 +1255,7 @@ Contains statistics about a milestone
| `requestAccessEnabled` | Boolean | Indicates if users can request access to namespace |
| `rootStorageStatistics` | RootStorageStatistics | Aggregated storage statistics of the namespace. Only available for root namespaces |
| `storageSizeLimit` | Float | Total storage limit of the root namespace in bytes |
| `temporaryStorageIncreaseEndsOn` | Time | Date until the temporary storage increase is active |
| `visibility` | String | Visibility of the namespace |
## Note
......
......@@ -80,13 +80,21 @@ file `basic_login_spec.rb`.
### The outer `context` block
Specs have an outer `context` indicating the DevOps stage.
See the [`RSpec.describe` outer block](#the-outer-rspecdescribe-block).
CAUTION: **Deprecation notice:**
The outer `context` [was deprecated](https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/550) in `13.2`
in adherence to RSpec 4.0 specifications. Use `RSpec.describe` instead.
### The outer `RSpec.describe` block
Specs have an outer `RSpec.describe` indicating the DevOps stage.
```ruby
# frozen_string_literal: true
module QA
context 'Manage' do
RSpec.describe 'Manage' do
end
end
......@@ -94,13 +102,13 @@ end
### The `describe` block
Inside of our outer `context`, describe the feature to test. In this case, `Login`.
Inside of our outer `RSpec.describe`, describe the feature to test. In this case, `Login`.
```ruby
# frozen_string_literal: true
module QA
context 'Manage' do
RSpec.describe 'Manage' do
describe 'Login' do
end
......@@ -115,7 +123,7 @@ writing end-to-end tests is to write test case descriptions as `it` blocks:
```ruby
module QA
context 'Manage' do
RSpec.describe 'Manage' do
describe 'Login' do
it 'can login' do
......@@ -139,7 +147,7 @@ Begin by logging in.
# frozen_string_literal: true
module QA
context 'Manage' do
RSpec.describe 'Manage' do
describe 'Login' do
it 'can login' do
Flow::Login.sign_in
......@@ -162,7 +170,7 @@ should answer the question "What do we test?"
# frozen_string_literal: true
module QA
context 'Manage' do
RSpec.describe 'Manage' do
describe 'Login' do
it 'can login' do
Flow::Login.sign_in
......@@ -210,7 +218,7 @@ a call to `sign_in`.
# frozen_string_literal: true
module QA
context 'Manage' do
RSpec.describe 'Manage' do
describe 'Login' do
before do
Flow::Login.sign_in
......@@ -247,7 +255,7 @@ stage, so [create a file](#identify-the-devops-stage) in
# frozen_string_literal: true
module QA
context 'Plan' do
RSpec.describe 'Plan' do
describe 'Issues' do
let(:issue) do
Resource::Issue.fabricate_via_api! do |issue|
......
# Environment selection
Some tests are designed to be run against specific environments. We can specify
what environments to run tests against using the `only` metadata.
## Available switches
| Switch | Function | Type |
| -------| ------- | ----- |
| `tld` | Set the top-level domain matcher | `String` |
| `subdomain` | Set the subdomain matcher | `Array` or `String` |
| `domain` | Set the domain matcher | `String` |
| `production` | Match against production | `Static` |
WARNING: **Be advised:**
You cannot specify `:production` and `{ <switch>: 'value' }` simultaneously.
These options are mutually exclusive. If you want to specify production, you
can control the `tld` and `domain` independently.
## Examples
| Environment | Key | Matches (regex) |
| ---------------- | --- | --------------- |
| `any` | `` | `.+.com` |
| `gitlab.com` | `only: :production` | `gitlab.com` |
| `staging.gitlab.com` | `only: { subdomain: :staging }` | `(staging).+.com` |
| `gitlab.com and staging.gitlab.com` | `only: { subdomain: /(staging.)?/, domain: 'gitlab' }` | `(staging.)?gitlab.com` |
| `dev.gitlab.org` | `only: { tld: '.org', domain: 'gitlab', subdomain: 'dev' }` | `(dev).gitlab.org` |
| `staging.gitlab.com & domain.gitlab.com` | `only: { subdomain: %i[staging domain] }` | `(staging|domain).+.com` |
```ruby
RSpec.describe 'Area' do
it 'runs in any environment' do; end
it 'runs only in production', only: :production do; end
it 'runs only in staging', only: { subdomain: :staging } do; end
it 'runs in dev', only: { tld: '.org', domain: 'gitlab', subdomain: 'dev' } do; end
it 'runs in prod and staging', only: { subdomain: /(staging.)?/, domain: 'gitlab' } do; end
end
```
NOTE: **Note:**
If the test has a `before` or `after`, you must add the `only` metadata
to the outer `RSpec.describe`.
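For example, a minimal sketch of this placement (reusing `Flow::Login.sign_in` from the beginner's guide above; the example body is illustrative only):

```ruby
# frozen_string_literal: true

module QA
  # Because the group below uses a `before` block, the `only` metadata is
  # attached to the outer RSpec.describe instead of the individual `it`.
  RSpec.describe 'Manage', only: { subdomain: :staging } do
    describe 'Login' do
      before do
        Flow::Login.sign_in
      end

      it('runs only against staging') {}
    end
  end
end
```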
......@@ -7,7 +7,7 @@ Note that administrator authorization is required to change feature flags. `QA::
Please be sure to include the tag `:requires_admin` so that the test can be skipped in environments where admin access is not available.
```ruby
context "with feature flag enabled", :requires_admin do
RSpec.describe "with feature flag enabled", :requires_admin do
before do
Runtime::Feature.enable('feature_flag_name')
end
......
......@@ -17,3 +17,4 @@ This is a partial list of the [RSpec metadata](https://relishapp.com/rspec/rspec
| `:gitaly_ha` | The test will run against a GitLab instance where repositories are stored on redundant Gitaly nodes behind a Praefect node. All nodes are [separate containers](../../../administration/gitaly/praefect.md#requirements-for-configuring-a-gitaly-cluster). Tests that use this tag have a longer setup time since there are three additional containers that need to be started. |
| `:skip_live_env` | The test will be excluded when run against live deployed environments such as Staging, Canary, and Production. |
| `:jira` | The test requires a Jira Server. [GitLab-QA](https://gitlab.com/gitlab-org/gitlab-qa) will provision the Jira Server in a Docker container when the `Test::Integration::Jira` test scenario is run.
| `:only` | The test is only to be run against specific environments. See [Environment selection](environment_selection.md) for more information. |
......@@ -230,7 +230,10 @@ On the EC2 dashboard, look for Load Balancer in the left navigation bar:
1. Click the **Create Load Balancer** button.
1. Choose the **Classic Load Balancer**.
1. Give it a name (we'll use `gitlab-loadbalancer`) and for the **Create LB Inside** option, select `gitlab-vpc` from the dropdown menu.
1. In the **Listeners** section, set HTTP port 80, HTTPS port 443, and TCP port 22 for both load balancer and instance protocols and ports.
1. In the **Listeners** section, set the following listeners:
- HTTP port 80 for both load balancer and instance protocols and ports
- TCP port 22 for both load balancer and instance protocols and ports
- HTTPS port 443 for load balancer protocol and ports, forwarding to HTTP port 80 on the instance (we will configure GitLab to listen on port 80 [later in the guide](#add-support-for-proxied-ssl))
1. In the **Select Subnets** section, select both public subnets from the list so that the load balancer can route traffic to both availability zones.
1. We'll add a security group for our load balancer to act as a firewall to control what traffic is allowed through. Click **Assign Security Groups** and select **Create a new security group**, give it a name
(we'll use `gitlab-loadbalancer-sec-group`) and description, and allow both HTTP and HTTPS traffic
......@@ -244,8 +247,7 @@ On the EC2 dashboard, look for Load Balancer in the left navigation bar:
1. For **Ping Path**, enter `/users/sign_in`. (We use `/users/sign_in` as it's a public endpoint that does
not require authorization.)
1. Keep the default **Advanced Details** or adjust them according to your needs.
1. Click **Add EC2 Instances** but, as we don't have any instances to add yet, come back
to your load balancer after creating your GitLab instances and add them.
1. Click **Add EC2 Instances** - don't add anything as we will create an Auto Scaling Group later to manage instances for us.
1. Click **Add Tags** and add any tags you need.
1. Click **Review and Create**, review all your settings, and click **Create** if you're happy.
......@@ -794,14 +796,14 @@ to request additional material:
Activate all GitLab Enterprise Edition functionality with a license.
- [Pricing](https://about.gitlab.com/pricing/): Pricing for the different tiers.
<!-- ## Troubleshooting
## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
one might have when setting this up, or when something is changed, or on upgrading, it's
important to describe those, too. Think of things that may go wrong and include them here.
This is important to minimize requests for support and to avoid doc comments with
questions that you know someone might ask.
### Instances are failing health checks
Each scenario can be a third-level heading, e.g. `### Getting error message X`.
If you have none to add when creating a doc, leave this section in place
but commented out to help encourage others to add to it in the future. -->
If your instances are failing the load balancer's health checks, verify that they are returning a status `200` from the health check endpoint we configured earlier. Any other status, including redirects (e.g. status `302`) will cause the health check to fail.
You may have to set a password on the `root` user to prevent automatic redirects on the sign-in endpoint before health checks will pass.
### "The change you requested was rejected (422)"
If you see this page when trying to set a password via the web interface, make sure `external_url` in `gitlab.rb` matches the domain you are making a request from, and run `sudo gitlab-ctl reconfigure` after making any changes to it.
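For example, a sketch of the relevant `gitlab.rb` entry (the hostname is a placeholder; use the domain that your load balancer serves):

```ruby
# /etc/gitlab/gitlab.rb
# `external_url` must match the domain you make requests from, otherwise
# password submissions are rejected with a 422. The hostname below is an
# example only.
external_url 'https://gitlab.example.com'

# Apply the change with: sudo gitlab-ctl reconfigure
```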
......@@ -72,20 +72,21 @@ For [manually configured Prometheus instances](../../user/project/integrations/p
| One of `annotations/title`, `annotations/summary`, `labels/alertname` | Yes | Will be used as the chart title |
| `annotations/gitlab_y_label` | No | Will be used as the chart's y-axis label |
## Embedding Cluster Health Charts **(ULTIMATE)**
## Embedding cluster health charts
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/40997) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.9.
> - [Introduced](<https://gitlab.com/gitlab-org/gitlab/-/issues/40997>) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.9.
> - [Moved](<https://gitlab.com/gitlab-org/gitlab/-/issues/208224>) to GitLab core in 13.2.
[Cluster Health Metrics](../../user/project/clusters/index.md#visualizing-cluster-health-ultimate) can also be embedded in [GitLab-flavored Markdown](../../user/markdown.md).
[Cluster Health Metrics](../../user/project/clusters/index.md#visualizing-cluster-health) can also be embedded in [GitLab-flavored Markdown](../../user/markdown.md).
To embed a metric chart, include a link to that chart in the form `https://<root_url>/<project>/-/cluster/<cluster_id>?<query_params>` anywhere that GitLab-flavored Markdown is supported. To generate and copy a link to the chart, follow the instructions in the [Cluster Health Metric documentation](../../user/project/clusters/index.md#visualizing-cluster-health-ultimate).
To embed a metric chart, include a link to that chart in the form `https://<root_url>/<project>/-/cluster/<cluster_id>?<query_params>` anywhere that GitLab-flavored Markdown is supported. To generate and copy a link to the chart, follow the instructions in the [Cluster Health Metric documentation](../../user/project/clusters/index.md#visualizing-cluster-health).
The following requirements must be met for the metric to unfurl:
- The `<cluster_id>` must correspond to a real cluster.
- Prometheus must be monitoring the cluster.
- The user must be allowed access to the project cluster metrics.
- The dashboards must be reporting data on the [Cluster Health Page](../../user/project/clusters/index.md#visualizing-cluster-health-ultimate)
- The dashboards must be reporting data on the [Cluster Health Page](../../user/project/clusters/index.md#visualizing-cluster-health)
If the above requirements are met, then the metric will unfurl as seen below.
......
......@@ -127,3 +127,15 @@ To further customize the issue, you can add labels, mentions, or any other suppo
Since [version 12.2](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/63373), GitLab will tag each incident issue with the `incident` label automatically. If the label does not yet exist, it will be created automatically as well.
If the metric exceeds the threshold of the alert for over 5 minutes, an email will be sent to all [Maintainers and Owners](../../user/permissions.md#project-members-permissions) of the project.
## Keyboard shortcuts for charts
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/202146) in GitLab 13.2.
You can use keyboard shortcuts to interact more quickly with your currently-focused chart panel. To activate keyboard shortcuts, use keyboard tabs to highlight the **{ellipsis_v}** **More actions** dropdown menu, or hover over the dropdown menu with your mouse, then press the key corresponding to your desired action:
- **Expand panel** - <kbd>e</kbd>
- **View logs** - <kbd>l</kbd> (lowercase 'L')
- **Download CSV** - <kbd>d</kbd>
- **Copy link to chart** - <kbd>c</kbd>
- **Alerts** - <kbd>a</kbd>
---
stage: Secure
group: Fuzz Testing
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
type: reference, howto
---
# Coverage Guided Fuzz Testing **(ULTIMATE)**
> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/3226) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 13.2 as an [Alpha feature](https://about.gitlab.com/handbook/product/gitlab-the-product/#alpha).
GitLab allows you to add coverage-guided fuzz testing to your pipelines. This helps you discover
bugs and potential security issues that other QA processes may miss. Coverage-guided fuzzing sends
random inputs to an instrumented version of your application in an effort to cause unexpected
behavior, such as a crash. Such behavior indicates a bug that you should address.
We recommend that you use fuzz testing in addition to the other security scanners in [GitLab Secure](../index.md)
and your own test processes. If you're using [GitLab CI/CD](../../../ci/README.md),
you can run your coverage-guided fuzz tests as part of your CI/CD workflow. You can take advantage of
coverage-guided fuzzing by including the CI job in your existing `.gitlab-ci.yml` file.
## Supported fuzzing engines and languages
GitLab supports these languages through the fuzzing engine listed for each. We currently provide a Docker image for apps written in Go, but you can test the other languages below by providing a Docker image with the fuzz engine to run your app.
| Language | Fuzzing Engine | Example |
|----------|---------------------------------------------------------------------------|---------|
| C/C++ | [libFuzzer](https://llvm.org/docs/LibFuzzer.html) | |
| GoLang | [go-fuzz (libFuzzer support)](https://github.com/dvyukov/go-fuzz) | |
| Rust | [cargo-fuzz (libFuzzer support)](https://github.com/rust-fuzz/cargo-fuzz) | |
## Configuration
To enable fuzzing, you must
[include](../../../ci/yaml/README.md#includetemplate)
the [`Coverage-Fuzzing.gitlab-ci.yml` template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Security/Coverage-Fuzzing.gitlab-ci.yml)
provided as part of your GitLab installation.
To do so, add the following to your `.gitlab-ci.yml` file:
```yaml
include:
- template: Coverage-Fuzzing.gitlab-ci.yml
```
The included template makes available the [hidden job](../../../ci/yaml/README.md#hide-jobs)
`.fuzz_base`, which you must [extend](../../../ci/yaml/README.md#extends) for each of your fuzz
targets. Each fuzz target **must** have a separate job. For example, the
[go-fuzzing-example project](https://gitlab.com/gitlab-org/security-products/demos/go-fuzzing-example)
contains one job that extends `.fuzz_base` for its single fuzz target.
The `my_fuzz_target` job (the separate job for your fuzz target) does the following:
- Extends `.fuzz_base`.
- Compiles the fuzz target with [go-fuzz](https://github.com/dvyukov/go-fuzz).
- Runs the target with the `gitlab-cov-fuzz` command, which is available to each job that extends
`.fuzz_base`.
- Runs on a fuzz stage that usually comes after a test stage.
`gitlab-cov-fuzz` is a command-line tool that runs the instrumented application. It parses and
analyzes the exception information that the fuzzer outputs. It also downloads the [corpus](#glossary)
and crash events from previous pipelines automatically. This helps your fuzz targets build on the progress of
previous fuzzing jobs. The parsed crash events and data are written to
`gl-coverage-fuzzing-report.json` and then displayed in the pipeline and security dashboard.
### Artifacts
Each fuzzing step outputs these artifacts:
- `gl-coverage-fuzzing-report.json`: Parsed by GitLab's backend to show results in the security
dashboard. This file's format may change in future releases.
- `artifacts.zip`: This file contains two directories:
- `corpus`: Holds all test cases generated by the current and all previous jobs.
- `crashes`: Holds all crash events the current job encountered as well as those not fixed in
previous jobs.
### Types of Fuzzing Jobs
There are two types of jobs:
- Fuzzing: Standard fuzzing session. You can configure a long session through a user-defined
timeout.
- Regression: Run the fuzz targets through the accumulated test cases generated by previous fuzzing
sessions plus fixed crashes from previous sessions. This is usually very quick.
Here's our current suggestion for configuring your fuzz target's timeout:
- Set `COVERAGE_FUZZING_BRANCH` to the branch where you want to run long-running (async) fuzzing
jobs. This is `master` by default.
- Use regression or short-running fuzzing jobs for other branches or merge requests.
This suggestion helps find new bugs on the development branch and catch old bugs in merge requests
(like unit tests).
You can configure this by passing `--regression=false/true` to `gitlab-cov-fuzz` as the [Go example](https://gitlab.com/gitlab-org/security-products/demos/go-fuzzing-example/-/blob/master/.gitlab-ci.yml)
shows. Also note that `gitlab-cov-fuzz` is a wrapper, so you can pass those arguments to configure
any option available in the underlying fuzzing engine.
### Available variables
| Environment variable | Description |
|---------------------------|--------------------------------------------------------------------|
| `COVERAGE_FUZZING_BRANCH` | The branch for long-running fuzzing jobs. The default is `master`. |
### Additional Configuration
The `gitlab-cov-fuzz` command passes all arguments it receives to the underlying fuzzing engine. You
can therefore use all the options available in that fuzzing engine. For more information on these
options, see the underlying fuzzing engine's documentation.
### Glossary
- Seed corpus: The set of test cases given as initial input to the fuzz target. This usually speeds
up the fuzz target substantially. This can be either manually created test cases or auto-generated
with the fuzz target itself from previous runs.
- Corpus: The set of meaningful test cases that are generated while the fuzzer is running. Each
meaningful test case produces new coverage in the tested program. It's advised to re-use the
corpus and pass it to subsequent runs.
......@@ -364,9 +364,10 @@ Automatically detect and monitor Kubernetes metrics. Automatic monitoring of
[Read more about Kubernetes monitoring](../integrations/prometheus_library/kubernetes.md)
### Visualizing cluster health **(ULTIMATE)**
### Visualizing cluster health
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/4701) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 10.6.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/4701) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 10.6.
> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/208224) to GitLab core in 13.2.
When [Prometheus is deployed](#installing-applications), GitLab will automatically monitor the cluster's health. At the top of the cluster settings page, CPU and Memory utilization is displayed, along with the total amount available. Keeping an eye on cluster resources can be important, if the cluster runs out of memory pods may be shutdown or fail to start.
......
......@@ -168,6 +168,7 @@ excluded_attributes:
- :marked_for_deletion_by_user_id
- :compliance_framework_setting
- :show_default_award_emojis
- :services
namespaces:
- :runners_token
- :runners_token_encrypted
......
namespace :gitlab do
namespace :external_diffs do
desc "Override external diffs in file storage to be in object storage instead. This does not change the actual location of the data"
task force_object_storage: :environment do |t, args|
ansi = Gitlab::Utils.to_boolean(ENV.fetch('ANSI', true))
batch = ENV.fetch('BATCH_SIZE', 1000)
start_id = ENV.fetch('START_ID', nil)
end_id = ENV.fetch('END_ID', nil)
update_delay = ENV.fetch('UPDATE_DELAY', 1).to_i
# Use ANSI codes to overwrite the same line repeatedly if supported
newline = ansi ? "\x1B8\x1B[2K" : "\n"
total = 0
# The only useful index on the table is by id, so scan through the whole
# table by that and filter out those we don't want on each relation
MergeRequestDiff.in_batches(of: batch, start: start_id, finish: end_id) do |relation| # rubocop:disable Cop/InBatches
count = relation
.except(:order)
.where(stored_externally: true, external_diff_store: ExternalDiffUploader::Store::LOCAL)
.update_all(external_diff_store: ExternalDiffUploader::Store::REMOTE)
total += count
if count > 0
print "#{newline}#{total} updated..."
sleep(update_delay) if update_delay > 0
end
end
puts "done!"
end
end
end
......@@ -16260,9 +16260,6 @@ msgstr ""
msgid "Owner"
msgstr ""
msgid "Package Files"
msgstr ""
msgid "Package Registry"
msgstr ""
......
......@@ -13,7 +13,7 @@ module QA
##
# TODO, phase-out CSS classes added in Ruby helpers.
#
view 'app/helpers/runners_helper.rb' do
view 'app/helpers/ci/runners_helper.rb' do
# rubocop:disable Lint/InterpolationCheck
element :runner_status, 'runner-status-#{status}' # rubocop:disable QA/ElementWithPattern
# rubocop:enable Lint/InterpolationCheck
......
# frozen_string_literal: true
require 'gitlab/qa'
require 'uri'
module QA
module Runtime
......@@ -23,10 +24,39 @@ module QA
SUPPORTED_FEATURES
end
def dot_com?
Runtime::Scenario.gitlab_address.include?(".com")
def address_matches?(*options)
return false unless Runtime::Scenario.attributes[:gitlab_address]
opts = {}
opts[:domain] = '.+'
opts[:tld] = '.com'
uri = URI(Runtime::Scenario.gitlab_address)
if options.any?
options.each do |option|
opts[:domain] = 'gitlab' if option == :production
if option.is_a?(Hash) && !option[:subdomain].nil?
opts.merge!(option)
opts[:subdomain] = case option[:subdomain]
when Array
"(#{option[:subdomain].join("|")})."
when Regexp
option[:subdomain]
else
"(#{option[:subdomain]})."
end
end
end
end
uri.host.match?(/^#{opts[:subdomain]}#{opts[:domain]}#{opts[:tld]}$/)
end
alias_method :dot_com?, :address_matches?
def additional_repository_storage
ENV['QA_ADDITIONAL_REPOSITORY_STORAGE']
end
......
......@@ -18,6 +18,10 @@ module QA
config.before do |example|
Quarantine.skip_or_run_quarantined_tests_or_contexts(config.inclusion_filter.rules, example)
if example.metadata.key?(:only)
skip('Test is not compatible with this environment') unless Runtime::Env.dot_com?(example.metadata[:only])
end
end
end
end
......
......@@ -341,7 +341,7 @@ describe QA::Runtime::Env do
end
end
describe '.dot_com?' do
describe '.address_matches?' do
it 'returns true when url has .com' do
QA::Runtime::Scenario.define(:gitlab_address, "https://staging.gitlab.com")
......@@ -351,7 +351,45 @@ describe QA::Runtime::Env do
it 'returns false when url does not have .com' do
QA::Runtime::Scenario.define(:gitlab_address, "https://gitlab.test")
expect(described_class.dot_com?).to be_falsy
expect(described_class.dot_com?).to be_falsey
end
context 'with arguments' do
it 'returns true when :subdomain is set' do
QA::Runtime::Scenario.define(:gitlab_address, "https://staging.gitlab.com")
expect(described_class.dot_com?(subdomain: :staging)).to be_truthy
end
it 'matches multiple subdomains' do
QA::Runtime::Scenario.define(:gitlab_address, "https://staging.gitlab.com")
expect(described_class.address_matches?(subdomain: [:release, :staging])).to be_truthy
expect(described_class.address_matches?(:production, subdomain: [:release, :staging])).to be_truthy
end
it 'matches :production' do
QA::Runtime::Scenario.define(:gitlab_address, "https://gitlab.com/")
expect(described_class.address_matches?(:production)).to be_truthy
end
it 'doesnt match with mismatching switches' do
QA::Runtime::Scenario.define(:gitlab_address, 'https://gitlab.test')
aggregate_failures do
expect(described_class.address_matches?(tld: '.net')).to be_falsey
expect(described_class.address_matches?(:production)).to be_falsey
expect(described_class.address_matches?(subdomain: [:staging])).to be_falsey
expect(described_class.address_matches?(domain: 'example')).to be_falsey
end
end
end
it 'returns false for mismatching' do
QA::Runtime::Scenario.define(:gitlab_address, "https://staging.gitlab.com")
expect(described_class.address_matches?(:production)).to be_falsey
end
end
end
......@@ -280,4 +280,94 @@ describe QA::Specs::Helpers::Quarantine do
end
end
end
describe 'running against specific environments' do
before do
QA::Runtime::Scenario.define(:gitlab_address, 'https://staging.gitlab.com')
described_class.configure_rspec
end
describe 'description and context blocks' do
context 'with environment set' do
it 'can apply to contexts or descriptions' do
group = describe_successfully 'Runs in staging', only: { subdomain: :staging } do
it('runs in staging') {}
end
expect(group.examples[0].execution_result.status).to eq(:passed)
end
end
context 'with different environment set' do
before do
QA::Runtime::Scenario.define(:gitlab_address, 'https://gitlab.com')
described_class.configure_rspec
end
it 'does not run against production' do
group = describe_successfully 'Runs in staging', :something, only: { subdomain: :staging } do
it('runs in staging') {}
end
expect(group.examples[0].execution_result.status).to eq(:pending)
end
end
end
it 'runs only in staging' do
group = describe_successfully do
it('runs in staging', only: { subdomain: :staging }) {}
it('doesnt run in staging', only: :production) {}
it('runs in staging also', only: { subdomain: %i[release staging] }) {}
it('runs in any env') {}
end
expect(group.examples[0].execution_result.status).to eq(:passed)
expect(group.examples[1].execution_result.status).to eq(:pending)
expect(group.examples[2].execution_result.status).to eq(:passed)
expect(group.examples[3].execution_result.status).to eq(:passed)
end
context 'custom env' do
before do
QA::Runtime::Scenario.define(:gitlab_address, 'https://release.gitlab.net')
end
it 'runs on a custom environment' do
group = describe_successfully do
it('runs on release gitlab net', only: { tld: '.net', subdomain: :release, domain: 'gitlab' } ) {}
it('does not run on release', only: :production ) {}
end
expect(group.examples.first.execution_result.status).to eq(:passed)
expect(group.examples.last.execution_result.status).to eq(:pending)
end
end
context 'production' do
before do
QA::Runtime::Scenario.define(:gitlab_address, 'https://gitlab.com/')
end
it 'runs on production' do
group = describe_successfully do
it('runs on prod', only: :production ) {}
it('does not run in prod', only: { subdomain: :staging }) {}
it('runs in prod and staging', only: { subdomain: /(staging.)?/, domain: 'gitlab' }) {}
end
expect(group.examples[0].execution_result.status).to eq(:passed)
expect(group.examples[1].execution_result.status).to eq(:pending)
expect(group.examples[2].execution_result.status).to eq(:passed)
end
end
it 'outputs a message for invalid environments' do
group = describe_successfully do
it('will skip', only: :production) {}
end
expect(group.examples.first.execution_result.pending_message).to match(/[Tt]est.*not compatible.*environment/)
end
end
end
......@@ -1564,6 +1564,43 @@ RSpec.describe Projects::IssuesController do
end
end
describe 'GET service_desk' do
let_it_be(:project) { create(:project_empty_repo, :public) }
let_it_be(:support_bot) { User.support_bot }
let_it_be(:other_user) { create(:user) }
let_it_be(:service_desk_issue_1) { create(:issue, project: project, author: support_bot) }
let_it_be(:service_desk_issue_2) { create(:issue, project: project, author: support_bot, assignees: [other_user]) }
let_it_be(:other_user_issue) { create(:issue, project: project, author: other_user) }
def get_service_desk(extra_params = {})
get :service_desk, params: extra_params.merge(namespace_id: project.namespace, project_id: project)
end
it 'adds an author filter for the support bot user' do
get_service_desk
expect(assigns(:issues)).to contain_exactly(service_desk_issue_1, service_desk_issue_2)
end
it 'does not allow any other author to be set' do
get_service_desk(author_username: other_user.username)
expect(assigns(:issues)).to contain_exactly(service_desk_issue_1, service_desk_issue_2)
end
it 'supports other filters' do
get_service_desk(assignee_username: other_user.username)
expect(assigns(:issues)).to contain_exactly(service_desk_issue_2)
end
it 'allows an assignee to be specified by id' do
get_service_desk(assignee_id: other_user.id)
expect(assigns(:users)).to contain_exactly(other_user, support_bot)
end
end
describe 'GET #discussions' do
let!(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::ServiceDeskController do
let_it_be(:project) do
create(:project, :private, :custom_repo, service_desk_enabled: true,
files: { '.gitlab/issue_templates/service_desk.md' => 'template' })
end
let_it_be(:user) { create(:user) }
before do
allow(Gitlab::IncomingEmail).to receive(:enabled?) { true }
allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?) { true }
project.add_maintainer(user)
sign_in(user)
end
describe 'GET service desk properties' do
it 'returns service_desk JSON data' do
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }, format: :json
expect(json_response["service_desk_address"]).to match(/\A[^@]+@[^@]+\z/)
expect(json_response["service_desk_enabled"]).to be_truthy
expect(response).to have_gitlab_http_status(:ok)
end
context 'when user is not project maintainer' do
let(:guest) { create(:user) }
it 'renders 404' do
project.add_guest(guest)
sign_in(guest)
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }, format: :json
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when issue template is present' do
it 'returns template_file_missing as false' do
create(:service_desk_setting, project: project, issue_template_key: 'service_desk')
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }, format: :json
response_hash = Gitlab::Json.parse(response.body)
expect(response_hash['template_file_missing']).to eq(false)
end
end
context 'when issue template file becomes outdated' do
it 'returns template_file_missing as true' do
service = ServiceDeskSetting.new(project_id: project.id, issue_template_key: 'deleted')
service.save!(validate: false)
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }, format: :json
expect(json_response['template_file_missing']).to eq(true)
end
end
end
describe 'PUT service desk properties' do
it 'toggles service desk incoming email' do
project.update!(service_desk_enabled: false)
put :update, params: { namespace_id: project.namespace.to_param,
project_id: project,
service_desk_enabled: true }, format: :json
expect(json_response["service_desk_address"]).to be_present
expect(json_response["service_desk_enabled"]).to be_truthy
expect(response).to have_gitlab_http_status(:ok)
end
it 'sets issue_template_key' do
put :update, params: { namespace_id: project.namespace.to_param,
project_id: project,
issue_template_key: 'service_desk' }, format: :json
settings = project.service_desk_setting
expect(settings).to be_present
expect(settings.issue_template_key).to eq('service_desk')
expect(json_response['template_file_missing']).to eq(false)
expect(json_response['issue_template_key']).to eq('service_desk')
end
it 'returns an error when update of service desk settings fails' do
put :update, params: { namespace_id: project.namespace.to_param,
project_id: project,
issue_template_key: 'invalid key' }, format: :json
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['message']).to eq('Issue template key is empty or does not exist')
end
context 'when user cannot admin the project' do
let(:other_user) { create(:user) }
it 'renders 404' do
sign_in(other_user)
put :update, params: { namespace_id: project.namespace.to_param, project_id: project, service_desk_enabled: true }, format: :json
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
end
......@@ -6,7 +6,7 @@ RSpec.describe ProjectsController do
include ExternalAuthorizationServiceHelpers
include ProjectForksHelper
let(:project) { create(:project) }
let(:project) { create(:project, service_desk_enabled: false) }
let(:public_project) { create(:project, :public) }
let(:user) { create(:user) }
let(:jpg) { fixture_file_upload('spec/fixtures/rails_sample.jpg', 'image/jpg') }
......@@ -1396,6 +1396,27 @@ RSpec.describe ProjectsController do
end
end
it 'updates Service Desk attributes' do
project.add_maintainer(user)
sign_in(user)
allow(Gitlab::IncomingEmail).to receive(:enabled?) { true }
allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?) { true }
params = {
service_desk_enabled: true
}
put :update,
params: {
namespace_id: project.namespace,
id: project,
project: params
}
project.reload
expect(response).to have_gitlab_http_status(:found)
expect(project.service_desk_enabled).to eq(true)
end
def project_moved_message(redirect_route, project)
"Project '#{redirect_route.path}' was moved to '#{project.full_path}'. Please update any links and bookmarks that may still have the old path."
end
......
......@@ -349,6 +349,40 @@ describe('AlertManagementTable', () => {
});
expect(findDateFields().exists()).toBe(false);
});
describe('New Alert indicator', () => {
const oldAlert = mockAlerts[0];
const newAlert = { ...oldAlert, isNew: true };
it('should highlight the row when alert is new', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
data: { alerts: { list: [newAlert] }, alertsCount, errored: false },
loading: false,
});
expect(
findAlerts()
.at(0)
.classes(),
).toContain('new-alert');
});
it('should not highlight the row when alert is not new', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
data: { alerts: { list: [oldAlert] }, alertsCount, errored: false },
loading: false,
});
expect(
findAlerts()
.at(0)
.classes(),
).not.toContain('new-alert');
});
});
});
});
......
......@@ -12,7 +12,9 @@ exports[`AlertsSettingsForm with default values renders the initial template 1`]
</p>
</div>
<gl-form-stub>
<!---->
<gl-form-group-stub label=\\"Integrations\\" label-for=\\"integrations\\" label-class=\\"label-bold\\">
<gl-form-select-stub options=\\"[object Object],[object Object]\\" data-testid=\\"alert-settings-select\\" value=\\"generic\\"></gl-form-select-stub> <span class=\\"gl-text-gray-400\\"><gl-sprintf-stub message=\\"Learn more about our %{linkStart}upcoming integrations%{linkEnd}\\"></gl-sprintf-stub></span>
</gl-form-group-stub>
<gl-form-group-stub label=\\"Active\\" label-for=\\"activated\\" label-class=\\"label-bold\\">
<toggle-button-stub id=\\"activated\\"></toggle-button-stub>
</gl-form-group-stub>
......@@ -29,9 +31,18 @@ exports[`AlertsSettingsForm with default values renders the initial template 1`]
Resetting the authorization key for this project will require updating the authorization key in every alert source it is enabled in.
</gl-modal-stub>
</gl-form-group-stub>
<!---->
<gl-form-group-stub label=\\"Alert test payload\\" label-for=\\"alert-json\\" label-class=\\"label-bold\\">
<gl-form-textarea-stub noresize=\\"true\\" id=\\"alert-json\\" disabled=\\"true\\" state=\\"true\\" placeholder=\\"Enter test alert JSON....\\" rows=\\"6\\" max-rows=\\"10\\"></gl-form-textarea-stub>
</gl-form-group-stub>
<gl-button-stub category=\\"tertiary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\">Test alert payload</gl-button-stub>
<!---->
<div class=\\"footer-block row-content-block gl-display-flex gl-justify-content-space-between\\">
<gl-button-stub category=\\"primary\\" variant=\\"success\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\" type=\\"submit\\">
Save changes
</gl-button-stub>
<gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" type=\\"reset\\">
Cancel
</gl-button-stub>
</div>
</gl-form-stub>
</div>"
`;
......@@ -32,12 +32,7 @@ describe('AlertsSettingsForm', () => {
let wrapper;
let mockAxios;
const createComponent = (
props = defaultProps,
{ methods } = {},
alertIntegrationsDropdown = false,
data,
) => {
const createComponent = (props = defaultProps, { methods } = {}, data) => {
wrapper = shallowMount(AlertsSettingsForm, {
data() {
return { ...data };
......@@ -47,11 +42,6 @@ describe('AlertsSettingsForm', () => {
...props,
},
methods,
provide: {
glFeatures: {
alertIntegrationsDropdown,
},
},
});
};
......
......@@ -16,10 +16,8 @@ describe('IssuableListRootApp', () => {
const findAlertLabel = () => wrapper.find(GlAlert).find(GlLabel);
const mountComponent = ({
isFinishedAlertShowing = false,
isInProgressAlertShowing = false,
isInProgress = false,
isFinished = false,
shouldShowFinishedAlert = false,
shouldShowInProgressAlert = false,
} = {}) =>
shallowMount(IssuableListRootApp, {
propsData: {
......@@ -30,13 +28,11 @@ describe('IssuableListRootApp', () => {
},
data() {
return {
isFinishedAlertShowing,
isInProgressAlertShowing,
jiraImport: {
importedIssuesCount: 1,
isInProgress,
isFinished,
label,
shouldShowFinishedAlert,
shouldShowInProgressAlert,
},
};
},
......@@ -58,8 +54,7 @@ describe('IssuableListRootApp', () => {
describe('when Jira import is in progress', () => {
it('shows an alert that tells the user a Jira import is in progress', () => {
wrapper = mountComponent({
isInProgressAlertShowing: true,
isInProgress: true,
shouldShowInProgressAlert: true,
});
expect(findAlert().text()).toBe(
......@@ -71,8 +66,7 @@ describe('IssuableListRootApp', () => {
describe('when Jira import has finished', () => {
beforeEach(() => {
wrapper = mountComponent({
isFinishedAlertShowing: true,
isFinished: true,
shouldShowFinishedAlert: true,
});
});
......@@ -106,8 +100,7 @@ describe('IssuableListRootApp', () => {
describe('alert message', () => {
it('is hidden when dismissed', () => {
wrapper = mountComponent({
isInProgressAlertShowing: true,
isInProgress: true,
shouldShowInProgressAlert: true,
});
expect(wrapper.contains(GlAlert)).toBe(true);
......
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import {
calculateJiraImportLabel,
extractJiraProjectsOptions,
IMPORT_STATE,
isFinished,
isInProgress,
setFinishedAlertHideMap,
shouldShowFinishedAlert,
} from '~/jira_import/utils/jira_import_utils';
import { JIRA_IMPORT_SUCCESS_ALERT_HIDE_MAP_KEY } from '~/issuables_list/constants';
useLocalStorageSpy();
describe('isInProgress', () => {
it.each`
......@@ -89,3 +95,56 @@ describe('calculateJiraImportLabel', () => {
expect(label.color).toBe('#333');
});
});
describe('shouldShowFinishedAlert', () => {
const labelTitle = 'jira-import::JCP-1';
afterEach(() => {
localStorage.clear();
});
it('checks localStorage value', () => {
jest.spyOn(localStorage, 'getItem').mockReturnValue(JSON.stringify({}));
shouldShowFinishedAlert(labelTitle, IMPORT_STATE.FINISHED);
expect(localStorage.getItem).toHaveBeenCalledWith(JIRA_IMPORT_SUCCESS_ALERT_HIDE_MAP_KEY);
});
it('returns true when an import has finished', () => {
jest.spyOn(localStorage, 'getItem').mockReturnValue(JSON.stringify({}));
expect(shouldShowFinishedAlert(labelTitle, IMPORT_STATE.FINISHED)).toBe(true);
});
it('returns false when an import has finished but the user chose to hide the alert', () => {
jest.spyOn(localStorage, 'getItem').mockReturnValue(JSON.stringify({ [labelTitle]: true }));
expect(shouldShowFinishedAlert(labelTitle, IMPORT_STATE.FINISHED)).toBe(false);
});
it('returns false when an import has not finished', () => {
jest.spyOn(localStorage, 'getItem').mockReturnValue(JSON.stringify({}));
expect(shouldShowFinishedAlert(labelTitle, IMPORT_STATE.SCHEDULED)).toBe(false);
});
});
describe('setFinishedAlertHideMap', () => {
const labelTitle = 'jira-import::ABC-1';
const newLabelTitle = 'jira-import::JCP-1';
it('sets item to localStorage correctly', () => {
jest.spyOn(localStorage, 'getItem').mockReturnValue(JSON.stringify({ [labelTitle]: true }));
setFinishedAlertHideMap(newLabelTitle);
expect(localStorage.setItem).toHaveBeenCalledWith(
JIRA_IMPORT_SUCCESS_ALERT_HIDE_MAP_KEY,
JSON.stringify({
[labelTitle]: true,
[newLabelTitle]: true,
}),
);
});
});
......@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe RunnersHelper do
RSpec.describe Ci::RunnersHelper do
it "returns - not contacted yet" do
runner = FactoryBot.build :ci_runner
expect(runner_status_icon(runner)).to include("not connected yet")
......
......@@ -87,6 +87,56 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
context 'for create' do
it 'includes usage_activity_by_stage data' do
expect(described_class.uncached_data[:usage_activity_by_stage][:create])
.not_to include(
:merge_requests_users
)
end
it 'includes monthly usage_activity_by_stage data' do
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:create])
.to include(
:merge_requests_users
)
end
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
project = create(:project, :repository_private,
:test_repo, :remote_mirror, creator: user)
create(:merge_request, source_project: project)
create(:deploy_key, user: user)
create(:key, user: user)
create(:project, creator: user, disable_overriding_approvers_per_merge_request: true)
create(:project, creator: user, disable_overriding_approvers_per_merge_request: false)
create(:remote_mirror, project: project)
create(:snippet, author: user)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:create]).to include(
deploy_keys: 2,
keys: 2,
merge_requests: 2,
projects_with_disable_overriding_approvers_per_merge_request: 2,
projects_without_disable_overriding_approvers_per_merge_request: 4,
remote_mirrors: 2,
snippets: 2
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:create]).to include(
deploy_keys: 1,
keys: 1,
merge_requests: 1,
projects_with_disable_overriding_approvers_per_merge_request: 1,
projects_without_disable_overriding_approvers_per_merge_request: 2,
remote_mirrors: 1,
snippets: 1
)
end
end
context 'for manage' do
it 'includes accurate usage_activity_by_stage data' do
stub_config(
......@@ -123,6 +173,77 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
context 'for monitor' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user, dashboard: 'operations')
cluster = create(:cluster, user: user)
create(:project, creator: user)
create(:clusters_applications_prometheus, :installed, cluster: cluster)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:monitor]).to include(
clusters: 2,
clusters_applications_prometheus: 2,
operations_dashboard_default_dashboard: 2
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:monitor]).to include(
clusters: 1,
clusters_applications_prometheus: 1,
operations_dashboard_default_dashboard: 1
)
end
end
context 'for plan' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
project = create(:project, creator: user)
issue = create(:issue, project: project, author: user)
create(:note, project: project, noteable: issue, author: user)
create(:todo, project: project, target: issue, author: user)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:plan]).to include(
issues: 2,
notes: 2,
projects: 2,
todos: 2
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:plan]).to include(
issues: 1,
notes: 1,
projects: 1,
todos: 1
)
end
end
context 'for release' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
create(:deployment, :failed, user: user)
create(:release, author: user)
create(:deployment, :success, user: user)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:release]).to include(
deployments: 2,
failed_deployments: 2,
releases: 2,
successful_deployments: 2
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:release]).to include(
deployments: 1,
failed_deployments: 1,
releases: 1,
successful_deployments: 1
)
end
end
context 'for verify' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
......@@ -163,127 +284,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
context 'for create' do
it 'includes usage_activity_by_stage data' do
expect(described_class.uncached_data[:usage_activity_by_stage][:create])
.not_to include(
:merge_requests_users
)
end
it 'includes monthly usage_activity_by_stage data' do
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:create])
.to include(
:merge_requests_users
)
end
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
project = create(:project, :repository_private,
:test_repo, :remote_mirror, creator: user)
create(:merge_request, source_project: project)
create(:deploy_key, user: user)
create(:key, user: user)
create(:project, creator: user, disable_overriding_approvers_per_merge_request: true)
create(:project, creator: user, disable_overriding_approvers_per_merge_request: false)
create(:remote_mirror, project: project)
create(:snippet, author: user)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:create]).to include(
deploy_keys: 2,
keys: 2,
merge_requests: 2,
projects_with_disable_overriding_approvers_per_merge_request: 2,
projects_without_disable_overriding_approvers_per_merge_request: 4,
remote_mirrors: 2,
snippets: 2
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:create]).to include(
deploy_keys: 1,
keys: 1,
merge_requests: 1,
projects_with_disable_overriding_approvers_per_merge_request: 1,
projects_without_disable_overriding_approvers_per_merge_request: 2,
remote_mirrors: 1,
snippets: 1
)
end
end
context 'for monitor' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user, dashboard: 'operations')
cluster = create(:cluster, user: user)
create(:project, creator: user)
create(:clusters_applications_prometheus, :installed, cluster: cluster)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:monitor]).to include(
clusters: 2,
clusters_applications_prometheus: 2,
operations_dashboard_default_dashboard: 2
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:monitor]).to include(
clusters: 1,
clusters_applications_prometheus: 1,
operations_dashboard_default_dashboard: 1
)
end
end
context 'for plan' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
project = create(:project, creator: user)
issue = create(:issue, project: project, author: user)
create(:note, project: project, noteable: issue, author: user)
create(:todo, project: project, target: issue, author: user)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:plan]).to include(
issues: 2,
notes: 2,
projects: 2,
todos: 2
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:plan]).to include(
issues: 1,
notes: 1,
projects: 1,
todos: 1
)
end
end
context 'for release' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
create(:deployment, :failed, user: user)
create(:release, author: user)
create(:deployment, :success, user: user)
end
expect(described_class.uncached_data[:usage_activity_by_stage][:release]).to include(
deployments: 2,
failed_deployments: 2,
releases: 2,
successful_deployments: 2
)
expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:release]).to include(
deployments: 1,
failed_deployments: 1,
releases: 1,
successful_deployments: 1
)
end
end
it 'ensures recorded_at is set before any other usage data calculation' do
%i(alt_usage_data redis_usage_data distinct_count count).each do |method|
expect(described_class).not_to receive(method)
......
......@@ -210,4 +210,49 @@ RSpec.describe Issues::MoveService do
end
end
end
context 'updating sent notifications' do
let!(:old_issue_notification_1) { create(:sent_notification, project: old_issue.project, noteable: old_issue) }
let!(:old_issue_notification_2) { create(:sent_notification, project: old_issue.project, noteable: old_issue) }
let!(:other_issue_notification) { create(:sent_notification, project: old_issue.project) }
include_context 'user can move issue'
context 'when issue is from service desk' do
before do
allow(old_issue).to receive(:from_service_desk?).and_return(true)
end
it 'updates moved issue sent notifications' do
new_issue = move_service.execute(old_issue, new_project)
old_issue_notification_1.reload
old_issue_notification_2.reload
expect(old_issue_notification_1.project_id).to eq(new_issue.project_id)
expect(old_issue_notification_1.noteable_id).to eq(new_issue.id)
expect(old_issue_notification_2.project_id).to eq(new_issue.project_id)
expect(old_issue_notification_2.noteable_id).to eq(new_issue.id)
end
it 'does not update other issues sent notifications' do
expect do
move_service.execute(old_issue, new_project)
other_issue_notification.reload
end.not_to change { other_issue_notification.noteable_id }
end
end
context 'when issue is not from service desk' do
it 'does not update sent notifications' do
move_service.execute(old_issue, new_project)
old_issue_notification_1.reload
old_issue_notification_2.reload
expect(old_issue_notification_1.project_id).to eq(old_issue.project_id)
expect(old_issue_notification_1.noteable_id).to eq(old_issue.id)
expect(old_issue_notification_2.project_id).to eq(old_issue.project_id)
expect(old_issue_notification_2.noteable_id).to eq(old_issue.id)
end
end
end
end
......@@ -343,6 +343,79 @@ RSpec.describe NotificationService, :mailer do
end
end
context 'on service desk issue' do
before do
allow(Notify).to receive(:service_desk_new_note_email)
.with(Integer, Integer).and_return(mailer)
allow(::Gitlab::IncomingEmail).to receive(:enabled?) { true }
allow(::Gitlab::IncomingEmail).to receive(:supports_wildcard?) { true }
end
let(:subject) { NotificationService.new }
let(:mailer) { double(deliver_later: true) }
def should_email!
expect(Notify).to receive(:service_desk_new_note_email)
.with(issue.id, note.id)
end
def should_not_email!
expect(Notify).not_to receive(:service_desk_new_note_email)
end
def execute!
subject.new_note(note)
end
def self.it_should_email!
it 'sends the email' do
should_email!
execute!
end
end
def self.it_should_not_email!
it 'doesn\'t send the email' do
should_not_email!
execute!
end
end
let(:issue) { create(:issue, author: User.support_bot) }
let(:project) { issue.project }
let(:note) { create(:note, noteable: issue, project: project) }
context 'a non-service-desk issue' do
it_should_not_email!
end
context 'a service-desk issue' do
before do
issue.update!(service_desk_reply_to: 'service.desk@example.com')
project.update!(service_desk_enabled: true)
end
it_should_email!
context 'when the project has disabled the feature' do
before do
project.update(service_desk_enabled: false)
end
it_should_not_email!
end
context 'when the support bot has unsubscribed' do
before do
issue.unsubscribe(User.support_bot, project)
end
it_should_not_email!
end
end
end
describe 'new note on issue in project that belongs to a group' do
before do
note.project.namespace_id = group.id
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ServiceDeskSettings::UpdateService do
describe '#execute' do
let_it_be(:settings) { create(:service_desk_setting, outgoing_name: 'original name') }
let_it_be(:user) { create(:user) }
context 'with valid params' do
let(:params) { { outgoing_name: 'some name', project_key: 'foo' } }
it 'updates service desk settings' do
result = described_class.new(settings.project, user, params).execute
expect(result[:status]).to eq :success
expect(settings.reload.outgoing_name).to eq 'some name'
expect(settings.reload.project_key).to eq 'foo'
end
context 'when service_desk_custom_address is disabled' do
before do
stub_feature_flags(service_desk_custom_address: false)
end
it 'ignores project_key parameter' do
result = described_class.new(settings.project, user, params).execute
expect(result[:status]).to eq :success
expect(settings.reload.project_key).to be_nil
end
end
end
context 'with invalid params' do
let(:params) { { outgoing_name: 'x' * 256 } }
it 'does not update service desk settings' do
result = described_class.new(settings.project, user, params).execute
expect(result[:status]).to eq :error
expect(result[:message]).to eq 'Outgoing name is too long (maximum is 255 characters)'
expect(settings.reload.outgoing_name).to eq 'original name'
end
end
end
end
# frozen_string_literal: true
require 'rake_helper'
RSpec.describe 'gitlab:external_diffs rake tasks' do
before do
Rake.application.rake_require 'tasks/gitlab/external_diffs'
end
describe 'force_object_storage task' do
it 'forces externally stored merge request diffs to object storage' do
db = create(:merge_request).merge_request_diff
file = create(:merge_request).merge_request_diff.tap { |o| o.update_columns(stored_externally: true, external_diff_store: 1) }
object = create(:merge_request).merge_request_diff.tap { |o| o.update_columns(stored_externally: true, external_diff_store: 2) }
run_rake_task('gitlab:external_diffs:force_object_storage')
expect(db.reload).not_to be_stored_externally
expect(file.reload).to be_stored_externally
expect(object.reload).to be_stored_externally
expect(file.external_diff_store).to eq(2)
expect(object.external_diff_store).to eq(2)
end
it 'limits batches according to BATCH_SIZE, START_ID, and END_ID' do
stub_env('START_ID' => 'foo', 'END_ID' => 'bar', 'BATCH_SIZE' => 'baz')
expect(MergeRequestDiff).to receive(:in_batches).with(start: 'foo', finish: 'bar', of: 'baz')
run_rake_task('gitlab:external_diffs:force_object_storage')
end
end
end
......@@ -4,6 +4,6 @@
source 'https://rubygems.org'
gem 'overcommit'
gem 'gitlab-styles', '~> 4.2.0', require: false
gem 'gitlab-styles', '~> 4.3.0', require: false
gem 'scss_lint', '~> 0.56.0', require: false
gem 'haml_lint', '~> 0.34.0', require: false
......@@ -11,7 +11,7 @@ GEM
childprocess (3.0.0)
concurrent-ruby (1.1.6)
ffi (1.12.2)
gitlab-styles (4.2.0)
gitlab-styles (4.3.0)
rubocop (~> 0.82.0)
rubocop-gitlab-security (~> 0.1.0)
rubocop-performance (~> 1.5.2)
......@@ -83,7 +83,7 @@ PLATFORMS
ruby
DEPENDENCIES
gitlab-styles (~> 4.2.0)
gitlab-styles (~> 4.3.0)
haml_lint (~> 0.34.0)
overcommit
scss_lint (~> 0.56.0)
......