Commit 4233d3aa authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

Parent e357d495
......@@ -67,15 +67,12 @@ build-assets-image:
stage: build-images
needs: ["gitlab:assets:compile pull-cache"]
variables:
GIT_STRATEGY: none
GIT_DEPTH: "1"
script:
- wget -O ./build_assets_image "${CI_PROJECT_URL}/raw/${CI_COMMIT_SHA}/scripts/build_assets_image"
- wget -O ./Dockerfile.assets "${CI_PROJECT_URL}/raw/${CI_COMMIT_SHA}/Dockerfile.assets"
- chmod +x build_assets_image
# TODO: Change the image tag to be the MD5 of assets files and skip image building if the image exists
# We'll also need to pass GITLAB_ASSETS_TAG to the triggered omnibus-gitlab pipeline similarly to how we do it for triggered CNG pipelines
# https://gitlab.com/gitlab-org/gitlab/issues/208389
- ./build_assets_image
- scripts/build_assets_image
.compile-assets-metadata:
extends:
......
......@@ -111,8 +111,8 @@ export default {
const filterGroupsBy = getParameterByName('filter') || null;
this.isLoading = true;
// eslint-disable-next-line promise/catch-or-return
this.fetchGroups({
return this.fetchGroups({
page,
filterGroupsBy,
sortBy,
......@@ -126,8 +126,7 @@ export default {
fetchPage(page, filterGroupsBy, sortBy, archived) {
this.isLoading = true;
// eslint-disable-next-line promise/catch-or-return
this.fetchGroups({
return this.fetchGroups({
page,
filterGroupsBy,
sortBy,
......
......@@ -108,14 +108,14 @@ export default {
return acc.concat({
name,
path,
to: `/-/tree/${joinPaths(escapeFileUrl(this.ref), path)}`,
to: `/-/tree/${joinPaths(this.escapedRef, path)}`,
});
},
[
{
name: this.projectShortPath,
path: '/',
to: `/-/tree/${escapeFileUrl(this.ref)}/`,
to: `/-/tree/${this.escapedRef}/`,
},
],
);
......
......@@ -81,7 +81,7 @@ export default {
<tbody>
<parent-row
v-show="showParentRow"
:commit-ref="ref"
:commit-ref="escapedRef"
:path="path"
:loading-path="loadingPath"
/>
......
<script>
import { GlLoadingIcon } from '@gitlab/ui';
import { escapeFileUrl } from '~/lib/utils/url_utility';
export default {
components: {
......@@ -29,7 +28,7 @@ export default {
return splitArray.map(p => encodeURIComponent(p)).join('/');
},
parentRoute() {
return { path: `/-/tree/${escapeFileUrl(this.commitRef)}/${this.parentPath}` };
return { path: `/-/tree/${this.commitRef}/${this.parentPath}` };
},
},
methods: {
......
......@@ -99,7 +99,7 @@ export default {
computed: {
routerLinkTo() {
return this.isFolder
? { path: `/-/tree/${escapeFileUrl(this.ref)}/${escapeFileUrl(this.path)}` }
? { path: `/-/tree/${this.escapedRef}/${escapeFileUrl(this.path)}` }
: null;
},
isFolder() {
......
......@@ -15,14 +15,15 @@ import { __ } from '../locale';
export default function setupVueRepositoryList() {
const el = document.getElementById('js-tree-list');
const { dataset } = el;
const { projectPath, projectShortPath, ref, fullName } = dataset;
const router = createRouter(projectPath, ref);
const { projectPath, projectShortPath, ref, escapedRef, fullName } = dataset;
const router = createRouter(projectPath, escapedRef);
apolloProvider.clients.defaultClient.cache.writeData({
data: {
projectPath,
projectShortPath,
ref,
escapedRef,
vueFileListLfsBadge: gon.features?.vueFileListLfsBadge || false,
commits: [],
},
......
......@@ -23,13 +23,13 @@ export function fetchLogsTree(client, path, offset, resolver = null) {
if (fetchpromise) return fetchpromise;
const { projectPath } = client.readQuery({ query: getProjectPath });
const { ref } = client.readQuery({ query: getRef });
const { escapedRef } = client.readQuery({ query: getRef });
fetchpromise = axios
.get(
`${gon.relative_url_root}/${projectPath}/-/refs/${encodeURIComponent(
ref,
)}/logs_tree/${encodeURIComponent(path.replace(/^\//, ''))}`,
`${gon.relative_url_root}/${projectPath}/-/refs/${escapedRef}/logs_tree/${encodeURIComponent(
path.replace(/^\//, ''),
)}`,
{
params: { format: 'json', offset },
},
......
......@@ -4,11 +4,19 @@ export default {
apollo: {
ref: {
query: getRef,
manual: true,
result({ data, loading }) {
if (!loading) {
this.ref = data.ref;
this.escapedRef = data.escapedRef;
}
},
},
},
data() {
return {
ref: '',
escapedRef: '',
};
},
};
......@@ -12,7 +12,7 @@ export default function createRouter(base, baseRef) {
base: joinPaths(gon.relative_url_root || '', base),
routes: [
{
path: `(/-)?/tree/(${encodeURIComponent(baseRef).replace(/%2F/g, '/')}|${baseRef})/:path*`,
path: `(/-)?/tree/${baseRef}/:path*`,
name: 'treePath',
component: TreePage,
props: route => ({
......
......@@ -194,6 +194,7 @@ module TreeHelper
project_path: project.full_path,
project_short_path: project.path,
ref: ref,
escaped_ref: ActionDispatch::Journey::Router::Utils.escape_path(ref),
full_name: project.name_with_namespace
}
end
......
......@@ -11,3 +11,5 @@ module Emails
end
end
end
Emails::BaseService.prepend_if_ee('::EE::Emails::BaseService')
......@@ -10,7 +10,10 @@ module Projects
return forbidden unless alerts_service_activated?
return unauthorized unless valid_token?(token)
process_incident_issues if process_issues?
alert = create_alert
return bad_request unless alert.persisted?
process_incident_issues(alert) if process_issues?
send_alert_email if send_email?
ServiceResponse.success
......@@ -22,13 +25,21 @@ module Projects
delegate :alerts_service, :alerts_service_activated?, to: :project
def am_alert_params
Gitlab::AlertManagement::AlertParams.from_generic_alert(project: project, payload: params.to_h)
end
def create_alert
AlertManagement::Alert.create(am_alert_params)
end
def send_email?
incident_management_setting.send_email?
end
def process_incident_issues
def process_incident_issues(alert)
IncidentManagement::ProcessAlertWorker
.perform_async(project.id, parsed_payload)
.perform_async(project.id, parsed_payload, alert.id)
end
def send_alert_email
......
......@@ -7,11 +7,14 @@ module IncidentManagement
queue_namespace :incident_management
feature_category :incident_management
def perform(project_id, alert)
def perform(project_id, alert_payload, am_alert_id = nil)
project = find_project(project_id)
return unless project
create_issue(project, alert)
new_issue = create_issue(project, alert_payload)
return unless am_alert_id && new_issue.persisted?
link_issue_with_alert(am_alert_id, new_issue.id)
end
private
......@@ -20,10 +23,24 @@ module IncidentManagement
Project.find_by_id(project_id)
end
def create_issue(project, alert)
def create_issue(project, alert_payload)
IncidentManagement::CreateIssueService
.new(project, alert)
.new(project, alert_payload)
.execute
end
def link_issue_with_alert(alert_id, issue_id)
alert = AlertManagement::Alert.find_by_id(alert_id)
return unless alert
return if alert.update(issue_id: issue_id)
Gitlab::GitLogger.warn(
message: 'Cannot link an Issue with Alert',
issue_id: issue_id,
alert_id: alert_id,
alert_errors: alert.errors.messages
)
end
end
end
---
title: Add `web_url` to branch API response
merge_request: 30147
author:
type: added
---
title: Fixes branch name not getting escaped correctly on frontend
merge_request:
author:
type: fixed
---
title: Move prepend to last line in ee/services
merge_request: 30425
author: Rajendra Kadam
type: fixed
......@@ -17,6 +17,7 @@ If you want to create a changelog entry for GitLab EE, run the following instead
bin/changelog --ee -m %<mr_iid>s "%<mr_title>s"
```
If this merge request [doesn't need a CHANGELOG entry](https://docs.gitlab.com/ee/development/changelog.html#what-warrants-a-changelog-entry), feel free to ignore this message.
MSG
def check_changelog_yaml(path)
......@@ -65,6 +66,6 @@ if changelog_found
check_changelog_yaml(changelog_found)
check_changelog_path(changelog_found)
elsif changelog.needed?
message "**[CHANGELOG missing](https://docs.gitlab.com/ee/development/changelog.html)**: If this merge request [doesn't need a CHANGELOG entry](https://docs.gitlab.com/ee/development/changelog.html#what-warrants-a-changelog-entry), feel free to ignore this message.\n\n" +
message "**[CHANGELOG missing](https://docs.gitlab.com/ee/development/changelog.html)**:\n\n" +
format(CREATE_CHANGELOG_MESSAGE, mr_iid: gitlab.mr_json["iid"], mr_title: sanitized_mr_title)
end
......@@ -41,6 +41,7 @@ Example response:
"developers_can_push": false,
"developers_can_merge": false,
"can_push": true,
"web_url": "http://gitlab.example.com/my-group/my-project/-/tree/master",
"commit": {
"author_email": "john@example.com",
"author_name": "John Smith",
......@@ -96,6 +97,7 @@ Example response:
"developers_can_push": false,
"developers_can_merge": false,
"can_push": true,
"web_url": "http://gitlab.example.com/my-group/my-project/-/tree/master",
"commit": {
"author_email": "john@example.com",
"author_name": "John Smith",
......@@ -171,7 +173,8 @@ Example response:
"default": false,
"developers_can_push": false,
"developers_can_merge": false,
"can_push": true
"can_push": true,
"web_url": "http://gitlab.example.com/my-group/my-project/-/tree/newbranch"
}
```
......
......@@ -3450,6 +3450,36 @@ type GeoNode {
"""
name: String
"""
Package file registries of the GeoNode. Available only when feature flag `geo_self_service_framework` is enabled
"""
packageFileRegistries(
"""
Returns the elements in the list that come after the specified cursor.
"""
after: String
"""
Returns the elements in the list that come before the specified cursor.
"""
before: String
"""
Returns the first _n_ elements from the list.
"""
first: Int
"""
Filters registries by their ID
"""
ids: [ID!]
"""
Returns the last _n_ elements from the list.
"""
last: Int
): PackageFileRegistryConnection
"""
Indicates whether this Geo node is the primary
"""
......@@ -6329,6 +6359,86 @@ interface Noteable {
): NoteConnection!
}
"""
Represents the sync and verification state of a package file
"""
type PackageFileRegistry {
"""
Timestamp when the PackageFileRegistry was created
"""
createdAt: Time
"""
ID of the PackageFileRegistry
"""
id: ID!
"""
Error message during sync of the PackageFileRegistry
"""
lastSyncFailure: String
"""
Timestamp of the most recent successful sync of the PackageFileRegistry
"""
lastSyncedAt: Time
"""
ID of the PackageFile
"""
packageFileId: ID!
"""
Timestamp after which the PackageFileRegistry should be resynced
"""
retryAt: Time
"""
Number of consecutive failed sync attempts of the PackageFileRegistry
"""
retryCount: Int
"""
Sync state of the PackageFileRegistry
"""
state: RegistryState
}
"""
The connection type for PackageFileRegistry.
"""
type PackageFileRegistryConnection {
"""
A list of edges.
"""
edges: [PackageFileRegistryEdge]
"""
A list of nodes.
"""
nodes: [PackageFileRegistry]
"""
Information to aid in pagination.
"""
pageInfo: PageInfo!
}
"""
An edge in a connection.
"""
type PackageFileRegistryEdge {
"""
A cursor for use in pagination.
"""
cursor: String!
"""
The item at the end of the edge.
"""
node: PackageFileRegistry
}
"""
Information about pagination in a connection.
"""
......@@ -7805,6 +7915,31 @@ type Query {
): VulnerabilityConnection
}
"""
State of a Geo registry.
"""
enum RegistryState {
"""
Registry that failed to sync
"""
FAILED
"""
Registry waiting to be synced
"""
PENDING
"""
Registry currently syncing
"""
STARTED
"""
Registry that is synced
"""
SYNCED
}
"""
Autogenerated input type of RemoveAwardEmoji
"""
......
......@@ -9926,6 +9926,77 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "packageFileRegistries",
"description": "Package file registries of the GeoNode. Available only when feature flag `geo_self_service_framework` is enabled",
"args": [
{
"name": "ids",
"description": "Filters registries by their ID",
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ID",
"ofType": null
}
}
},
"defaultValue": null
},
{
"name": "after",
"description": "Returns the elements in the list that come after the specified cursor.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "before",
"description": "Returns the elements in the list that come before the specified cursor.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "first",
"description": "Returns the first _n_ elements from the list.",
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"defaultValue": null
},
{
"name": "last",
"description": "Returns the last _n_ elements from the list.",
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "PackageFileRegistryConnection",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "primary",
"description": "Indicates whether this Geo node is the primary",
......@@ -19091,6 +19162,251 @@
}
]
},
{
"kind": "OBJECT",
"name": "PackageFileRegistry",
"description": "Represents the sync and verification state of a package file",
"fields": [
{
"name": "createdAt",
"description": "Timestamp when the PackageFileRegistry was created",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "id",
"description": "ID of the PackageFileRegistry",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ID",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "lastSyncFailure",
"description": "Error message during sync of the PackageFileRegistry",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "lastSyncedAt",
"description": "Timestamp of the most recent successful sync of the PackageFileRegistry",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "packageFileId",
"description": "ID of the PackageFile",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ID",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "retryAt",
"description": "Timestamp after which the PackageFileRegistry should be resynced",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Time",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "retryCount",
"description": "Number of consecutive failed sync attempts of the PackageFileRegistry",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "state",
"description": "Sync state of the PackageFileRegistry",
"args": [
],
"type": {
"kind": "ENUM",
"name": "RegistryState",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "PackageFileRegistryConnection",
"description": "The connection type for PackageFileRegistry.",
"fields": [
{
"name": "edges",
"description": "A list of edges.",
"args": [
],
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "PackageFileRegistryEdge",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "nodes",
"description": "A list of nodes.",
"args": [
],
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "PackageFileRegistry",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "pageInfo",
"description": "Information to aid in pagination.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "OBJECT",
"name": "PageInfo",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "PackageFileRegistryEdge",
"description": "An edge in a connection.",
"fields": [
{
"name": "cursor",
"description": "A cursor for use in pagination.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "node",
"description": "The item at the end of the edge.",
"args": [
],
"type": {
"kind": "OBJECT",
"name": "PackageFileRegistry",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "PageInfo",
......@@ -23239,6 +23555,41 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "ENUM",
"name": "RegistryState",
"description": "State of a Geo registry.",
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": [
{
"name": "PENDING",
"description": "Registry waiting to be synced",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "STARTED",
"description": "Registry currently syncing",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "SYNCED",
"description": "Registry that is synced",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "FAILED",
"description": "Registry that failed to sync",
"isDeprecated": false,
"deprecationReason": null
}
],
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "RemoveAwardEmojiInput",
......
......@@ -962,6 +962,21 @@ Represents a milestone.
| `readNote` | Boolean! | Indicates the user can perform `read_note` on this resource |
| `resolveNote` | Boolean! | Indicates the user can perform `resolve_note` on this resource |
## PackageFileRegistry
Represents the sync and verification state of a package file
| Name | Type | Description |
| --- | ---- | ---------- |
| `createdAt` | Time | Timestamp when the PackageFileRegistry was created |
| `id` | ID! | ID of the PackageFileRegistry |
| `lastSyncFailure` | String | Error message during sync of the PackageFileRegistry |
| `lastSyncedAt` | Time | Timestamp of the most recent successful sync of the PackageFileRegistry |
| `packageFileId` | ID! | ID of the PackageFile |
| `retryAt` | Time | Timestamp after which the PackageFileRegistry should be resynced |
| `retryCount` | Int | Number of consecutive failed sync attempts of the PackageFileRegistry |
| `state` | RegistryState | Sync state of the PackageFileRegistry |
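For example, these fields can be selected through the `packageFileRegistries` connection on `GeoNode` (a sketch only; the `geoNode` root query field and the exact selection set are assumptions, written here with `graphql-tag` as used in the GitLab frontend):

```javascript
// Sketch: select PackageFileRegistry fields via the packageFileRegistries
// connection documented above. The `geoNode` root field is an assumption.
import gql from 'graphql-tag';

export default gql`
  query getPackageFileRegistries {
    geoNode {
      packageFileRegistries(first: 10) {
        nodes {
          id
          packageFileId
          state
          lastSyncedAt
          retryCount
        }
      }
    }
  }
`;
```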
## PageInfo
Information about pagination in a connection.
......
......@@ -312,8 +312,8 @@ GET /projects?custom_attributes[key]=value&custom_attributes[other_key]=other_va
### Pagination limits
From GitLab 12.10, [offset-based pagination](README.md#offset-based-pagination) will be
[limited to 10,000 records](https://gitlab.com/gitlab-org/gitlab/issues/34565).
From GitLab 13.0, [offset-based pagination](README.md#offset-based-pagination) will be
[limited to 50,000 records](https://gitlab.com/gitlab-org/gitlab/issues/34565).
[Keyset pagination](README.md#keyset-based-pagination) will be required to retrieve projects
beyond this limit.
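As a rough sketch (the host and token below are placeholders), a keyset-paginated request for projects looks like this; subsequent pages are discovered through the `Link: <…>; rel="next"` response header rather than a page number:

```javascript
// Sketch: fetch the first keyset-paginated page of projects.
// `https://gitlab.example.com` and `<your_access_token>` are placeholders.
async function fetchFirstProjectsPage() {
  const response = await fetch(
    'https://gitlab.example.com/api/v4/projects?pagination=keyset&per_page=100&order_by=id&sort=asc',
    { headers: { 'PRIVATE-TOKEN': '<your_access_token>' } },
  );
  return response.json();
}
```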
......
......@@ -289,3 +289,16 @@ One should apply to be a Vue.js expert by opening an MR when the Merge Request's
- Full understanding of testing a Vue and Vuex application
- Vuex code follows the [documented pattern](vuex.md#actions-pattern-request-and-receive-namespaces)
- Knowledge about the existing Vue and Vuex applications and existing reusable components
## Vue 2 -> Vue 3 Migration
> This section is added temporarily to support the efforts to migrate the codebase from Vue 2.x to Vue 3.x
Currently, we recommend minimizing the addition of certain features to the codebase to avoid increasing the technical debt for the eventual migration:

- filters;
- event buses;
- functional templates;
- `slot` attributes.

You can find more details in [Migration to Vue 3](vue3_migration.md).
# Migration to Vue 3
In order to prepare for the eventual migration to Vue 3.x, we should be wary of adding the following features to the codebase:
## Vue filters
**Why?**
Filters [are removed](https://github.com/vuejs/rfcs/blob/master/active-rfcs/0015-remove-filters.md) from the Vue 3 API completely.
**What to use instead**
Component's computed properties / methods or external helpers.
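For instance, a hypothetical `truncate` filter can become a computed property (a minimal sketch; the component, prop, and property names are illustrative):

```javascript
// Sketch: a `truncate` filter rewritten as a computed property, so no
// Vue 2 filter API is needed. All names here are illustrative.
export default {
  props: {
    title: {
      type: String,
      required: true,
    },
  },
  computed: {
    // Same logic a filter would hold, but testable through the instance.
    truncatedTitle() {
      return this.title.length > 20 ? `${this.title.slice(0, 20)}…` : this.title;
    },
  },
};
```

In the template, `{{ title | truncate }}` simply becomes `{{ truncatedTitle }}`.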
## Event bus
**Why?**
The `$on` and `$off` methods [are removed](https://github.com/vuejs/rfcs/blob/master/active-rfcs/0020-events-api-change.md) from the Vue instance, so in Vue 3 the instance can no longer be used to create an event bus.
**What to use instead**
The Vue docs recommend using the [mitt](https://github.com/developit/mitt) library. It's relatively small (200 bytes gzipped) and has a simple API:
```javascript
import mitt from 'mitt'
const emitter = mitt()
// listen to an event
emitter.on('foo', e => console.log('foo', e) )
// listen to all events
emitter.on('*', (type, e) => console.log(type, e) )
// fire an event
emitter.emit('foo', { a: 'b' })
// working with handler references:
function onFoo() {}
emitter.on('foo', onFoo) // listen
emitter.off('foo', onFoo) // unlisten
```
## `<template functional>`
**Why?**
In Vue 3, `{ functional: true }` option [is removed](https://github.com/vuejs/rfcs/blob/functional-async-api-change/active-rfcs/0007-functional-async-api-change.md) and `<template functional>` is no longer supported.
**What to use instead**
Functional components must be written as plain functions:
```javascript
import { h } from 'vue'
const FunctionalComp = (props, slots) => {
return h('div', `Hello! ${props.name}`)
}
```
## Old slots syntax with `slot` attribute
**Why?**
In Vue 2.6, the `slot` attribute was already deprecated in favor of the `v-slot` directive, but its usage is still allowed, and we sometimes prefer it because it simplifies unit tests (with the old syntax, slots are rendered on `shallowMount`). However, in Vue 3 the old syntax can no longer be used.
**What to use instead**
The syntax with the `v-slot` directive. To keep slots rendered in `shallowMount`, we need to explicitly stub the child component that receives them.
```html
<!-- MyAwesomeComponent.vue -->
<script>
import SomeChildComponent from './some_child_component.vue'
export default {
components: {
SomeChildComponent
}
}
</script>
<template>
<div>
<h1>Hello GitLab!</h1>
<some-child-component>
<template #header>
Header content
</template>
</some-child-component>
</div>
</template>
```
```js
// MyAwesomeComponent.spec.js
import SomeChildComponent from '~/some_child_component.vue'
shallowMount(MyAwesomeComponent, {
stubs: {
SomeChildComponent
}
})
```
......@@ -161,49 +161,7 @@ state.
For example, to add support for files referenced by a `Widget` model with a
`widgets` table, you would perform the following steps:
1. Add verification state fields to the `widgets` table so the Geo primary can
track verification state:
```ruby
# frozen_string_literal: true
class AddVerificationStateToWidgets < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :widgets, :verification_retry_at, :datetime_with_timezone
add_column :widgets, :verified_at, :datetime_with_timezone
add_column :widgets, :verification_checksum, :string
add_column :widgets, :verification_failure, :string
add_column :widgets, :verification_retry_count, :integer
end
end
```
1. Add a partial index on `verification_failure` and `verification_checksum` to ensure
re-verification can be performed efficiently:
```ruby
# frozen_string_literal: true
class AddVerificationFailureIndexToWidgets < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :widgets, :verification_failure, where: "(verification_failure IS NOT NULL)", name: "widgets_verification_failure_partial"
add_concurrent_index :widgets, :verification_checksum, where: "(verification_checksum IS NOT NULL)", name: "widgets_verification_checksum_partial"
end
def down
remove_concurrent_index :widgets, :verification_failure
remove_concurrent_index :widgets, :verification_checksum
end
end
```
#### Replication
1. Include `Gitlab::Geo::ReplicableModel` in the `Widget` class, and specify
the Replicator class `with_replicator Geo::WidgetReplicator`.
......@@ -350,11 +308,53 @@ For example, to add support for files referenced by a `Widget` model with a
end
```
Widget files should now be replicated and verified by Geo!
Widgets should now be replicated by Geo!
#### Verification
1. Add verification state fields to the `widgets` table so the Geo primary can
track verification state:
```ruby
# frozen_string_literal: true
class AddVerificationStateToWidgets < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
add_column :widgets, :verification_retry_at, :datetime_with_timezone
add_column :widgets, :verified_at, :datetime_with_timezone
add_column :widgets, :verification_checksum, :string
add_column :widgets, :verification_failure, :string
add_column :widgets, :verification_retry_count, :integer
end
end
```

1. Add a partial index on `verification_failure` and `verification_checksum` to ensure
re-verification can be performed efficiently:

```ruby
# frozen_string_literal: true
class AddVerificationFailureIndexToWidgets < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :widgets, :verification_failure, where: "(verification_failure IS NOT NULL)", name: "widgets_verification_failure_partial"
add_concurrent_index :widgets, :verification_checksum, where: "(verification_checksum IS NOT NULL)", name: "widgets_verification_checksum_partial"
end
def down
remove_concurrent_index :widgets, :verification_failure
remove_concurrent_index :widgets, :verification_checksum
end
end
```

### Verification statistics with Blob Replicator Strategy

GitLab Geo stores statistic data in the `geo_node_statuses` table.
1. Add fields `widget_count`, `widget_checksummed_count`, and `widget_checksum_failed_count`
to `GeoNodeStatus#RESOURCE_STATUS_FIELDS` array in `ee/app/models/geo_node_status.rb`.
......@@ -378,3 +378,134 @@ GitLab Geo stores statistic data in the `geo_node_statuses` table.
1. Update `Sidekiq metrics` table in `doc/administration/monitoring/prometheus/gitlab_metrics.md` with new fields.
1. Update `GET /geo_nodes/status` example response in `doc/api/geo_nodes.md` with new fields.
1. Update `ee/spec/models/geo_node_status_spec.rb` and `ee/spec/factories/geo_node_statuses.rb` with new fields.
To do: Add verification on secondaries.
Widgets should now be verified by Geo!
#### GraphQL API
1. Add a new field to `GeoNodeType` in
`ee/app/graphql/types/geo/geo_node_type.rb`:
```ruby
field :widget_registries, ::Types::Geo::WidgetRegistryType.connection_type,
null: true,
resolver: ::Resolvers::Geo::WidgetRegistriesResolver,
description: 'Find widget registries on this Geo node',
feature_flag: :geo_self_service_framework
```
1. Add the new `widget_registries` field name to the `expected_fields` array in
`ee/spec/graphql/types/geo/geo_node_type_spec.rb`.
1. Create `ee/app/graphql/resolvers/geo/widget_registries_resolver.rb`:
```ruby
# frozen_string_literal: true
module Resolvers
module Geo
class WidgetRegistriesResolver < BaseResolver
include RegistriesResolver
end
end
end
```
1. Create `ee/spec/graphql/resolvers/geo/widget_registries_resolver_spec.rb`:
```ruby
# frozen_string_literal: true
require 'spec_helper'
describe Resolvers::Geo::WidgetRegistriesResolver do
it_behaves_like 'a Geo registries resolver', :widget_registry
end
```
1. Create `ee/app/finders/geo/widget_registry_finder.rb`:
```ruby
# frozen_string_literal: true
module Geo
class WidgetRegistryFinder
include FrameworkRegistryFinder
end
end
```
1. Create `ee/spec/finders/geo/widget_registry_finder_spec.rb`:
```ruby
# frozen_string_literal: true
require 'spec_helper'
describe Geo::WidgetRegistryFinder do
it_behaves_like 'a framework registry finder', :widget_registry
end
```
1. Create `ee/app/graphql/types/geo/widget_registry_type.rb`:
```ruby
# frozen_string_literal: true
module Types
module Geo
# rubocop:disable Graphql/AuthorizeTypes because it is included
class WidgetRegistryType < BaseObject
include ::Types::Geo::RegistryType
graphql_name 'WidgetRegistry'
description 'Represents the sync and verification state of a widget'
field :widget_id, GraphQL::ID_TYPE, null: false, description: 'ID of the Widget'
end
end
end
```
1. Create `ee/spec/graphql/types/geo/widget_registry_type_spec.rb`:
```ruby
# frozen_string_literal: true
require 'spec_helper'
describe GitlabSchema.types['WidgetRegistry'] do
it_behaves_like 'a Geo registry type'
it 'has the expected fields (other than those included in RegistryType)' do
expected_fields = %i[widget_id]
expect(described_class).to have_graphql_fields(*expected_fields).at_least
end
end
```
1. Add integration tests for providing Widget registry data to the frontend via
the GraphQL API, by duplicating and modifying the following shared examples
in `ee/spec/requests/api/graphql/geo/registries_spec.rb`:
```ruby
it_behaves_like 'gets registries for', {
field_name: 'widgetRegistries',
registry_class_name: 'WidgetRegistry',
registry_factory: :widget_registry,
registry_foreign_key_field_name: 'widgetId'
}
```
Individual widget synchronization and verification data should now be available
via the GraphQL API!
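As a quick check, a query along these lines can be issued from the frontend (a sketch with `graphql-tag`; the `geoNode` root field is an assumption, and the selection set mirrors the field names used for `PackageFileRegistry` earlier in this change):

```javascript
// Sketch: query widget registries on the current Geo node. The `geoNode`
// root field and the exact fields selected are assumptions based on the
// PackageFileRegistry example above.
import gql from 'graphql-tag';

export default gql`
  query getWidgetRegistries($first: Int) {
    geoNode {
      widgetRegistries(first: $first) {
        nodes {
          id
          widgetId
          state
          lastSyncedAt
          lastSyncFailure
        }
      }
    }
  }
`;
```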
#### Admin UI
To do.
Widget sync and verification data (aggregate and individual) should now be
available in the Admin UI!
......@@ -257,7 +257,7 @@ To do so:
1. Confirm the action by typing the project's path as instructed.
NOTE: **Note:**
Only project maintainers have the [permissions](../../permissions.md#project-members-permissions)
Only project owners have the [permissions](../../permissions.md#project-members-permissions)
to remove a fork relationship.
## Operations settings
......
......@@ -3,6 +3,8 @@
module API
module Entities
class Branch < Grape::Entity
include Gitlab::Routing
expose :name
expose :commit, using: Entities::Commit do |repo_branch, options|
......@@ -36,6 +38,10 @@ module API
expose :default do |repo_branch, options|
options[:project].default_branch == repo_branch.name
end
expose :web_url do |repo_branch|
project_tree_url(options[:project], repo_branch.name)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module AlertManagement
class AlertParams
def self.from_generic_alert(project:, payload:)
parsed_payload = Gitlab::Alerting::NotificationPayloadParser.call(payload).with_indifferent_access
annotations = parsed_payload[:annotations]
{
project_id: project.id,
title: annotations[:title],
description: annotations[:description],
monitoring_tool: annotations[:monitoring_tool],
service: annotations[:service],
hosts: Array(annotations[:hosts]),
payload: payload,
started_at: parsed_payload['startsAt']
}
end
end
end
end
......@@ -7,7 +7,8 @@
"protected",
"default",
"developers_can_push",
"developers_can_merge"
"developers_can_merge",
"web_url"
],
"properties" : {
"name": { "type": "string" },
......@@ -17,7 +18,8 @@
"default": { "type": "boolean" },
"developers_can_push": { "type": "boolean" },
"developers_can_merge": { "type": "boolean" },
"can_push": { "type": "boolean" }
"can_push": { "type": "boolean" },
"web_url": { "type": "uri" }
},
"additionalProperties": false
}
......@@ -18,15 +18,13 @@ const createComponent = (groups = mockGroups, parentGroup = mockParentGroupItem)
describe('GroupFolderComponent', () => {
let vm;
beforeEach(done => {
beforeEach(() => {
Vue.component('group-item', groupItemComponent);
vm = createComponent();
vm.$mount();
Vue.nextTick(() => {
done();
});
return Vue.nextTick();
});
afterEach(() => {
......
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import mountComponent from 'helpers/vue_mount_component_helper';
import groupItemComponent from '~/groups/components/group_item.vue';
import groupFolderComponent from '~/groups/components/group_folder.vue';
import eventHub from '~/groups/event_hub';
import * as urlUtilities from '~/lib/utils/url_utility';
import { mockParentGroupItem, mockChildren } from '../mock_data';
const createComponent = (group = mockParentGroupItem, parentGroup = mockChildren[0]) => {
......@@ -17,14 +18,12 @@ const createComponent = (group = mockParentGroupItem, parentGroup = mockChildren
describe('GroupItemComponent', () => {
let vm;
beforeEach(done => {
beforeEach(() => {
Vue.component('group-folder', groupFolderComponent);
vm = createComponent();
Vue.nextTick(() => {
done();
});
return Vue.nextTick();
});
afterEach(() => {
......@@ -130,26 +129,24 @@ describe('GroupItemComponent', () => {
});
it('should emit `toggleChildren` event when expand is clicked on a group and it has children present', () => {
spyOn(eventHub, '$emit');
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
vm.onClickRowGroup(event);
expect(eventHub.$emit).toHaveBeenCalledWith('toggleChildren', vm.group);
});
it('should navigate page to group homepage if group does not have any children present', done => {
it('should navigate page to group homepage if group does not have any children present', () => {
jest.spyOn(urlUtilities, 'visitUrl').mockImplementation();
const group = Object.assign({}, mockParentGroupItem);
group.childrenCount = 0;
const newVm = createComponent(group);
const visitUrl = spyOnDependency(groupItemComponent, 'visitUrl').and.stub();
spyOn(eventHub, '$emit');
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
newVm.onClickRowGroup(event);
setTimeout(() => {
expect(eventHub.$emit).not.toHaveBeenCalled();
expect(visitUrl).toHaveBeenCalledWith(newVm.group.relativePath);
done();
}, 0);
expect(eventHub.$emit).not.toHaveBeenCalled();
expect(urlUtilities.visitUrl).toHaveBeenCalledWith(newVm.group.relativePath);
});
});
});
......@@ -167,7 +164,7 @@ describe('GroupItemComponent', () => {
const badgeEl = vm.$el.querySelector('.badge-warning');
expect(badgeEl).toBeDefined();
expect(badgeEl).toContainText('pending removal');
expect(badgeEl.innerHTML).toContain('pending removal');
});
});
......@@ -180,7 +177,7 @@ describe('GroupItemComponent', () => {
it('does not render the group pending removal badge', () => {
const groupTextContainer = vm.$el.querySelector('.group-text-container');
expect(groupTextContainer).not.toContainText('pending removal');
expect(groupTextContainer).not.toContain('pending removal');
});
});
......
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import mountComponent from 'helpers/vue_mount_component_helper';
import groupsComponent from '~/groups/components/groups.vue';
import groupFolderComponent from '~/groups/components/group_folder.vue';
import groupItemComponent from '~/groups/components/group_item.vue';
......@@ -21,15 +21,13 @@ const createComponent = (searchEmpty = false) => {
describe('GroupsComponent', () => {
let vm;
beforeEach(done => {
beforeEach(() => {
Vue.component('group-folder', groupFolderComponent);
Vue.component('group-item', groupItemComponent);
vm = createComponent();
Vue.nextTick(() => {
done();
});
return vm.$nextTick();
});
afterEach(() => {
......@@ -39,37 +37,35 @@ describe('GroupsComponent', () => {
describe('methods', () => {
describe('change', () => {
it('should emit `fetchPage` event when page is changed via pagination', () => {
spyOn(eventHub, '$emit').and.stub();
jest.spyOn(eventHub, '$emit').mockImplementation();
vm.change(2);
expect(eventHub.$emit).toHaveBeenCalledWith(
'fetchPage',
2,
jasmine.any(Object),
jasmine.any(Object),
jasmine.any(Object),
expect.any(Object),
expect.any(Object),
expect.any(Object),
);
});
});
});
describe('template', () => {
it('should render component template correctly', done => {
Vue.nextTick(() => {
it('should render component template correctly', () => {
return vm.$nextTick().then(() => {
expect(vm.$el.querySelector('.groups-list-tree-container')).toBeDefined();
expect(vm.$el.querySelector('.group-list-tree')).toBeDefined();
expect(vm.$el.querySelector('.gl-pagination')).toBeDefined();
expect(vm.$el.querySelectorAll('.has-no-search-results').length).toBe(0);
done();
});
});
it('should render empty search message when `searchEmpty` is `true`', done => {
it('should render empty search message when `searchEmpty` is `true`', () => {
vm.searchEmpty = true;
Vue.nextTick(() => {
return vm.$nextTick().then(() => {
expect(vm.$el.querySelector('.has-no-search-results')).toBeDefined();
done();
});
});
});
......
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import mountComponent from 'helpers/vue_mount_component_helper';
import itemActionsComponent from '~/groups/components/item_actions.vue';
import eventHub from '~/groups/event_hub';
import { mockParentGroupItem, mockChildren } from '../mock_data';
......@@ -28,7 +28,7 @@ describe('ItemActionsComponent', () => {
describe('methods', () => {
describe('onLeaveGroup', () => {
it('emits `showLeaveGroupModal` event with `group` and `parentGroup` props', () => {
spyOn(eventHub, '$emit');
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
vm.onLeaveGroup();
expect(eventHub.$emit).toHaveBeenCalledWith(
......
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import mountComponent from 'helpers/vue_mount_component_helper';
import itemCaretComponent from '~/groups/components/item_caret.vue';
const createComponent = (isGroupOpen = false) => {
......@@ -12,27 +12,27 @@ const createComponent = (isGroupOpen = false) => {
};
describe('ItemCaretComponent', () => {
let vm;
afterEach(() => {
vm.$destroy();
});
describe('template', () => {
it('should render component template correctly', () => {
const vm = createComponent();
vm = createComponent();
expect(vm.$el.classList.contains('folder-caret')).toBeTruthy();
expect(vm.$el.querySelectorAll('svg').length).toBe(1);
vm.$destroy();
});
it('should render caret down icon if `isGroupOpen` prop is `true`', () => {
const vm = createComponent(true);
vm = createComponent(true);
expect(vm.$el.querySelector('svg use').getAttribute('xlink:href')).toContain('angle-down');
vm.$destroy();
});
it('should render caret right icon if `isGroupOpen` prop is `false`', () => {
const vm = createComponent();
vm = createComponent();
expect(vm.$el.querySelector('svg use').getAttribute('xlink:href')).toContain('angle-right');
vm.$destroy();
});
});
});
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import mountComponent from 'helpers/vue_mount_component_helper';
import itemStatsComponent from '~/groups/components/item_stats.vue';
import {
mockParentGroupItem,
......
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import mountComponent from 'helpers/vue_mount_component_helper';
import itemStatsValueComponent from '~/groups/components/item_stats_value.vue';
const createComponent = ({ title, cssClass, iconName, tooltipPlacement, value }) => {
......@@ -56,6 +56,10 @@ describe('ItemStatsValueComponent', () => {
});
});
afterEach(() => {
vm.$destroy();
});
it('renders component element correctly', () => {
expect(vm.$el.classList.contains('number-subgroups')).toBeTruthy();
expect(vm.$el.querySelectorAll('svg').length).toBeGreaterThan(0);
......@@ -74,9 +78,5 @@ describe('ItemStatsValueComponent', () => {
it('renders value count correctly', () => {
expect(vm.$el.querySelector('.stat-value').innerText.trim()).toContain('10');
});
afterEach(() => {
vm.$destroy();
});
});
});
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import mountComponent from 'helpers/vue_mount_component_helper';
import itemTypeIconComponent from '~/groups/components/item_type_icon.vue';
import { ITEM_TYPE } from '../mock_data';
......@@ -17,7 +17,6 @@ describe('ItemTypeIconComponent', () => {
describe('template', () => {
it('should render component template correctly', () => {
const vm = createComponent();
vm.$mount();
expect(vm.$el.classList.contains('item-type-icon')).toBeTruthy();
vm.$destroy();
......@@ -27,13 +26,11 @@ describe('ItemTypeIconComponent', () => {
let vm;
vm = createComponent(ITEM_TYPE.GROUP, true);
vm.$mount();
expect(vm.$el.querySelector('use').getAttribute('xlink:href')).toContain('folder-open');
vm.$destroy();
vm = createComponent(ITEM_TYPE.GROUP);
vm.$mount();
expect(vm.$el.querySelector('use').getAttribute('xlink:href')).toContain('folder');
vm.$destroy();
......@@ -43,13 +40,11 @@ describe('ItemTypeIconComponent', () => {
let vm;
vm = createComponent(ITEM_TYPE.PROJECT);
vm.$mount();
expect(vm.$el.querySelector('use').getAttribute('xlink:href')).toContain('bookmark');
vm.$destroy();
vm = createComponent(ITEM_TYPE.GROUP);
vm.$mount();
expect(vm.$el.querySelector('use').getAttribute('xlink:href')).not.toContain('bookmark');
vm.$destroy();
......
......@@ -12,7 +12,7 @@ describe('GroupsService', () => {
describe('getGroups', () => {
it('should return promise for `GET` request on provided endpoint', () => {
spyOn(axios, 'get').and.stub();
jest.spyOn(axios, 'get').mockResolvedValue();
const params = {
page: 2,
filter: 'git',
......@@ -32,7 +32,7 @@ describe('GroupsService', () => {
describe('leaveGroup', () => {
it('should return promise for `DELETE` request on provided endpoint', () => {
spyOn(axios, 'delete').and.stub();
jest.spyOn(axios, 'delete').mockResolvedValue();
service.leaveGroup(mockParentGroupItem.leavePath);
......
......@@ -28,12 +28,12 @@ describe('ProjectsStore', () => {
describe('setGroups', () => {
it('should set groups to state', () => {
const store = new GroupsStore();
spyOn(store, 'formatGroupItem').and.callThrough();
jest.spyOn(store, 'formatGroupItem');
store.setGroups(mockGroups);
expect(store.state.groups.length).toBe(mockGroups.length);
expect(store.formatGroupItem).toHaveBeenCalledWith(jasmine.any(Object));
expect(store.formatGroupItem).toHaveBeenCalledWith(expect.any(Object));
expect(Object.keys(store.state.groups[0]).indexOf('fullName')).toBeGreaterThan(-1);
});
});
......@@ -41,12 +41,12 @@ describe('ProjectsStore', () => {
describe('setSearchedGroups', () => {
it('should set searched groups to state', () => {
const store = new GroupsStore();
spyOn(store, 'formatGroupItem').and.callThrough();
jest.spyOn(store, 'formatGroupItem');
store.setSearchedGroups(mockSearchedGroups);
expect(store.state.groups.length).toBe(mockSearchedGroups.length);
expect(store.formatGroupItem).toHaveBeenCalledWith(jasmine.any(Object));
expect(store.formatGroupItem).toHaveBeenCalledWith(expect.any(Object));
expect(Object.keys(store.state.groups[0]).indexOf('fullName')).toBeGreaterThan(-1);
expect(Object.keys(store.state.groups[0].children[0]).indexOf('fullName')).toBeGreaterThan(
-1,
......@@ -57,11 +57,11 @@ describe('ProjectsStore', () => {
describe('setGroupChildren', () => {
it('should set children to group item in state', () => {
const store = new GroupsStore();
spyOn(store, 'formatGroupItem').and.callThrough();
jest.spyOn(store, 'formatGroupItem');
store.setGroupChildren(mockParentGroupItem, mockRawChildren);
expect(store.formatGroupItem).toHaveBeenCalledWith(jasmine.any(Object));
expect(store.formatGroupItem).toHaveBeenCalledWith(expect.any(Object));
expect(mockParentGroupItem.children.length).toBe(1);
expect(Object.keys(mockParentGroupItem.children[0]).indexOf('fullName')).toBeGreaterThan(-1);
expect(mockParentGroupItem.isOpen).toBeTruthy();
......
......@@ -26,7 +26,7 @@ function factory(propsData = {}) {
},
});
vm.setData({ ref: 'master' });
vm.setData({ escapedRef: 'master' });
}
describe('Repository table row component', () => {
......
......@@ -53,7 +53,7 @@ describe('fetchLogsTree', () => {
client = {
readQuery: () => ({
projectPath: 'gitlab-org/gitlab-foss',
ref: 'master',
escapedRef: 'master',
commits: [],
}),
writeQuery: jest.fn(),
......@@ -86,16 +86,18 @@ describe('fetchLogsTree', () => {
it('calls entry resolver', () =>
fetchLogsTree(client, '', '0', resolver).then(() => {
expect(resolver.resolve).toHaveBeenCalledWith({
__typename: 'LogTreeCommit',
commitPath: 'https://test.com',
committedDate: '2019-01-01',
fileName: 'index.js',
filePath: '/index.js',
message: 'testing message',
sha: '123',
type: 'blob',
});
expect(resolver.resolve).toHaveBeenCalledWith(
expect.objectContaining({
__typename: 'LogTreeCommit',
commitPath: 'https://test.com',
committedDate: '2019-01-01',
fileName: 'index.js',
filePath: '/index.js',
message: 'testing message',
sha: '123',
type: 'blob',
}),
);
}));
it('writes query to client', () =>
......@@ -104,7 +106,7 @@ describe('fetchLogsTree', () => {
query: expect.anything(),
data: {
commits: [
{
expect.objectContaining({
__typename: 'LogTreeCommit',
commitPath: 'https://test.com',
committedDate: '2019-01-01',
......@@ -113,7 +115,7 @@ describe('fetchLogsTree', () => {
message: 'testing message',
sha: '123',
type: 'blob',
},
}),
],
},
});
......
......@@ -4,13 +4,12 @@ import createRouter from '~/repository/router';
describe('Repository router spec', () => {
it.each`
path | branch | component | componentName
${'/'} | ${'master'} | ${IndexPage} | ${'IndexPage'}
${'/tree/master'} | ${'master'} | ${TreePage} | ${'TreePage'}
${'/-/tree/master'} | ${'master'} | ${TreePage} | ${'TreePage'}
${'/-/tree/master/app/assets'} | ${'master'} | ${TreePage} | ${'TreePage'}
${'/-/tree/feature/test-%23/app/assets'} | ${'feature/test-#'} | ${TreePage} | ${'TreePage'}
${'/-/tree/123/app/assets'} | ${'master'} | ${null} | ${'null'}
path | branch | component | componentName
${'/'} | ${'master'} | ${IndexPage} | ${'IndexPage'}
${'/tree/master'} | ${'master'} | ${TreePage} | ${'TreePage'}
${'/-/tree/master'} | ${'master'} | ${TreePage} | ${'TreePage'}
${'/-/tree/master/app/assets'} | ${'master'} | ${TreePage} | ${'TreePage'}
${'/-/tree/123/app/assets'} | ${'master'} | ${null} | ${'null'}
`('sets component as $componentName for path "$path"', ({ path, component, branch }) => {
const router = createRouter('', branch);
......
import { mount, createLocalVue } from '@vue/test-utils';
import { mount } from '@vue/test-utils';
import MrWidgetPipelineContainer from '~/vue_merge_request_widget/components/mr_widget_pipeline_container.vue';
import MrWidgetPipeline from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue';
import ArtifactsApp from '~/vue_merge_request_widget/components/artifacts_list_app.vue';
import { mockStore } from '../mock_data';
const localVue = createLocalVue();
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
describe('MrWidgetPipelineContainer', () => {
let wrapper;
let mock;
const factory = (props = {}) => {
wrapper = mount(localVue.extend(MrWidgetPipelineContainer), {
wrapper = mount(MrWidgetPipelineContainer, {
propsData: {
mr: Object.assign({}, mockStore),
...props,
},
localVue,
});
};
beforeEach(() => {
mock = new MockAdapter(axios);
mock.onGet().reply(200, {});
});
afterEach(() => {
wrapper.destroy();
});
......@@ -30,21 +35,19 @@ describe('MrWidgetPipelineContainer', () => {
it('renders pipeline', () => {
expect(wrapper.find(MrWidgetPipeline).exists()).toBe(true);
expect(wrapper.find(MrWidgetPipeline).props()).toEqual(
jasmine.objectContaining({
pipeline: mockStore.pipeline,
pipelineCoverageDelta: mockStore.pipelineCoverageDelta,
ciStatus: mockStore.ciStatus,
hasCi: mockStore.hasCI,
sourceBranch: mockStore.sourceBranch,
sourceBranchLink: mockStore.sourceBranchLink,
}),
);
expect(wrapper.find(MrWidgetPipeline).props()).toMatchObject({
pipeline: mockStore.pipeline,
pipelineCoverageDelta: mockStore.pipelineCoverageDelta,
ciStatus: mockStore.ciStatus,
hasCi: mockStore.hasCI,
sourceBranch: mockStore.sourceBranch,
sourceBranchLink: mockStore.sourceBranchLink,
});
});
it('renders deployments', () => {
const expectedProps = mockStore.deployments.map(dep =>
jasmine.objectContaining({
expect.objectContaining({
deployment: dep,
showMetrics: false,
}),
......@@ -65,21 +68,19 @@ describe('MrWidgetPipelineContainer', () => {
it('renders pipeline', () => {
expect(wrapper.find(MrWidgetPipeline).exists()).toBe(true);
expect(wrapper.find(MrWidgetPipeline).props()).toEqual(
jasmine.objectContaining({
pipeline: mockStore.mergePipeline,
pipelineCoverageDelta: mockStore.pipelineCoverageDelta,
ciStatus: mockStore.ciStatus,
hasCi: mockStore.hasCI,
sourceBranch: mockStore.targetBranch,
sourceBranchLink: mockStore.targetBranch,
}),
);
expect(wrapper.find(MrWidgetPipeline).props()).toMatchObject({
pipeline: mockStore.mergePipeline,
pipelineCoverageDelta: mockStore.pipelineCoverageDelta,
ciStatus: mockStore.ciStatus,
hasCi: mockStore.hasCI,
sourceBranch: mockStore.targetBranch,
sourceBranchLink: mockStore.targetBranch,
});
});
it('renders deployments', () => {
const expectedProps = mockStore.postMergeDeployments.map(dep =>
jasmine.objectContaining({
expect.objectContaining({
deployment: dep,
showMetrics: true,
}),
......
# frozen_string_literal: true
require 'spec_helper'
describe API::Entities::Branch do
describe '#as_json' do
subject { entity.as_json }
let(:project) { create(:project, :public, :repository) }
let(:repository) { project.repository }
let(:branch) { repository.find_branch('master') }
let(:entity) { described_class.new(branch, project: project) }
it 'includes basic fields', :aggregate_failures do
is_expected.to include(
name: 'master',
commit: a_kind_of(Hash),
merged: false,
protected: false,
developers_can_push: false,
developers_can_merge: false,
can_push: false,
default: true,
web_url: Gitlab::Routing.url_helpers.project_tree_url(project, 'master')
)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::AlertManagement::AlertParams do
let_it_be(:project) { create(:project, :repository, :private) }
describe '.from_generic_alert' do
let(:started_at) { Time.current.change(usec: 0).rfc3339 }
let(:payload) do
{
'title' => 'Alert title',
'description' => 'Description',
'monitoring_tool' => 'Monitoring tool name',
'service' => 'Service',
'hosts' => ['gitlab.com'],
'start_time' => started_at,
'some' => { 'extra' => { 'payload' => 'here' } }
}
end
subject { described_class.from_generic_alert(project: project, payload: payload) }
it 'returns Alert compatible parameters' do
is_expected.to eq(
project_id: project.id,
title: 'Alert title',
description: 'Description',
monitoring_tool: 'Monitoring tool name',
service: 'Service',
hosts: ['gitlab.com'],
payload: payload,
started_at: started_at
)
end
context 'when there are no hosts in the payload' do
let(:payload) { {} }
it 'hosts param is an empty array' do
expect(subject[:hosts]).to be_empty
end
end
end
end
......@@ -12,11 +12,16 @@ describe Projects::Alerting::NotifyService do
shared_examples 'processes incident issues' do |amount|
let(:create_incident_service) { spy }
let(:new_alert) { instance_double(AlertManagement::Alert, id: 503, persisted?: true) }
it 'processes issues' do
expect(AlertManagement::Alert)
.to receive(:create)
.and_return(new_alert)
expect(IncidentManagement::ProcessAlertWorker)
.to receive(:perform_async)
.with(project.id, kind_of(Hash))
.with(project.id, kind_of(Hash), new_alert.id)
.exactly(amount).times
Sidekiq::Testing.inline! do
......@@ -59,6 +64,12 @@ describe Projects::Alerting::NotifyService do
end
end
shared_examples 'NotifyService does not create alert' do
it 'does not create alert' do
expect { subject }.not_to change(AlertManagement::Alert, :count)
end
end
describe '#execute' do
let(:token) { 'invalid-token' }
let(:starts_at) { Time.now.change(usec: 0) }
......@@ -88,6 +99,36 @@ describe Projects::Alerting::NotifyService do
.and_return(incident_management_setting)
end
context 'with valid payload' do
it 'creates AlertManagement::Alert' do
expect { subject }.to change(AlertManagement::Alert, :count).by(1)
end
it 'created alert has all data properly assigned' do
subject
alert = AlertManagement::Alert.last
alert_attributes = alert.attributes.except('id', 'iid', 'created_at', 'updated_at')
expect(alert_attributes).to eq(
'project_id' => project.id,
'issue_id' => nil,
'fingerprint' => nil,
'title' => 'alert title',
'description' => nil,
'monitoring_tool' => nil,
'service' => nil,
'hosts' => [],
'payload' => payload_raw,
'severity' => 'critical',
'status' => 'triggered',
'events' => 1,
'started_at' => alert.started_at,
'ended_at' => nil
)
end
end
it_behaves_like 'does not process incident issues'
context 'issue enabled' do
......@@ -103,6 +144,7 @@ describe Projects::Alerting::NotifyService do
end
it_behaves_like 'does not process incident issues due to error', http_status: :bad_request
it_behaves_like 'NotifyService does not create alert'
end
end
......@@ -115,12 +157,14 @@ describe Projects::Alerting::NotifyService do
context 'with invalid token' do
it_behaves_like 'does not process incident issues due to error', http_status: :unauthorized
it_behaves_like 'NotifyService does not create alert'
end
context 'with deactivated Alerts Service' do
let!(:alerts_service) { create(:alerts_service, :inactive, project: project) }
it_behaves_like 'does not process incident issues due to error', http_status: :forbidden
it_behaves_like 'NotifyService does not create alert'
end
end
end
......
......@@ -6,16 +6,24 @@ describe IncidentManagement::ProcessAlertWorker do
let_it_be(:project) { create(:project) }
describe '#perform' do
let(:alert) { :alert }
let(:create_issue_service) { spy(:create_issue_service) }
let(:alert_management_alert_id) { nil }
let(:alert_payload) { { alert: 'payload' } }
let(:new_issue) { create(:issue, project: project) }
let(:create_issue_service) { instance_double(IncidentManagement::CreateIssueService, execute: new_issue) }
subject { described_class.new.perform(project.id, alert) }
subject { described_class.new.perform(project.id, alert_payload, alert_management_alert_id) }
before do
allow(IncidentManagement::CreateIssueService)
.to receive(:new).with(project, alert_payload)
.and_return(create_issue_service)
end
it 'calls create issue service' do
expect(Project).to receive(:find_by_id).and_call_original
expect(IncidentManagement::CreateIssueService)
.to receive(:new).with(project, :alert)
.to receive(:new).with(project, alert_payload)
.and_return(create_issue_service)
expect(create_issue_service).to receive(:execute)
......@@ -26,7 +34,7 @@ describe IncidentManagement::ProcessAlertWorker do
context 'with invalid project' do
let(:invalid_project_id) { 0 }
subject { described_class.new.perform(invalid_project_id, alert) }
subject { described_class.new.perform(invalid_project_id, alert_payload) }
it 'does not create issues' do
expect(Project).to receive(:find_by_id).and_call_original
......@@ -35,5 +43,54 @@ describe IncidentManagement::ProcessAlertWorker do
subject
end
end
context 'when alert_management_alert_id is present' do
let!(:alert) { create(:alert_management_alert, project: project) }
let(:alert_management_alert_id) { alert.id }
before do
allow(AlertManagement::Alert)
.to receive(:find_by_id)
.with(alert_management_alert_id)
.and_return(alert)
allow(Gitlab::GitLogger).to receive(:warn).and_call_original
end
context 'when alert can be updated' do
it 'updates AlertManagement::Alert#issue_id' do
expect { subject }.to change { alert.reload.issue_id }.to(new_issue.id)
end
it 'does not write a warning to log' do
subject
expect(Gitlab::GitLogger).not_to have_received(:warn)
end
end
context 'when alert cannot be updated' do
before do
# invalidate alert
too_many_hosts = Array.new(AlertManagement::Alert::HOSTS_MAX_LENGTH + 1) { |_| 'host' }
alert.update_columns(hosts: too_many_hosts)
end
it 'does not update AlertManagement::Alert#issue_id' do
expect { subject }.not_to change { alert.reload.issue_id }
end
it 'writes a warning to log' do
subject
expect(Gitlab::GitLogger).to have_received(:warn).with(
message: 'Cannot link an Issue with Alert',
issue_id: new_issue.id,
alert_id: alert_management_alert_id,
alert_errors: { hosts: ['hosts array is over 255 chars'] }
)
end
end
end
end
end