Commit 6df79435 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

Parent 5605efec
......@@ -17,6 +17,11 @@ export default {
type: Boolean,
required: true,
},
isDraggingDesign: {
type: Boolean,
required: false,
default: false,
},
},
data() {
return {
......@@ -121,7 +126,7 @@ export default {
</slot>
<transition name="design-dropzone-fade">
<div
v-show="dragging"
v-show="dragging && !isDraggingDesign"
class="card design-dropzone-border design-dropzone-overlay w-100 h-100 position-absolute d-flex-center p-3 bg-white"
>
<div v-show="!isDragDataValid" class="mw-50 text-center">
......
#import "../fragments/design_list.fragment.graphql"
mutation DesignManagementMove(
$id: DesignManagementDesignID!
$previous: DesignManagementDesignID
$next: DesignManagementDesignID
) {
designManagementMove(input: { id: $id, previous: $previous, next: $next }) {
designCollection {
designs {
nodes {
...DesignListItem
}
}
}
errors
}
}
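For orientation, here is a minimal, hypothetical client-side sketch of issuing the `DesignManagementMove` mutation above. The Apollo client instance, the inlined selection (standing in for the `DesignListItem` fragment), and the example global IDs are illustrative assumptions, not part of this commit:

```javascript
import gql from 'graphql-tag';

// Inline fields are used here instead of the DesignListItem fragment for brevity.
const MOVE_DESIGN = gql`
  mutation DesignManagementMove(
    $id: DesignManagementDesignID!
    $previous: DesignManagementDesignID
    $next: DesignManagementDesignID
  ) {
    designManagementMove(input: { id: $id, previous: $previous, next: $next }) {
      designCollection {
        designs {
          nodes {
            id
            filename
          }
        }
      }
      errors
    }
  }
`;

// `apolloClient` is an assumed, already-configured ApolloClient instance.
// Move one design between two neighbours; omit `previous` or `next` when the
// design is dropped at the start or end of the list. The IDs below are made up.
apolloClient
  .mutate({
    mutation: MOVE_DESIGN,
    variables: {
      id: 'gid://gitlab/DesignManagement::Design/2',
      previous: 'gid://gitlab/DesignManagement::Design/1',
      next: 'gid://gitlab/DesignManagement::Design/3',
    },
  })
  .then(({ data }) => {
    // `errors` is the user-facing error array; an empty array means success.
    console.log(data.designManagementMove.errors);
  });
```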
......@@ -2,6 +2,7 @@
import { GlLoadingIcon, GlButton, GlAlert } from '@gitlab/ui';
import createFlash from '~/flash';
import { s__, sprintf } from '~/locale';
import VueDraggable from 'vuedraggable';
import UploadButton from '../components/upload/button.vue';
import DeleteButton from '../components/delete_button.vue';
import Design from '../components/list/item.vue';
......@@ -9,6 +10,7 @@ import DesignDestroyer from '../components/design_destroyer.vue';
import DesignVersionDropdown from '../components/upload/design_version_dropdown.vue';
import DesignDropzone from '../components/upload/design_dropzone.vue';
import uploadDesignMutation from '../graphql/mutations/upload_design.mutation.graphql';
import moveDesignMutation from '../graphql/mutations/move_design.mutation.graphql';
import permissionsQuery from '../graphql/queries/design_permissions.query.graphql';
import getDesignListQuery from '../graphql/queries/get_design_list.query.graphql';
import allDesignsMixin from '../mixins/all_designs';
......@@ -16,13 +18,18 @@ import {
UPLOAD_DESIGN_ERROR,
EXISTING_DESIGN_DROP_MANY_FILES_MESSAGE,
EXISTING_DESIGN_DROP_INVALID_FILENAME_MESSAGE,
MOVE_DESIGN_ERROR,
designUploadSkippedWarning,
designDeletionError,
} from '../utils/error_messages';
import { updateStoreAfterUploadDesign } from '../utils/cache_update';
import {
updateStoreAfterUploadDesign,
updateDesignsOnStoreAfterReorder,
} from '../utils/cache_update';
import {
designUploadOptimisticResponse,
isValidDesignFile,
moveDesignOptimisticResponse,
} from '../utils/design_management_utils';
import { getFilename } from '~/lib/utils/file_upload';
import { DESIGNS_ROUTE_NAME } from '../router/constants';
......@@ -40,6 +47,7 @@ export default {
DesignVersionDropdown,
DeleteButton,
DesignDropzone,
VueDraggable,
},
mixins: [allDesignsMixin],
apollo: {
......@@ -61,6 +69,8 @@ export default {
},
filesToBeSaved: [],
selectedDesigns: [],
isDraggingDesign: false,
reorderedDesigns: null,
};
},
computed: {
......@@ -254,11 +264,48 @@ export default {
toggleOffPasteListener() {
document.removeEventListener('paste', this.onDesignPaste);
},
designMoveVariables(newIndex, element) {
const variables = {
id: element.id,
};
if (newIndex > 0) {
variables.previous = this.reorderedDesigns[newIndex - 1].id;
}
if (newIndex < this.reorderedDesigns.length - 1) {
variables.next = this.reorderedDesigns[newIndex + 1].id;
}
return variables;
},
reorderDesigns({ moved: { newIndex, element } }) {
this.$apollo
.mutate({
mutation: moveDesignMutation,
variables: this.designMoveVariables(newIndex, element),
update: (store, { data: { designManagementMove } }) => {
return updateDesignsOnStoreAfterReorder(
store,
designManagementMove,
this.projectQueryBody,
);
},
optimisticResponse: moveDesignOptimisticResponse(this.reorderedDesigns),
})
.catch(() => {
createFlash(MOVE_DESIGN_ERROR);
});
},
onDesignMove(designs) {
this.reorderedDesigns = designs;
},
},
beforeRouteUpdate(to, from, next) {
this.selectedDesigns = [];
next();
},
dragOptions: {
animation: 200,
ghostClass: 'gl-visibility-hidden',
},
};
</script>
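The `designMoveVariables` method above only sends neighbour IDs that exist: `previous` is omitted when the design lands in the first position and `next` when it lands in the last. A standalone sketch of that logic (hypothetical function name, mirroring the method above) with a worked example:

```javascript
// Hypothetical standalone version of designMoveVariables: given the
// already-reordered list and the new index of the moved design, build the
// mutation variables, leaving out the missing neighbour at either edge.
function moveVariables(reorderedDesigns, newIndex, element) {
  const variables = { id: element.id };
  if (newIndex > 0) {
    variables.previous = reorderedDesigns[newIndex - 1].id;
  }
  if (newIndex < reorderedDesigns.length - 1) {
    variables.next = reorderedDesigns[newIndex + 1].id;
  }
  return variables;
}

const reordered = [{ id: 'design-2' }, { id: 'design-1' }, { id: 'design-3' }];

// design-1 was dragged to the middle: both neighbours are present.
console.log(moveVariables(reordered, 1, { id: 'design-1' }));
// => { id: 'design-1', previous: 'design-2', next: 'design-3' }

// design-2 sits at index 0: no `previous` is sent.
console.log(moveVariables(reordered, 0, { id: 'design-2' }));
// => { id: 'design-2', next: 'design-1' }
```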
......@@ -312,20 +359,35 @@ export default {
<gl-alert v-else-if="error" variant="danger" :dismissible="false">
{{ __('An error occurred while loading designs. Please try again.') }}
</gl-alert>
<ol v-else class="list-unstyled row">
<li :class="designDropzoneWrapperClass" data-testid="design-dropzone-wrapper">
<design-dropzone
:class="{ 'design-list-item design-list-item-new': !isDesignListEmpty }"
:has-designs="hasDesigns"
@change="onUploadDesign"
/>
</li>
<li v-for="design in designs" :key="design.id" class="col-md-6 col-lg-3 gl-mb-3">
<vue-draggable
v-else
:value="designs"
:disabled="!isLatestVersion"
v-bind="$options.dragOptions"
tag="ol"
draggable=".js-design-tile"
class="list-unstyled row"
@start="isDraggingDesign = true"
@end="isDraggingDesign = false"
@change="reorderDesigns"
@input="onDesignMove"
>
<li
v-for="design in designs"
:key="design.id"
class="col-md-6 col-lg-3 gl-mb-3 gl-bg-transparent gl-shadow-none js-design-tile"
>
<design-dropzone
:has-designs="hasDesigns"
:is-dragging-design="isDraggingDesign"
@change="onExistingDesignDropzoneChange($event, design.filename)"
><design v-bind="design" :is-uploading="isDesignToBeSaved(design.filename)"
/></design-dropzone>
>
<design
v-bind="design"
:is-uploading="isDesignToBeSaved(design.filename)"
class="gl-bg-white"
/>
</design-dropzone>
<input
v-if="canSelectDesign(design.filename)"
......@@ -335,7 +397,17 @@ export default {
@change="changeSelectedDesigns(design.filename)"
/>
</li>
</ol>
<template #header>
<li :class="designDropzoneWrapperClass" data-testid="design-dropzone-wrapper">
<design-dropzone
:is-dragging-design="isDraggingDesign"
:class="{ 'design-list-item design-list-item-new': !isDesignListEmpty }"
:has-designs="hasDesigns"
@change="onUploadDesign"
/>
</li>
</template>
</vue-draggable>
</div>
<router-view :key="$route.fullPath" />
</div>
......
......@@ -203,6 +203,15 @@ const addNewDesignToStore = (store, designManagementUpload, query) => {
});
};
const moveDesignInStore = (store, designManagementMove, query) => {
const data = store.readQuery(query);
data.project.issue.designCollection.designs = designManagementMove.designCollection.designs;
store.writeQuery({
...query,
data,
});
};
const onError = (data, message) => {
createFlash(message);
throw new Error(data.errors);
......@@ -264,3 +273,11 @@ export const updateStoreAfterUploadDesign = (store, data, query) => {
addNewDesignToStore(store, data, query);
}
};
export const updateDesignsOnStoreAfterReorder = (store, data, query) => {
if (hasErrors(data)) {
createFlash(data.errors[0]);
} else {
moveDesignInStore(store, data, query);
}
};
......@@ -85,7 +85,8 @@ export const designUploadOptimisticResponse = files => {
/**
* Generates optimistic response for a design upload mutation
* @param {Array<File>} files
* @param {Object} note
* @param {Object} position
*/
export const updateImageDiffNoteOptimisticResponse = (note, { position }) => ({
// False positive i18n lint: https://gitlab.com/gitlab-org/frontend/eslint-plugin-i18n/issues/26
......@@ -104,6 +105,27 @@ export const updateImageDiffNoteOptimisticResponse = (note, { position }) => ({
},
});
/**
 * Generates optimistic response for a design move (reorder) mutation
* @param {Array} designs
*/
export const moveDesignOptimisticResponse = designs => ({
// False positive i18n lint: https://gitlab.com/gitlab-org/frontend/eslint-plugin-i18n/issues/26
// eslint-disable-next-line @gitlab/require-i18n-strings
__typename: 'Mutation',
designManagementMove: {
__typename: 'DesignManagementMovePayload',
designCollection: {
__typename: 'DesignCollection',
designs: {
__typename: 'DesignConnection',
nodes: designs,
},
},
errors: [],
},
});
const normalizeAuthor = author => ({
...author,
web_url: author.webUrl,
......
......@@ -40,6 +40,10 @@ export const EXISTING_DESIGN_DROP_INVALID_FILENAME_MESSAGE = __(
'You must upload a file with the same file name when dropping onto an existing design.',
);
export const MOVE_DESIGN_ERROR = __(
'Something went wrong when reordering designs. Please try again',
);
const MAX_SKIPPED_FILES_LISTINGS = 5;
const oneDesignSkippedMessage = filename =>
......
......@@ -242,6 +242,7 @@ export default {
<gl-button
class="gl-my-3 gl-mr-5 create-incident-button"
data-testid="createIncidentBtn"
data-qa-selector="create_incident_button"
:loading="redirecting"
:disabled="redirecting"
category="primary"
......
......@@ -23,6 +23,8 @@ import IssueAssignees from '~/vue_shared/components/issue/issue_assignees.vue';
import { isScopedLabel } from '~/lib/utils/common_utils';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { convertToCamelCase } from '~/lib/utils/text_utility';
export default {
i18n: {
openedAgo: __('opened %{timeAgoString} by %{user}'),
......@@ -34,6 +36,8 @@ export default {
GlLabel,
GlIcon,
GlSprintf,
IssueHealthStatus: () =>
import('ee_component/related_items_tree/components/issue_health_status.vue'),
},
directives: {
GlTooltip,
......@@ -195,6 +199,9 @@ export default {
},
];
},
healthStatus() {
return convertToCamelCase(this.issuable.health_status);
},
},
mounted() {
// TODO: Refactor user popover to use its own component instead of
......@@ -288,7 +295,7 @@ export default {
</div>
<div class="issuable-info">
<span class="js-ref-path">
<span class="js-ref-path gl-mr-4 mr-sm-0">
<span
v-if="isJiraIssue"
class="svg-container jira-logo-container"
......@@ -298,7 +305,7 @@ export default {
{{ referencePath }}
</span>
<span data-testid="openedByMessage" class="gl-display-none d-sm-inline-block gl-mr-2">
<span data-testid="openedByMessage" class="gl-display-none d-sm-inline-block gl-mr-4">
&middot;
<gl-sprintf
:message="isJiraIssue ? $options.i18n.openedAgoJira : $options.i18n.openedAgo"
......@@ -321,7 +328,7 @@ export default {
<gl-link
v-if="issuable.milestone"
v-gl-tooltip
class="gl-display-none d-sm-inline-block gl-mr-2 js-milestone"
class="gl-display-none d-sm-inline-block gl-mr-4 js-milestone milestone"
:href="milestoneLink"
:title="milestoneTooltipText"
>
......@@ -332,7 +339,7 @@ export default {
<span
v-if="dueDate"
v-gl-tooltip
class="gl-display-none d-sm-inline-block gl-mr-2 js-due-date"
class="gl-display-none d-sm-inline-block gl-mr-4 js-due-date"
:class="{ cred: isOverdue }"
:title="__('Due date')"
>
......@@ -340,6 +347,24 @@ export default {
{{ dueDateWords }}
</span>
<span
v-if="hasWeight"
v-gl-tooltip
:title="__('Weight')"
class="gl-display-none d-sm-inline-block gl-mr-4"
data-testid="weight"
data-qa-selector="issuable_weight_content"
>
<gl-icon name="weight" class="align-text-bottom" />
{{ issuable.weight }}
</span>
<issue-health-status
v-if="issuable.health_status"
:health-status="healthStatus"
class="gl-mr-4 issuable-tag-valign"
/>
<gl-label
v-for="label in issuable.labels"
:key="label.id"
......@@ -351,21 +376,9 @@ export default {
:title="label.name"
:scoped="isScoped(label)"
size="sm"
class="gl-mr-2"
class="gl-mr-2 issuable-tag-valign"
>{{ label.name }}</gl-label
>
<span
v-if="hasWeight"
v-gl-tooltip
:title="__('Weight')"
class="gl-display-none d-sm-inline-block"
data-testid="weight"
data-qa-selector="issuable_weight_content"
>
<gl-icon name="weight" class="align-text-bottom" />
{{ issuable.weight }}
</span>
</div>
</div>
......
......@@ -45,7 +45,7 @@ export default {
};
</script>
<template>
<gl-new-dropdown :text="$options.labels.defaultLabel" category="primary" variant="info">
<gl-new-dropdown right :text="$options.labels.defaultLabel" category="primary" variant="info">
<div class="pb-2 mx-1">
<template v-if="sshLink">
<gl-new-dropdown-header>{{ $options.labels.ssh }}</gl-new-dropdown-header>
......
......@@ -81,27 +81,6 @@ $item-remove-button-space: 42px;
max-width: 0;
}
.status {
&-at-risk {
color: $red-500;
background-color: $red-100;
}
&-needs-attention {
color: $orange-700;
background-color: $orange-100;
}
&-on-track {
color: $green-600;
background-color: $green-100;
}
}
.gl-label-text {
font-weight: $gl-font-weight-bold;
}
.bullet-separator {
font-size: 9px;
color: $gray-200;
......
......@@ -804,6 +804,10 @@
}
}
}
.milestone {
color: $gray-700;
}
}
@media(max-width: map-get($grid-breakpoints, lg)-1) {
......
......@@ -37,8 +37,8 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
push_frontend_feature_flag(:file_identifier_hash)
push_frontend_feature_flag(:batch_suggestions, @project, default_enabled: true)
push_frontend_feature_flag(:auto_expand_collapsed_diffs, @project, default_enabled: true)
push_frontend_feature_flag(:hide_jump_to_next_unresolved_in_threads, @project)
push_frontend_feature_flag(:approvals_commented_by, @project, default_enabled: true)
push_frontend_feature_flag(:hide_jump_to_next_unresolved_in_threads, default_enabled: true)
end
before_action do
......
# frozen_string_literal: true
module Mutations
module Boards
module Lists
class Base < BaseMutation
include Mutations::ResolvesIssuable
argument :board_id, ::Types::GlobalIDType[::Board],
required: true,
description: 'The Global ID of the issue board to mutate'
field :list,
Types::BoardListType,
null: true,
description: 'List of the issue board'
authorize :admin_list
private
def find_object(id:)
GitlabSchema.object_from_id(id, expected_type: ::Board)
end
end
end
end
end
# frozen_string_literal: true
module Mutations
module Boards
module Lists
class Create < Base
graphql_name 'BoardListCreate'
argument :backlog, GraphQL::BOOLEAN_TYPE,
required: false,
description: 'Create the backlog list'
argument :label_id, ::Types::GlobalIDType[::Label],
required: false,
description: 'ID of an existing label'
def ready?(**args)
if args.slice(*mutually_exclusive_args).size != 1
arg_str = mutually_exclusive_args.map { |x| x.to_s.camelize(:lower) }.join(' or ')
raise Gitlab::Graphql::Errors::ArgumentError, "one and only one of #{arg_str} is required"
end
super
end
def resolve(**args)
board = authorized_find!(id: args[:board_id])
params = create_list_params(args)
authorize_list_type_resource!(board, params)
list = create_list(board, params)
{
list: list.valid? ? list : nil,
errors: errors_on_object(list)
}
end
private
def authorize_list_type_resource!(board, params)
return unless params[:label_id]
labels = ::Labels::AvailableLabelsService.new(current_user, board.resource_parent, params)
.filter_labels_ids_in_param(:label_id)
unless labels.present?
raise Gitlab::Graphql::Errors::ArgumentError, 'Label not found!'
end
end
def create_list(board, params)
create_list_service =
::Boards::Lists::CreateService.new(board.resource_parent, current_user, params)
create_list_service.execute(board)
end
def create_list_params(args)
params = args.slice(*mutually_exclusive_args).with_indifferent_access
params[:label_id] = GitlabSchema.parse_gid(params[:label_id]).model_id if params[:label_id]
params
end
def mutually_exclusive_args
[:backlog, :label_id]
end
end
end
end
end
......@@ -15,6 +15,7 @@ module Types
mount_mutation Mutations::AwardEmojis::Remove
mount_mutation Mutations::AwardEmojis::Toggle
mount_mutation Mutations::Boards::Issues::IssueMoveList
mount_mutation Mutations::Boards::Lists::Create
mount_mutation Mutations::Branches::Create, calls_gitaly: true
mount_mutation Mutations::Commits::Create, calls_gitaly: true
mount_mutation Mutations::Discussions::ToggleResolve
......
......@@ -418,22 +418,6 @@ module Ci
false
end
def ordered_stages
if ::Gitlab::Ci::Features.atomic_processing?(project)
# The `Ci::Stage` contains all up-to date data
# as atomic processing updates all data in-bulk
stages
elsif complete?
# The `Ci::Stage` contains up-to date data only for `completed` pipelines
# this is due to asynchronous processing of pipeline, and stages possibly
# not updated inline with processing of pipeline
stages
else
# In other cases, we need to calculate stages dynamically
legacy_stages
end
end
def legacy_stages_using_sql
# TODO, this needs refactoring, see gitlab-foss#26481.
stages_query = statuses
......@@ -470,6 +454,7 @@ module Ci
triggered_pipelines.preload(:source_job)
end
# TODO: Remove usage of this method in templates
def legacy_stages
if ::Gitlab::Ci::Features.composite_status?(project)
legacy_stages_using_composite_status
......@@ -1048,10 +1033,6 @@ module Ci
@persistent_ref ||= PersistentRef.new(pipeline: self)
end
def find_successful_build_ids_by_names(names)
statuses.latest.success.where(name: names).pluck(:id)
end
def cacheable?
Ci::PipelineEnums.ci_config_sources.key?(config_source.to_sym)
end
......
......@@ -22,8 +22,22 @@ module Ci
validates :size, presence: true, numericality: { less_than_or_equal_to: FILE_SIZE_LIMIT }
validates :file_type, presence: true
mount_uploader :file, Ci::PipelineArtifactUploader
before_save :set_size, if: :file_changed?
after_save :update_file_store, if: :saved_change_to_file?
enum file_type: {
code_coverage: 1
}
def set_size
self.size = file.size
end
def update_file_store
# The file.object_store is set during `uploader.store!`
# which happens after object is inserted/updated
self.update_column(:file_store, file.object_store)
end
end
end
......@@ -100,9 +100,7 @@ class CommitStatus < ApplicationRecord
# will not be refreshed to pick the change
self.processed_will_change!
if !::Gitlab::Ci::Features.atomic_processing?(project)
self.processed = nil
elsif latest?
if latest?
self.processed = false # force refresh of all dependent ones
elsif retried?
self.processed = true # retried are considered to be already processed
......@@ -164,8 +162,7 @@ class CommitStatus < ApplicationRecord
next unless commit_status.project
commit_status.run_after_commit do
schedule_stage_and_pipeline_update
PipelineProcessWorker.perform_async(pipeline_id)
ExpireJobCacheWorker.perform_async(id)
end
end
......@@ -186,14 +183,6 @@ class CommitStatus < ApplicationRecord
select(:name)
end
def self.status_for_prior_stages(index, project:)
before_stage(index).latest.slow_composite_status(project: project) || 'success'
end
def self.status_for_names(names, project:)
where(name: names).latest.slow_composite_status(project: project) || 'success'
end
def self.update_as_processed!
# Marks items as processed
# we do not increase `lock_version`, as we are the one
......@@ -286,21 +275,6 @@ class CommitStatus < ApplicationRecord
def unrecoverable_failure?
script_failure? || missing_dependency_failure? || archived_failure? || scheduler_failure? || data_integrity_failure?
end
def schedule_stage_and_pipeline_update
if ::Gitlab::Ci::Features.atomic_processing?(project)
# Atomic Processing requires only single Worker
PipelineProcessWorker.perform_async(pipeline_id, [id])
else
if complete? || manual?
PipelineProcessWorker.perform_async(pipeline_id, [id])
else
PipelineUpdateWorker.perform_async(pipeline_id)
end
StageUpdateWorker.perform_async(stage_id)
end
end
end
CommitStatus.prepend_if_ee('::EE::CommitStatus')
......@@ -149,6 +149,7 @@ class MergeRequestDiff < ApplicationRecord
# All diff information is collected from repository after object is created.
# It allows you to override variables like head_commit_sha before getting diff.
after_create :save_git_content, unless: :importing?
after_create :set_count_columns
after_create_commit :set_as_latest_diff, unless: :importing?
after_save :update_external_diff_store
......@@ -642,6 +643,7 @@ class MergeRequestDiff < ApplicationRecord
rows = build_merge_request_diff_files(diff_collection)
create_merge_request_diff_files(rows)
self.class.uncached { merge_request_diff_files.reset }
end
# Set our state to 'overflow' to make the #empty? and #collected?
......@@ -657,12 +659,14 @@ class MergeRequestDiff < ApplicationRecord
def save_commits
MergeRequestDiffCommit.create_bulk(self.id, compare.commits.reverse)
self.class.uncached { merge_request_diff_commits.reset }
end
# merge_request_diff_commits.reset is preferred way to reload associated
# objects but it returns cached result for some reason in this case
# we can circumvent that by specifying that we need an uncached reload
commits = self.class.uncached { merge_request_diff_commits.reset }
self.commits_count = commits.size
def set_count_columns
update_columns(
commits_count: merge_request_diff_commits.size,
files_count: merge_request_diff_files.size
)
end
def repository
......
......@@ -25,7 +25,7 @@ class MergeRequests::PipelineEntity < Grape::Entity
pipeline.detailed_status(request.current_user)
end
expose :ordered_stages, as: :stages, using: StageEntity
expose :stages, using: StageEntity
end
# Coverage isn't always necessary (e.g. when displaying project pipelines in
......
......@@ -36,7 +36,7 @@ class PipelineEntity < Grape::Entity
expose :details do
expose :detailed_status, as: :status, with: DetailedStatusEntity
expose :ordered_stages, as: :stages, using: StageEntity
expose :stages, using: StageEntity
expose :duration
expose :finished_at
expose :name
......
......@@ -24,8 +24,8 @@ class TriggeredPipelineEntity < Grape::Entity
expose :details do
expose :detailed_status, as: :status, with: DetailedStatusEntity
expose :ordered_stages,
as: :stages, using: StageEntity,
expose :stages,
using: StageEntity,
if: -> (_, opts) { can_read_details? && expand?(opts) }
end
......
......@@ -32,7 +32,7 @@ module Ci
Ci::ProcessPipelineService
.new(pipeline)
.execute(nil, initial_process: true)
.execute
pipeline_created_counter.increment(source: :webide)
end
......
# frozen_string_literal: true
module Ci
module PipelineProcessing
class LegacyProcessingService
include Gitlab::Utils::StrongMemoize
attr_reader :pipeline
def initialize(pipeline)
@pipeline = pipeline
end
def execute(trigger_build_ids = nil, initial_process: false)
success = process_stages_for_stage_scheduling
# we evaluate dependent needs,
# only when another job has finished
success = process_dag_builds_without_needs || success if initial_process
success = process_dag_builds_with_needs(trigger_build_ids) || success
@pipeline.update_legacy_status
success
end
private
def process_stages_for_stage_scheduling
stage_indexes_of_created_stage_scheduled_processables.flat_map do |index|
process_stage_for_stage_scheduling(index)
end.any?
end
def process_stage_for_stage_scheduling(index)
current_status = status_for_prior_stages(index)
return unless Ci::HasStatus::COMPLETED_STATUSES.include?(current_status)
created_stage_scheduled_processables_in_stage(index).find_each.select do |build|
process_build(build, current_status)
end.any?
end
def process_dag_builds_without_needs
created_processables.scheduling_type_dag.without_needs.each do |build|
process_build(build, 'success')
end
end
def process_dag_builds_with_needs(trigger_build_ids)
return false unless trigger_build_ids.present?
# we find processables that are dependent:
# 1. because of current dependency,
trigger_build_names = pipeline.processables.latest
.for_ids(trigger_build_ids).names
# 2. does not have builds that not yet complete
incomplete_build_names = pipeline.processables.latest
.incomplete.names
# Each found processable is guaranteed here to have completed status
created_processables
.scheduling_type_dag
.with_needs(trigger_build_names)
.without_needs(incomplete_build_names)
.find_each
.map(&method(:process_dag_build_with_needs))
.any?
end
def process_dag_build_with_needs(build)
current_status = status_for_build_needs(build.needs.map(&:name))
return unless Ci::HasStatus::COMPLETED_STATUSES.include?(current_status)
process_build(build, current_status)
end
def process_build(build, current_status)
Gitlab::OptimisticLocking.retry_lock(build) do |subject|
Ci::ProcessBuildService.new(project, subject.user)
.execute(subject, current_status)
end
end
def status_for_prior_stages(index)
pipeline.processables.status_for_prior_stages(index, project: pipeline.project)
end
def status_for_build_needs(needs)
pipeline.processables.status_for_names(needs, project: pipeline.project)
end
# rubocop: disable CodeReuse/ActiveRecord
def stage_indexes_of_created_stage_scheduled_processables
created_stage_scheduled_processables.order(:stage_idx)
.pluck(Arel.sql('DISTINCT stage_idx'))
end
# rubocop: enable CodeReuse/ActiveRecord
def created_stage_scheduled_processables_in_stage(index)
created_stage_scheduled_processables
.with_preloads
.for_stage(index)
end
def created_stage_scheduled_processables
created_processables.scheduling_type_stage
end
def created_processables
pipeline.processables.created
end
def project
pipeline.project
end
end
end
end
......@@ -8,20 +8,14 @@ module Ci
@pipeline = pipeline
end
def execute(trigger_build_ids = nil, initial_process: false)
def execute
increment_processing_counter
update_retried
if ::Gitlab::Ci::Features.atomic_processing?(pipeline.project)
Ci::PipelineProcessing::AtomicProcessingService
.new(pipeline)
.execute
else
Ci::PipelineProcessing::LegacyProcessingService
.new(pipeline)
.execute(trigger_build_ids, initial_process: initial_process)
end
Ci::PipelineProcessing::AtomicProcessingService
.new(pipeline)
.execute
end
def metrics
......
......@@ -22,12 +22,6 @@ module Ci
needs += build.needs.map(&:name)
end
# In a DAG, the dependencies may have already completed. Figure out
# which builds have succeeded and use them to update the pipeline. If we don't
# do this, then builds will be stuck in the created state since their dependencies
# will never run.
completed_build_ids = pipeline.find_successful_build_ids_by_names(needs) if needs.any?
pipeline.builds.latest.skipped.find_each do |skipped|
retry_optimistic_lock(skipped) { |build| build.process }
end
......@@ -38,7 +32,7 @@ module Ci
Ci::ProcessPipelineService
.new(pipeline)
.execute(completed_build_ids, initial_process: true)
.execute
end
end
end
......@@ -30,7 +30,7 @@ module Labels
end
def filter_labels_ids_in_param(key)
ids = params[key].to_a
ids = Array.wrap(params[key])
return [] if ids.empty?
# rubocop:disable CodeReuse/ActiveRecord
......
# frozen_string_literal: true
module Ci
class PipelineArtifactUploader < GitlabUploader
include ObjectStorage::Concern
storage_options Gitlab.config.artifacts
alias_method :upload, :model
def store_dir
dynamic_segment
end
private
def dynamic_segment
Gitlab::HashedPath.new('pipelines', model.pipeline_id, 'artifacts', model.id, root_hash: model.project_id)
end
end
end
......@@ -236,7 +236,7 @@
- if project_nav_tab?(:incidents)
= nav_link(controller: :incidents) do
= link_to project_incidents_path(@project), title: _('Incidents') do
= link_to project_incidents_path(@project), title: _('Incidents'), data: { qa_selector: 'operations_incidents_link' } do
%span
= _('Incidents')
......
......@@ -10,11 +10,13 @@ class PipelineProcessWorker # rubocop:disable Scalability/IdempotentWorker
loggable_arguments 1
# rubocop: disable CodeReuse/ActiveRecord
def perform(pipeline_id, build_ids = nil)
# `_build_ids` is deprecated and will be removed in 14.0
# See: https://gitlab.com/gitlab-org/gitlab/-/issues/232806
def perform(pipeline_id, _build_ids = nil)
Ci::Pipeline.find_by(id: pipeline_id).try do |pipeline|
Ci::ProcessPipelineService
.new(pipeline)
.execute(build_ids)
.execute
end
end
# rubocop: enable CodeReuse/ActiveRecord
......
# frozen_string_literal: true
# This worker is deprecated and will be removed in 14.0
# See: https://gitlab.com/gitlab-org/gitlab/-/issues/232806
class PipelineUpdateWorker
include ApplicationWorker
include PipelineQueue
......@@ -9,7 +11,7 @@ class PipelineUpdateWorker
idempotent!
def perform(pipeline_id)
Ci::Pipeline.find_by_id(pipeline_id)&.update_legacy_status
def perform(_pipeline_id)
# no-op
end
end
---
title: Remove Jump to next unresolved thread button in merge request threads
merge_request: 38375
author:
type: deprecated
---
title: 'Add mutation to create a label or default backlog list for an issue board'
merge_request: 31233
author:
type: added
---
title: Remove legacy pipeline processing service and FF ci_atomic_processing
merge_request: 37339
author:
type: other
---
title: Add a cache column for the number of changed files in a merge request diff
merge_request: 38936
author:
type: changed
---
title: Right-aligned Clone dropdown for snippets
merge_request: 39446
author:
type: fixed
---
title: Resolve Allow the ability to re-order designs
merge_request: 37686
author:
type: added
---
title: Add database migrations to ensure Geo replicates all package files when sync
object storage is disabled
merge_request: 38822
author:
type: added
require './spec/support/sidekiq_middleware'
Gitlab::Seeder.quiet do
admin_user = User.find(1)
admin_user = User.admins.first
Project.not_mass_generated.each do |project|
params = {
......
# frozen_string_literal: true
class AddDefaultValueForFileStoreToPackageFiles < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
with_lock_retries do
change_column_default :packages_package_files, :file_store, 1
end
end
def down
with_lock_retries do
change_column_default :packages_package_files, :file_store, nil
end
end
end
# frozen_string_literal: true
class AddFileCountToMergeRequestDiffs < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
add_column :merge_request_diffs, :files_count, :smallint
end
end
def down
with_lock_retries do
remove_column :merge_request_diffs, :files_count, :smallint
end
end
end
# frozen_string_literal: true
class AddNotNullConstraintOnFileStoreToPackageFiles < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_not_null_constraint(:packages_package_files, :file_store, validate: false)
end
def down
remove_not_null_constraint(:packages_package_files, :file_store)
end
end
# frozen_string_literal: true
class AddPartialIndexOnIdToPackageFiles < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'index_packages_package_files_file_store_is_null'
disable_ddl_transaction!
def up
add_concurrent_index :packages_package_files, :id, where: 'file_store IS NULL', name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :packages_package_files, INDEX_NAME
end
end
# frozen_string_literal: true
class MigrateNullPackageFilesFileStoreToLocalValue < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
JOB_INTERVAL = 2.minutes + 5.seconds
BATCH_SIZE = 5_000
MIGRATION = 'SetNullPackageFilesFileStoreToLocalValue'
disable_ddl_transaction!
class PackageFile < ActiveRecord::Base
self.table_name = 'packages_package_files'
include ::EachBatch
end
def up
# On GitLab.com, there are 2M package files. None have NULL file_store
# because they are all object stored. This is a no-op for GitLab.com.
#
# If a customer had 2M package files with NULL file_store, with batches of
# 5000 and a background migration job interval of 2m 5s, then 400 jobs would
# be scheduled over 14 hours.
#
# The index `index_packages_package_files_file_store_is_null` is
# expected to be used here and in the jobs.
#
# queue_background_migration_jobs_by_range_at_intervals is not used because
# it would enqueue 18.6K jobs and we have an index for getting these ranges.
PackageFile.where(file_store: nil).each_batch(of: BATCH_SIZE) do |batch, index|
range = batch.pluck(Arel.sql("MIN(id)"), Arel.sql("MAX(id)")).first
delay = index * JOB_INTERVAL
migrate_in(delay.seconds, MIGRATION, [*range])
end
end
def down
# noop
end
end
# frozen_string_literal: true
class BackfillMergeRequestDiffsFilesCounts < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
# There are ~72 million records on GitLab.com at time of writing, so go fast
BATCH_SIZE = 10_000
DELAY_INTERVAL = 2.minutes.to_i
MIGRATION = 'SetMergeRequestDiffFilesCount'
disable_ddl_transaction!
class MergeRequestDiff < ActiveRecord::Base
include EachBatch
self.table_name = 'merge_request_diffs'
end
def up
queue_background_migration_jobs_by_range_at_intervals(
MergeRequestDiff, MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE
)
end
def down
# no-op
end
end
4b1f048dfaea1887b20fdc421a08ab6206ab9944201e6517e808b27214be926c
\ No newline at end of file
acc9d1ab79e277e55910b17c5653088721371fa12b2cf1a5677035fe3b422fc8
\ No newline at end of file
b05401408faabafa4c2ef5fd241fea88f78fd423a3c462be89ccdbd8d988cfc8
\ No newline at end of file
5467b9c38186a30d333d9ccff0aeb4a06f97f17fec12488aca5e0a619b11831b
\ No newline at end of file
fdb18abe24003a75c76019fccf5ca0aa65a900bc2a86eb578012bc5a032730cf
\ No newline at end of file
5152e094538b498fbe28f3fd11f6c1b9a9c77dc89ac0079cb39a6090a567db68
\ No newline at end of file
......@@ -13068,7 +13068,8 @@ CREATE TABLE public.merge_request_diffs (
commits_count integer,
external_diff character varying,
external_diff_store integer DEFAULT 1,
stored_externally boolean
stored_externally boolean,
files_count smallint
);
CREATE SEQUENCE public.merge_request_diffs_id_seq
......@@ -13873,7 +13874,7 @@ CREATE TABLE public.packages_package_files (
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
size bigint,
file_store integer,
file_store integer DEFAULT 1,
file_md5 bytea,
file_sha1 bytea,
file_name character varying NOT NULL,
......@@ -17698,6 +17699,9 @@ ALTER TABLE public.design_management_designs
ALTER TABLE public.vulnerability_scanners
ADD CONSTRAINT check_37608c9db5 CHECK ((char_length(vendor) <= 255)) NOT VALID;
ALTER TABLE public.packages_package_files
ADD CONSTRAINT check_4c5e6bb0b3 CHECK ((file_store IS NOT NULL)) NOT VALID;
ALTER TABLE public.merge_request_diffs
ADD CONSTRAINT check_93ee616ac9 CHECK ((external_diff_store IS NOT NULL)) NOT VALID;
......@@ -20204,6 +20208,8 @@ CREATE INDEX index_packages_maven_metadata_on_package_id_and_path ON public.pack
CREATE INDEX index_packages_nuget_dl_metadata_on_dependency_link_id ON public.packages_nuget_dependency_link_metadata USING btree (dependency_link_id);
CREATE INDEX index_packages_package_files_file_store_is_null ON public.packages_package_files USING btree (id) WHERE (file_store IS NULL);
CREATE INDEX index_packages_package_files_on_file_store ON public.packages_package_files USING btree (file_store);
CREATE INDEX index_packages_package_files_on_package_id_and_file_name ON public.packages_package_files USING btree (package_id, file_name);
......
......@@ -5,7 +5,7 @@ group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
---
# Reference architecture: up to 10,000 users
# Reference architecture: up to 10,000 users **(PREMIUM ONLY)**
This page describes GitLab reference architecture for up to 10,000 users. For a
full list of reference architectures, see
......
......@@ -4,7 +4,7 @@ group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
---
# Reference architecture: up to 1,000 users
# Reference architecture: up to 1,000 users **(CORE ONLY)**
This page describes GitLab reference architecture for up to 1,000 users. For a
full list of reference architectures, see
......
......@@ -5,7 +5,7 @@ group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
---
# Reference architecture: up to 25,000 users
# Reference architecture: up to 25,000 users **(PREMIUM ONLY)**
This page describes GitLab reference architecture for up to 25,000 users. For a
full list of reference architectures, see
......
......@@ -5,7 +5,7 @@ group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
---
# Reference architecture: up to 2,000 users
# Reference architecture: up to 2,000 users **(CORE ONLY)**
This page describes GitLab reference architecture for up to 2,000 users.
For a full list of reference architectures, see
......
......@@ -5,7 +5,7 @@ group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
---
# Reference architecture: up to 3,000 users
# Reference architecture: up to 3,000 users **(PREMIUM ONLY)**
This page describes GitLab reference architecture for up to 3,000 users. For a
full list of reference architectures, see
......
......@@ -5,7 +5,7 @@ group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
---
# Reference architecture: up to 50,000 users
# Reference architecture: up to 50,000 users **(PREMIUM ONLY)**
This page describes GitLab reference architecture for up to 50,000 users. For a
full list of reference architectures, see
......
......@@ -5,7 +5,7 @@ group: Distribution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers
---
# Reference architecture: up to 5,000 users
# Reference architecture: up to 5,000 users **(PREMIUM ONLY)**
This page describes GitLab reference architecture for up to 5,000 users. For a
full list of reference architectures, see
......
......@@ -1165,6 +1165,11 @@ input BoardEpicIssueInput {
weight: String
}
"""
Identifier of Board
"""
scalar BoardID
"""
Represents a list for an issue board
"""
......@@ -1270,6 +1275,51 @@ type BoardListConnection {
pageInfo: PageInfo!
}
"""
Autogenerated input type of BoardListCreate
"""
input BoardListCreateInput {
"""
Create the backlog list
"""
backlog: Boolean
"""
The Global ID of the issue board to mutate
"""
boardId: BoardID!
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
ID of an existing label
"""
labelId: LabelID
}
"""
Autogenerated return type of BoardListCreate
"""
type BoardListCreatePayload {
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
Errors encountered during execution of the mutation.
"""
errors: [String!]!
"""
List of the issue board
"""
list: BoardList
}
"""
An edge in a connection.
"""
......@@ -8108,6 +8158,11 @@ type LabelEdge {
node: Label
}
"""
Identifier of Label
"""
scalar LabelID
"""
List limit metric setting
"""
......@@ -9392,6 +9447,7 @@ type Mutation {
awardEmojiAdd(input: AwardEmojiAddInput!): AwardEmojiAddPayload
awardEmojiRemove(input: AwardEmojiRemoveInput!): AwardEmojiRemovePayload
awardEmojiToggle(input: AwardEmojiToggleInput!): AwardEmojiTogglePayload
boardListCreate(input: BoardListCreateInput!): BoardListCreatePayload
boardListUpdateLimitMetrics(input: BoardListUpdateLimitMetricsInput!): BoardListUpdateLimitMetricsPayload
commitCreate(input: CommitCreateInput!): CommitCreatePayload
configureSast(input: ConfigureSastInput!): ConfigureSastPayload
......
......@@ -3108,6 +3108,16 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "SCALAR",
"name": "BoardID",
"description": "Identifier of Board",
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "BoardList",
......@@ -3407,6 +3417,128 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "BoardListCreateInput",
"description": "Autogenerated input type of BoardListCreate",
"fields": null,
"inputFields": [
{
"name": "boardId",
"description": "The Global ID of the issue board to mutate",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "BoardID",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "backlog",
"description": "Create the backlog list",
"type": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
},
"defaultValue": null
},
{
"name": "labelId",
"description": "ID of an existing label",
"type": {
"kind": "SCALAR",
"name": "LabelID",
"ofType": null
},
"defaultValue": null
},
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
}
],
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "BoardListCreatePayload",
"description": "Autogenerated return type of BoardListCreate",
"fields": [
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "errors",
"description": "Errors encountered during execution of the mutation.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "list",
"description": "List of the issue board",
"args": [
],
"type": {
"kind": "OBJECT",
"name": "BoardList",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "BoardListEdge",
......@@ -22518,6 +22650,16 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "SCALAR",
"name": "LabelID",
"description": "Identifier of Label",
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "ENUM",
"name": "ListLimitMetric",
......@@ -26501,6 +26643,33 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "boardListCreate",
"description": null,
"args": [
{
"name": "input",
"description": null,
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "INPUT_OBJECT",
"name": "BoardListCreateInput",
"ofType": null
}
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "BoardListCreatePayload",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "boardListUpdateLimitMetrics",
"description": null,
......@@ -214,6 +214,16 @@ Represents a list for an issue board
| `position` | Int | Position of list within the board |
| `title` | String! | Title of the list |
## BoardListCreatePayload
Autogenerated return type of BoardListCreate
| Name | Type | Description |
| --- | ---- | ---------- |
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
| `errors` | String! => Array | Errors encountered during execution of the mutation. |
| `list` | BoardList | List of the issue board |
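A hedged usage sketch for the new `boardListCreate` mutation documented above. The `/api/graphql` endpoint, token header, and example global IDs are assumptions for illustration; per the mutation's implementation, exactly one of `backlog` or `labelId` must be provided:

```javascript
// Illustrative call to the boardListCreate mutation over GitLab's GraphQL API.
// Replace the host, token, and global IDs with real values.
const query = `
  mutation {
    boardListCreate(input: {
      boardId: "gid://gitlab/Board/1",
      labelId: "gid://gitlab/Label/2"
    }) {
      list { title position }
      errors
    }
  }
`;

fetch('https://gitlab.example.com/api/graphql', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    Authorization: 'Bearer <personal-access-token>',
  },
  body: JSON.stringify({ query }),
})
  .then(res => res.json())
  .then(({ data }) => console.log(data.boardListCreate));
```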
## BoardListUpdateLimitMetricsPayload
Autogenerated return type of BoardListUpdateLimitMetrics
......
......@@ -179,6 +179,20 @@ the `weight` parameter:
]
```
Users on GitLab [Ultimate](https://about.gitlab.com/pricing/) will also see
the `health_status` parameter:
```json
[
{
"state" : "opened",
"description" : "Ratione dolores corrupti mollitia soluta quia.",
"health_status": "on_track",
...
}
]
```
**Note**: `assignee` column is deprecated, now we show it as a single-sized array `assignees` to conform to the GitLab EE API.
**Note**: The `closed_by` attribute was [introduced in GitLab 10.6](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17042). This value will only be present for issues which were closed after GitLab 10.6 and when the user account that closed the issue still exists.
......@@ -338,6 +352,20 @@ the `weight` parameter:
]
```
Users on GitLab [Ultimate](https://about.gitlab.com/pricing/) will also see
the `health_status` parameter:
```json
[
{
"project_id" : 4,
"description" : "Omnis vero earum sunt corporis dolor et placeat.",
"health_status": "at_risk",
...
}
]
```
**Note**: `assignee` column is deprecated, now we show it as a single-sized array `assignees` to conform to the GitLab EE API.
**Note**: The `closed_by` attribute was [introduced in GitLab 10.6](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17042). This value will only be present for issues which were closed after GitLab 10.6 and when the user account that closed the issue still exists.
......@@ -503,6 +531,20 @@ the `weight` parameter:
]
```
Users on GitLab [Ultimate](https://about.gitlab.com/pricing/) will also see
the `health_status` parameter:
```json
[
{
"project_id" : 4,
"description" : "Omnis vero earum sunt corporis dolor et placeat.",
"health_status": "at_risk",
...
}
]
```
**Note**: `assignee` column is deprecated, now we show it as a single-sized array `assignees` to conform to the GitLab EE API.
**Note**: The `closed_by` attribute was [introduced in GitLab 10.6](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17042). This value will only be present for issues which were closed after GitLab 10.6 and when the user account that closed the issue still exists.
......@@ -642,6 +684,20 @@ the `epic` property:
}
```
Users on GitLab [Ultimate](https://about.gitlab.com/pricing/) will also see
the `health_status` property:
```json
[
{
"project_id" : 4,
"description" : "Omnis vero earum sunt corporis dolor et placeat.",
"health_status": "on_track",
...
}
]
```
**Note**: `assignee` column is deprecated, now we show it as a single-sized array `assignees` to conform to the GitLab EE API.
**Note**: The `closed_by` attribute was [introduced in GitLab 10.6](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17042). This value will only be present for issues which were closed after GitLab 10.6 and when the user account that closed the issue still exists.
......@@ -752,6 +808,20 @@ the `weight` parameter:
}
```
Users on GitLab [Ultimate](https://about.gitlab.com/pricing/) will also see
the `health_status` parameter:
```json
[
{
"project_id" : 4,
"description" : "Omnis vero earum sunt corporis dolor et placeat.",
"health_status": "on_track",
...
}
]
```
**Note**: `assignee` column is deprecated, now we show it as a single-sized array `assignees` to conform to the GitLab EE API.
**Note**: The `closed_by` attribute was [introduced in GitLab 10.6](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17042). This value will only be present for issues which were closed after GitLab 10.6 and when the user account that closed the issue still exists.
......@@ -874,6 +944,20 @@ the `weight` parameter:
}
```
Users on GitLab [Ultimate](https://about.gitlab.com/pricing/) will also see
the `health_status` parameter:
```json
[
{
"project_id" : 4,
"description" : "Omnis vero earum sunt corporis dolor et placeat.",
"health_status": "on_track",
...
}
]
```
NOTE: **Note:**
At least one of following parameters is required to be passed for the request to be successful: `:assignee_id`, `:assignee_ids`, `:confidential`, `:created_at`, `:description`, `:discussion_locked`, `:due_date`, `:labels`, `:milestone_id`, `:state_event`, or `:title`.
......@@ -1027,6 +1111,20 @@ the `weight` parameter:
}
```
Users on GitLab [Ultimate](https://about.gitlab.com/pricing/) will also see
the `health_status` parameter:
```json
[
{
"project_id" : 4,
"description" : "Omnis vero earum sunt corporis dolor et placeat.",
"health_status": "on_track",
...
}
]
```
**Note**: `assignee` column is deprecated, now we show it as a single-sized array `assignees` to conform to the GitLab EE API.
**Note**: The `closed_by` attribute was [introduced in GitLab 10.6](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17042). This value will only be present for issues which were closed after GitLab 10.6 and when the user account that closed the issue still exists.
......
......@@ -168,6 +168,9 @@ After you have these prerequisites ready, follow these steps:
1. Commit and push your updated `.gitlab-ci.yml` to your project's repository, and you're done!
Your application Docker image will be rebuilt and pushed to the GitLab registry.
If your image is located in a private registry, make sure your task definition is
[configured with a `repositoryCredentials` attribute](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/private-auth.html).
Then the targeted task definition will be updated with the location of the new
Docker image, and a new revision will be created in ECS as result.
......
......@@ -34,3 +34,5 @@ This dashboard displays CPU, memory, network and disk metrics for the pods in yo
[connected K8s cluster](../../../user/project/clusters/index.md). It provides a
[variable selector](templating_variables.md#metric_label_values-variable-type)
at the top of the dashboard to select which pod's metrics to display.
![K8s pod health dashboard](img/k8s_pod_health_dashboard_v13_3.png)
......@@ -32,7 +32,7 @@ To create a new dashboard from the GitLab user interface:
1. Sign in to GitLab as a user with Maintainer or Owner
[permissions](../../../user/permissions.md#project-members-permissions).
1. Navigate to your dashboard at **Operations > Metrics**.
1. In the top-right corner of your dashboard, click the **{{ellipsis_v}}** **More actions** menu,
1. In the top-right corner of your dashboard, click the **{ellipsis_v}** **More actions** menu,
and select **Create new**:
![Monitoring Dashboard actions menu with create new item](img/actions_menu_create_new_dashboard_v13_3.png)
1. In the modal window, click **Open Repository**, then follow the instructions
......@@ -75,49 +75,26 @@ supported and won't be available in the UI.
## Add a new metrics panel to a dashboard
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/228761) in GitLab 13.3 behind a disabled [feature flag](../../../administration/feature_flags.md): `metrics_dashboard_new_panel_page`.
> UI option [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/228761) in GitLab 13.3.
The metrics dashboard supports various [multiple panel types](../../../operations/metrics/dashboards/panel_types.md).
You can quickly test how a panel configuration would display in your metrics dashboard
with the **Add Panel** page:
1. Sign in to GitLab as a user with Maintainer or Owner
[permissions](../../../user/permissions.md#project-members-permissions) on a
project that has the [feature flag enabled](#enable-or-disable-testing-metrics-panels).
1. Open the URL `https://example.com/PROJECT/-/metrics/panel/new`, replacing
`example.com` with your domain name, and `PROJECT` with the name of your project,
to display the panel configuration page.
[permissions](../../../user/permissions.md#project-members-permissions).
1. Click **Add panel** in the **{ellipsis_v}** **More actions** menu.
NOTE: **Note:**
You can add a panel only to custom dashboards.
![Monitoring Dashboard actions menu with add panel item](img/actions_menu_create_add_panel_v13_3.png)
1. In the **Define and preview panel** section, paste in the YAML you want to
preview in the **Panel YAML** field.
1. Click **Preview panel**, and GitLab displays a preview of the chart below the
`Define and preview panel` section:
![Monitoring Dashboard Add Panel page](img/metrics_dashboard_panel_preview_v13_3.png)
### Enable or disable testing metrics panels
Testing metrics panels in the UI is under development and not ready for production use. It's
deployed behind a feature flag that's **disabled by default**.
[GitLab administrators with access to the GitLab Rails console](../../../administration/feature_flags.md)
can enable it for your instance. Testing metrics panels in the UI can be enabled or disabled per-project.
To enable it:
```ruby
# Instance-wide
Feature.enable(:metrics_dashboard_new_panel_page)
# or by project
Feature.enable(:metrics_dashboard_new_panel_page, Project.find(metrics_dashboard_new_panel_page))
```
To disable it:
```ruby
# Instance-wide
Feature.disable(:metrics_dashboard_new_panel_page)
# or by project
Feature.disable(:metrics_dashboard_new_panel_page, Project.find(metrics_dashboard_new_panel_page))
```
## Duplicate a GitLab-defined dashboard
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/37238) in GitLab 12.7.
......@@ -128,7 +105,7 @@ The resulting `.yml` file can be customized and adapted to your project.
You can decide to save the dashboard `.yml` file in the project's **default** branch or in a
new branch.
1. Click **Duplicate current dashboard** in the **{{ellipsis_v}}** **More actions** menu.
1. Click **Duplicate current dashboard** in the **{ellipsis_v}** **More actions** menu.
NOTE: **Note:**
You can duplicate only GitLab-defined dashboards.
......
......@@ -202,6 +202,17 @@ Only the latest version of the designs can be deleted.
Deleted designs are not permanently lost; they can be
viewed by browsing previous versions.
## Reordering designs
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/34382) in GitLab 13.3.
You can change the order of designs by dragging them to a new position:
![Reorder designs](img/designs_reordering_v13_3.gif)
NOTE: **Note:**
You can reorder designs only on the latest version.
## Starting discussions on designs
When a design is uploaded, you can start a discussion by clicking on
......
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Sets the MergeRequestDiff#files_count value for old rows
class SetMergeRequestDiffFilesCount
COUNT_SUBQUERY = <<~SQL
files_count = (
SELECT count(*)
FROM merge_request_diff_files
WHERE merge_request_diff_files.merge_request_diff_id = merge_request_diffs.id
)
SQL
class MergeRequestDiff < ActiveRecord::Base # rubocop:disable Style/Documentation
include EachBatch
self.table_name = 'merge_request_diffs'
end
def perform(start_id, end_id)
MergeRequestDiff.where(id: start_id..end_id).each_batch do |relation|
relation.update_all(COUNT_SUBQUERY)
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This class is responsible for migrating a range of package files
# with file_store == NULL to 1.
#
# The index `index_packages_package_files_file_store_is_null` is
# expected to be used to find the rows here and in the migration scheduling
# the jobs that run this class.
class SetNullPackageFilesFileStoreToLocalValue
LOCAL_STORE = 1 # equal to ObjectStorage::Store::LOCAL
# Temporary AR class for package files
class PackageFile < ActiveRecord::Base
self.table_name = 'packages_package_files'
end
def perform(start_id, stop_id)
Packages::PackageFile.where(file_store: nil, id: start_id..stop_id).update_all(file_store: LOCAL_STORE)
end
end
end
end
......@@ -22,10 +22,6 @@ module Gitlab
::Feature.enabled?(:ci_composite_status, project, default_enabled: true)
end
def self.atomic_processing?(project)
::Feature.enabled?(:ci_atomic_processing, project, default_enabled: true)
end
def self.pipeline_latest?
::Feature.enabled?(:ci_pipeline_latest, default_enabled: true)
end
......
......@@ -10,7 +10,7 @@ module Gitlab
def perform!
::Ci::ProcessPipelineService
.new(@pipeline)
.execute(nil, initial_process: true)
.execute
end
def break?
......
......@@ -22639,6 +22639,9 @@ msgstr ""
msgid "Something went wrong trying to change the locked state of this %{issuableDisplayName}"
msgstr ""
msgid "Something went wrong when reordering designs. Please try again"
msgstr ""
msgid "Something went wrong when toggling the button"
msgstr ""
......
......@@ -354,6 +354,10 @@ module QA
module Metrics
autoload :Show, 'qa/page/project/operations/metrics/show'
end
module Incidents
autoload :Index, 'qa/page/project/operations/incidents/index'
end
end
module Wiki
......
......@@ -80,6 +80,10 @@ module QA
select_filter_with_text('Show history only')
end
def has_metrics_unfurled?
has_element?(:prometheus_graph_widgets, wait: 30)
end
private
def select_filter_with_text(text)
......
# frozen_string_literal: true
module QA
module Page
module Project
module Operations
module Incidents
class Index < Page::Base
view 'app/assets/javascripts/incidents/components/incidents_list.vue' do
element :create_incident_button
end
def create_incident
click_element :create_incident_button
end
end
end
end
end
end
end
......@@ -17,6 +17,7 @@ module QA
element :operations_link
element :operations_environments_link
element :operations_metrics_link
element :operations_incidents_link
end
end
end
......@@ -45,6 +46,14 @@ module QA
end
end
def go_to_operations_incidents
hover_operations do
within_submenu do
click_element(:operations_incidents_link)
end
end
end
private
def hover_operations
......
export const designListQueryResponse = {
data: {
project: {
id: '1',
issue: {
designCollection: {
designs: {
nodes: [
{
id: '1',
event: 'NONE',
filename: 'fox_1.jpg',
notesCount: 3,
image: 'image-1',
imageV432x230: 'image-1',
},
{
id: '2',
event: 'NONE',
filename: 'fox_2.jpg',
notesCount: 2,
image: 'image-2',
imageV432x230: 'image-2',
},
{
id: '3',
event: 'NONE',
filename: 'fox_3.jpg',
notesCount: 1,
image: 'image-3',
imageV432x230: 'image-3',
},
],
},
versions: {
nodes: [],
},
},
},
},
},
};
export const permissionsQueryResponse = {
data: {
project: {
id: '1',
issue: {
userPermissions: { createDesign: true },
},
},
},
};
export const reorderedDesigns = [
{
id: '2',
event: 'NONE',
filename: 'fox_2.jpg',
notesCount: 2,
image: 'image-2',
imageV432x230: 'image-2',
},
{
id: '1',
event: 'NONE',
filename: 'fox_1.jpg',
notesCount: 3,
image: 'image-1',
imageV432x230: 'image-1',
},
{
id: '3',
event: 'NONE',
filename: 'fox_3.jpg',
notesCount: 1,
image: 'image-3',
imageV432x230: 'image-3',
},
];
export const moveDesignMutationResponse = {
data: {
designManagementMove: {
designCollection: {
designs: {
nodes: [...reorderedDesigns],
},
},
errors: [],
},
},
};
export const moveDesignMutationResponseWithErrors = {
data: {
designManagementMove: {
designCollection: {
designs: {
nodes: [...reorderedDesigns],
},
},
errors: ['Houston, we have a problem'],
},
},
};
......@@ -22,14 +22,14 @@ exports[`Design management index page designs does not render toolbar when there
hasdesigns="true"
/>
</li>
<li
class="col-md-6 col-lg-3 gl-mb-3"
class="col-md-6 col-lg-3 gl-mb-3 gl-bg-transparent gl-shadow-none js-design-tile"
>
<design-dropzone-stub
hasdesigns="true"
>
<design-stub
class="gl-bg-white"
event="NONE"
filename="design-1-name"
id="design-1"
......@@ -41,12 +41,13 @@ exports[`Design management index page designs does not render toolbar when there
<!---->
</li>
<li
class="col-md-6 col-lg-3 gl-mb-3"
class="col-md-6 col-lg-3 gl-mb-3 gl-bg-transparent gl-shadow-none js-design-tile"
>
<design-dropzone-stub
hasdesigns="true"
>
<design-stub
class="gl-bg-white"
event="NONE"
filename="design-2-name"
id="design-2"
......@@ -58,12 +59,13 @@ exports[`Design management index page designs does not render toolbar when there
<!---->
</li>
<li
class="col-md-6 col-lg-3 gl-mb-3"
class="col-md-6 col-lg-3 gl-mb-3 gl-bg-transparent gl-shadow-none js-design-tile"
>
<design-dropzone-stub
hasdesigns="true"
>
<design-stub
class="gl-bg-white"
event="NONE"
filename="design-3-name"
id="design-3"
......@@ -151,14 +153,14 @@ exports[`Design management index page designs renders designs list and header wi
hasdesigns="true"
/>
</li>
<li
class="col-md-6 col-lg-3 gl-mb-3"
class="col-md-6 col-lg-3 gl-mb-3 gl-bg-transparent gl-shadow-none js-design-tile"
>
<design-dropzone-stub
hasdesigns="true"
>
<design-stub
class="gl-bg-white"
event="NONE"
filename="design-1-name"
id="design-1"
......@@ -173,12 +175,13 @@ exports[`Design management index page designs renders designs list and header wi
/>
</li>
<li
class="col-md-6 col-lg-3 gl-mb-3"
class="col-md-6 col-lg-3 gl-mb-3 gl-bg-transparent gl-shadow-none js-design-tile"
>
<design-dropzone-stub
hasdesigns="true"
>
<design-stub
class="gl-bg-white"
event="NONE"
filename="design-2-name"
id="design-2"
......@@ -193,12 +196,13 @@ exports[`Design management index page designs renders designs list and header wi
/>
</li>
<li
class="col-md-6 col-lg-3 gl-mb-3"
class="col-md-6 col-lg-3 gl-mb-3 gl-bg-transparent gl-shadow-none js-design-tile"
>
<design-dropzone-stub
hasdesigns="true"
>
<design-stub
class="gl-bg-white"
event="NONE"
filename="design-3-name"
id="design-3"
......@@ -296,7 +300,6 @@ exports[`Design management index page when has no designs renders design dropzon
class=""
/>
</li>
</ol>
</div>
......
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { createMockClient } from 'mock-apollo-client';
import VueApollo from 'vue-apollo';
import VueRouter from 'vue-router';
import VueDraggable from 'vuedraggable';
import Design from '~/design_management/components/list/item.vue';
import createRouter from '~/design_management/router';
import getDesignListQuery from '~/design_management/graphql/queries/get_design_list.query.graphql';
import permissionsQuery from '~/design_management/graphql/queries/design_permissions.query.graphql';
import moveDesignMutation from '~/design_management/graphql/mutations/move_design.mutation.graphql';
import createFlash from '~/flash';
import Index from '~/design_management/pages/index.vue';
import {
designListQueryResponse,
permissionsQueryResponse,
moveDesignMutationResponse,
reorderedDesigns,
moveDesignMutationResponseWithErrors,
} from '../mock_data/apollo_mock';
import { InMemoryCache } from 'apollo-cache-inmemory';
jest.mock('~/flash.js');
const localVue = createLocalVue();
localVue.use(VueApollo);
const router = createRouter();
localVue.use(VueRouter);
const designToMove = {
__typename: 'Design',
id: '2',
event: 'NONE',
filename: 'fox_2.jpg',
notesCount: 2,
image: 'image-2',
imageV432x230: 'image-2',
};
describe('Design management index page with Apollo mock', () => {
let wrapper;
let mockClient;
let apolloProvider;
let moveDesignHandler;
async function moveDesigns(localWrapper) {
await jest.runOnlyPendingTimers();
await localWrapper.vm.$nextTick();
localWrapper.find(VueDraggable).vm.$emit('input', reorderedDesigns);
localWrapper.find(VueDraggable).vm.$emit('change', {
moved: {
newIndex: 0,
element: designToMove,
},
});
}
const fragmentMatcher = { match: () => true };
const cache = new InMemoryCache({
fragmentMatcher,
addTypename: false,
});
const findDesigns = () => wrapper.findAll(Design);
function createComponent({
moveHandler = jest.fn().mockResolvedValue(moveDesignMutationResponse),
}) {
mockClient = createMockClient({ cache });
mockClient.setRequestHandler(
getDesignListQuery,
jest.fn().mockResolvedValue(designListQueryResponse),
);
mockClient.setRequestHandler(
permissionsQuery,
jest.fn().mockResolvedValue(permissionsQueryResponse),
);
moveDesignHandler = moveHandler;
mockClient.setRequestHandler(moveDesignMutation, moveDesignHandler);
apolloProvider = new VueApollo({
defaultClient: mockClient,
});
wrapper = shallowMount(Index, {
localVue,
apolloProvider,
router,
stubs: { VueDraggable },
});
}
afterEach(() => {
wrapper.destroy();
wrapper = null;
mockClient = null;
apolloProvider = null;
});
it('has a design with id 1 as a first one', async () => {
createComponent({});
await jest.runOnlyPendingTimers();
await wrapper.vm.$nextTick();
expect(findDesigns()).toHaveLength(3);
expect(
findDesigns()
.at(0)
.props('id'),
).toBe('1');
});
it('calls a mutation with correct parameters and reorders designs', async () => {
createComponent({});
await moveDesigns(wrapper);
expect(moveDesignHandler).toHaveBeenCalled();
await wrapper.vm.$nextTick();
expect(
findDesigns()
.at(0)
.props('id'),
).toBe('2');
});
it('displays flash if mutation had a recoverable error', async () => {
createComponent({
moveHandler: jest.fn().mockResolvedValue(moveDesignMutationResponseWithErrors),
});
await moveDesigns(wrapper);
await wrapper.vm.$nextTick();
expect(createFlash).toHaveBeenCalledWith('Houston, we have a problem');
});
it('displays flash if mutation had a non-recoverable error', async () => {
createComponent({
moveHandler: jest.fn().mockRejectedValue('Error'),
});
await moveDesigns(wrapper);
await jest.runOnlyPendingTimers();
await wrapper.vm.$nextTick();
expect(createFlash).toHaveBeenCalledWith(
'Something went wrong when reordering designs. Please try again',
);
});
});
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { ApolloMutation } from 'vue-apollo';
import VueDraggable from 'vuedraggable';
import VueRouter from 'vue-router';
import { GlEmptyState } from '@gitlab/ui';
import Index from '~/design_management/pages/index.vue';
......@@ -108,7 +109,7 @@ describe('Design management index page', () => {
mocks: { $apollo },
localVue,
router,
stubs: { DesignDestroyer, ApolloMutation, ...stubs },
stubs: { DesignDestroyer, ApolloMutation, VueDraggable, ...stubs },
attachToDocument: true,
provide: {
projectPath: 'project-path',
......
......@@ -99,6 +99,7 @@ describe('Issuable component', () => {
const findIssuableTitle = () => wrapper.find('[data-testid="issuable-title"]');
const findIssuableStatus = () => wrapper.find('[data-testid="issuable-status"]');
const containsJiraLogo = () => wrapper.contains('[data-testid="jira-logo"]');
const findHealthStatus = () => wrapper.find('.health-status');
describe('when mounted', () => {
it('initializes user popovers', () => {
......@@ -474,4 +475,19 @@ describe('Issuable component', () => {
});
});
});
if (IS_EE) {
describe('with health status', () => {
it('renders health status tag', () => {
factory({ issuable });
expect(findHealthStatus().exists()).toBe(true);
});
it('does not render when health status is absent', () => {
issuable.health_status = null;
factory({ issuable });
expect(findHealthStatus().exists()).toBe(false);
});
});
}
});
......@@ -30,6 +30,7 @@ export const simpleIssue = {
references: {
relative: 'html-boilerplate#45',
},
health_status: 'on_track',
};
export const testLabels = [
......
......@@ -4,6 +4,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<gl-new-dropdown-stub
category="primary"
headertext=""
right=""
size="medium"
text="Clone"
variant="info"
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Mutations::Boards::Lists::Create do
include GraphqlHelpers
let_it_be(:group) { create(:group, :private) }
let_it_be(:board) { create(:board, group: group) }
let_it_be(:user) { create(:user) }
let_it_be(:guest) { create(:user) }
let(:current_user) { user }
let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
let(:list_create_params) { {} }
before_all do
group.add_reporter(user)
group.add_guest(guest)
end
subject { mutation.resolve(board_id: board.to_global_id.to_s, **list_create_params) }
describe '#ready?' do
it 'raises an error if required arguments are missing' do
expect { mutation.ready?({ board_id: 'some id' }) }
.to raise_error(Gitlab::Graphql::Errors::ArgumentError,
'one and only one of backlog or labelId is required')
end
it 'raises an error if too many required arguments are specified' do
expect { mutation.ready?({ board_id: 'some id', backlog: true, label_id: 'some label' }) }
.to raise_error(Gitlab::Graphql::Errors::ArgumentError,
'one and only one of backlog or labelId is required')
end
end
describe '#resolve' do
context 'with proper permissions' do
describe 'backlog list' do
let(:list_create_params) { { backlog: true } }
it 'creates one and only one backlog' do
expect { subject }.to change { board.lists.backlog.count }.from(0).to(1)
expect(board.lists.backlog.first.list_type).to eq 'backlog'
backlog_id = board.lists.backlog.first.id
expect { subject }.not_to change { board.lists.backlog.count }
expect(board.lists.backlog.last.id).to eq backlog_id
end
end
describe 'label list' do
let_it_be(:dev_label) do
create(:group_label, title: 'Development', color: '#FFAABB', group: group)
end
let(:list_create_params) { { label_id: dev_label.to_global_id.to_s } }
it 'creates a new issue board list for labels' do
expect { subject }.to change { board.lists.count }.from(1).to(2)
new_list = subject[:list]
expect(new_list.title).to eq dev_label.title
expect(new_list.position).to eq 0
end
end
end
context 'without proper permissions' do
let(:current_user) { guest }
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::SetMergeRequestDiffFilesCount, schema: 20200807152315 do
let(:merge_request_diff_files) { table(:merge_request_diff_files) }
let(:merge_request_diffs) { table(:merge_request_diffs) }
let(:merge_requests) { table(:merge_requests) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
let(:project) { projects.create!(namespace_id: namespace.id) }
let(:merge_request) { merge_requests.create!(source_branch: 'x', target_branch: 'master', target_project_id: project.id) }
it 'fills the files_count column' do
empty_diff = merge_request_diffs.create!(merge_request_id: merge_request.id)
filled_diff = merge_request_diffs.create!(merge_request_id: merge_request.id)
3.times do |n|
merge_request_diff_files.create!(
merge_request_diff_id: filled_diff.id,
relative_order: n,
new_file: false,
renamed_file: false,
deleted_file: false,
too_large: false,
a_mode: '',
b_mode: '',
old_path: '',
new_path: ''
)
end
described_class.new.perform(empty_diff.id, filled_diff.id)
expect(empty_diff.reload.files_count).to eq(0)
expect(filled_diff.reload.files_count).to eq(3)
end
end
# frozen_string_literal: true
require 'spec_helper'
# The test setup must begin before
# 20200806004742_add_not_null_constraint_on_file_store_to_package_files.rb
# has run, or else we cannot insert a row with `NULL` `file_store` to
# test against.
RSpec.describe Gitlab::BackgroundMigration::SetNullPackageFilesFileStoreToLocalValue, schema: 20200806004232 do
let!(:packages_package_files) { table(:packages_package_files) }
let!(:packages_packages) { table(:packages_packages) }
let!(:projects) { table(:projects) }
let!(:namespaces) { table(:namespaces) }
let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
let!(:project) { projects.create!(namespace_id: namespace.id) }
let!(:package) { packages_packages.create!(project_id: project.id, name: 'bar', package_type: 1) }
it 'correctly migrates nil file_store to 1' do
file_store_1 = packages_package_files.create!(file_store: 1, file_name: 'foo_1', file: 'foo_1', package_id: package.id)
file_store_2 = packages_package_files.create!(file_store: 2, file_name: 'foo_2', file: 'foo_2', package_id: package.id)
file_store_nil = packages_package_files.create!(file_store: nil, file_name: 'foo_nil', file: 'foo_nil', package_id: package.id)
described_class.new.perform(file_store_1.id, file_store_nil.id)
file_store_1.reload
file_store_2.reload
file_store_nil.reload
expect(file_store_1.file_store).to eq(1) # unchanged
expect(file_store_2.file_store).to eq(2) # unchanged
expect(file_store_nil.file_store).to eq(1) # nil => 1
end
end
......@@ -217,6 +217,7 @@ MergeRequestDiff:
- head_commit_sha
- start_commit_sha
- commits_count
- files_count
MergeRequestDiffCommit:
- merge_request_diff_id
- relative_order
......
......@@ -43,4 +43,34 @@ RSpec.describe Ci::PipelineArtifact, type: :model do
end
end
end
describe '#set_size' do
subject { create(:ci_pipeline_artifact) }
context 'when file is being created' do
it 'sets the size' do
expect(subject.size).to eq(85)
end
end
context 'when file is being updated' do
it 'updates the size' do
subject.update!(file: fixture_file_upload('spec/fixtures/dk.png'))
expect(subject.size).to eq(1062)
end
end
end
describe 'file is being stored' do
subject { create(:ci_pipeline_artifact) }
context 'when existing object has local store' do
it 'is stored locally' do
expect(subject.file_store).to be(ObjectStorage::Store::LOCAL)
expect(subject.file).to be_file_storage
expect(subject.file.object_store).to eq(ObjectStorage::Store::LOCAL)
end
end
end
end
......@@ -1044,19 +1044,6 @@ RSpec.describe Ci::Pipeline, :mailer do
end
describe '#stages' do
before do
create(:ci_stage_entity, project: project,
pipeline: pipeline,
name: 'build')
end
it 'returns persisted stages' do
expect(pipeline.stages).not_to be_empty
expect(pipeline.stages).to all(be_persisted)
end
end
describe '#ordered_stages' do
before do
create(:ci_stage_entity, project: project,
pipeline: pipeline,
......@@ -1086,60 +1073,25 @@ RSpec.describe Ci::Pipeline, :mailer do
name: 'cleanup')
end
subject { pipeline.ordered_stages }
context 'when using atomic processing' do
before do
stub_feature_flags(
ci_atomic_processing: true
)
end
context 'when pipeline is not complete' do
it 'returns stages in valid order' do
expect(subject).to all(be_a Ci::Stage)
expect(subject.map(&:name))
.to eq %w[sanity build test deploy cleanup]
end
end
context 'when pipeline is complete' do
before do
pipeline.succeed!
end
subject { pipeline.stages }
it 'returns stages in valid order' do
expect(subject).to all(be_a Ci::Stage)
expect(subject.map(&:name))
.to eq %w[sanity build test deploy cleanup]
end
context 'when pipeline is not complete' do
it 'returns stages in valid order' do
expect(subject).to all(be_a Ci::Stage)
expect(subject.map(&:name))
.to eq %w[sanity build test deploy cleanup]
end
end
context 'when using persisted stages' do
context 'when pipeline is complete' do
before do
stub_feature_flags(
ci_atomic_processing: false
)
end
context 'when pipeline is not complete' do
it 'still returns legacy stages' do
expect(subject).to all(be_a Ci::LegacyStage)
expect(subject.map(&:name)).to eq %w[build test]
end
pipeline.succeed!
end
context 'when pipeline is complete' do
before do
pipeline.succeed!
end
it 'returns stages in valid order' do
expect(subject).to all(be_a Ci::Stage)
expect(subject.map(&:name))
.to eq %w[sanity build test deploy cleanup]
end
it 'returns stages in valid order' do
expect(subject).to all(be_a Ci::Stage)
expect(subject.map(&:name))
.to eq %w[sanity build test deploy cleanup]
end
end
end
......
......@@ -66,51 +66,35 @@ RSpec.describe CommitStatus do
describe '#processed' do
subject { commit_status.processed }
context 'when ci_atomic_processing is disabled' do
context 'status is latest' do
before do
stub_feature_flags(ci_atomic_processing: false)
commit_status.save!
commit_status.update!(retried: false, status: :pending)
end
it { is_expected.to be_nil }
it { is_expected.to be_falsey }
end
context 'when ci_atomic_processing is enabled' do
context 'status is retried' do
before do
stub_feature_flags(ci_atomic_processing: true)
end
context 'status is latest' do
before do
commit_status.update!(retried: false, status: :pending)
end
it { is_expected.to be_falsey }
commit_status.update!(retried: true, status: :pending)
end
context 'status is retried' do
before do
commit_status.update!(retried: true, status: :pending)
end
it { is_expected.to be_truthy }
end
it { is_expected.to be_truthy }
end
it "processed state is always persisted" do
commit_status.update!(retried: false, status: :pending)
it "processed state is always persisted" do
commit_status.update!(retried: false, status: :pending)
# another process does mark object as processed
CommitStatus.find(commit_status.id).update_column(:processed, true)
# another process does mark object as processed
CommitStatus.find(commit_status.id).update_column(:processed, true)
# subsequent status transitions on the same instance
# always saves processed=false to DB even though
# the current value did not change
commit_status.update!(retried: false, status: :running)
# subsequent status transitions on the same instance
# always saves processed=false to DB even though
# the current value did not change
commit_status.update!(retried: false, status: :running)
# we look at a persisted state in DB
expect(CommitStatus.find(commit_status.id).processed).to eq(false)
end
# we look at a persisted state in DB
expect(CommitStatus.find(commit_status.id).processed).to eq(false)
end
end
......
......@@ -672,6 +672,12 @@ RSpec.describe MergeRequestDiff do
end
end
describe '#files_count' do
it 'returns number of diff files' do
expect(diff_with_commits.files_count).to eq(diff_with_commits.merge_request_diff_files.count)
end
end
describe '#first_commit' do
it 'returns first commit' do
expect(diff_with_commits.first_commit.sha).to eq(diff_with_commits.merge_request_diff_commits.last.sha)
......@@ -721,10 +727,12 @@ RSpec.describe MergeRequestDiff do
describe '#modified_paths' do
subject do
diff = create(:merge_request_diff)
create(:merge_request_diff_file, :new_file, merge_request_diff: diff)
create(:merge_request_diff_file, :renamed_file, merge_request_diff: diff)
diff
create(:merge_request_diff).tap do |diff|
create(:merge_request_diff_file, :new_file, merge_request_diff: diff)
create(:merge_request_diff_file, :renamed_file, merge_request_diff: diff)
diff.merge_request_diff_files.reset
end
end
it 'returns affected file paths' do
......
......@@ -58,7 +58,7 @@ RSpec.describe Packages::PackageFile, type: :model do
end
describe '#update_file_metadata callback' do
let_it_be(:package_file) { build(:package_file, :nuget, file_store: nil, size: nil) }
let_it_be(:package_file) { build(:package_file, :nuget, size: nil) }
subject { package_file.save! }
......@@ -67,9 +67,14 @@ RSpec.describe Packages::PackageFile, type: :model do
.to receive(:update_file_metadata)
.and_call_original
expect { subject }
.to change { package_file.file_store }.from(nil).to(::Packages::PackageFileUploader::Store::LOCAL)
.and change { package_file.size }.from(nil).to(3513)
# This expectation uses a stub because we can no longer test a change from
# `nil` to `1`, because the field is no longer nullable, and it defaults
# to `1`.
expect(package_file)
.to receive(:update_column)
.with(:file_store, ::Packages::PackageFileUploader::Store::LOCAL)
expect { subject }.to change { package_file.size }.from(nil).to(3513)
end
end
end
......@@ -5,20 +5,12 @@ require_relative 'shared_processing_service.rb'
require_relative 'shared_processing_service_tests_with_yaml.rb'
RSpec.describe Ci::PipelineProcessing::AtomicProcessingService do
before do
stub_feature_flags(ci_atomic_processing: true)
# This feature flag is implicit
# Atomic Processing does not process statuses differently
stub_feature_flags(ci_composite_status: true)
end
it_behaves_like 'Pipeline Processing Service'
it_behaves_like 'Pipeline Processing Service Tests With Yaml'
private
def process_pipeline(initial_process: false)
def process_pipeline
described_class.new(pipeline).execute
end
end
# frozen_string_literal: true
require 'spec_helper'
require_relative 'shared_processing_service.rb'
require_relative 'shared_processing_service_tests_with_yaml.rb'
RSpec.describe Ci::PipelineProcessing::LegacyProcessingService do
before do
stub_feature_flags(ci_atomic_processing: false)
end
context 'when ci_composite_status is enabled' do
before do
stub_feature_flags(ci_composite_status: true)
end
it_behaves_like 'Pipeline Processing Service'
it_behaves_like 'Pipeline Processing Service Tests With Yaml'
end
context 'when ci_composite_status is disabled' do
before do
stub_feature_flags(ci_composite_status: false)
end
it_behaves_like 'Pipeline Processing Service'
it_behaves_like 'Pipeline Processing Service Tests With Yaml'
end
private
def process_pipeline(initial_process: false)
described_class.new(pipeline).execute(initial_process: initial_process)
end
end