提交 427b23c1 编写于 作者: G GitLab Bot

Add latest changes from gitlab-org/gitlab@master

上级 6d60f910
<script> <script>
import { createNamespacedHelpers, mapState, mapActions } from 'vuex'; import { createNamespacedHelpers, mapState, mapActions } from 'vuex';
import { sprintf, s__ } from '~/locale';
import ClusterFormDropdown from './cluster_form_dropdown.vue';
import RegionDropdown from './region_dropdown.vue'; import RegionDropdown from './region_dropdown.vue';
import RoleNameDropdown from './role_name_dropdown.vue'; import RoleNameDropdown from './role_name_dropdown.vue';
import SecurityGroupDropdown from './security_group_dropdown.vue'; import SecurityGroupDropdown from './security_group_dropdown.vue';
import SubnetDropdown from './subnet_dropdown.vue';
import VpcDropdown from './vpc_dropdown.vue';
const { mapState: mapRegionsState, mapActions: mapRegionsActions } = createNamespacedHelpers( const { mapState: mapRegionsState, mapActions: mapRegionsActions } = createNamespacedHelpers(
'regions', 'regions',
); );
const { mapState: mapVpcsState, mapActions: mapVpcActions } = createNamespacedHelpers('vpcs');
const { mapState: mapSubnetsState, mapActions: mapSubnetActions } = createNamespacedHelpers(
'subnets',
);
export default { export default {
components: { components: {
ClusterFormDropdown,
RegionDropdown, RegionDropdown,
RoleNameDropdown, RoleNameDropdown,
SecurityGroupDropdown, SecurityGroupDropdown,
SubnetDropdown,
VpcDropdown,
}, },
computed: { computed: {
...mapState(['selectedRegion']), ...mapState(['selectedRegion', 'selectedVpc', 'selectedSubnet']),
...mapRegionsState({ ...mapRegionsState({
regions: 'items', regions: 'items',
isLoadingRegions: 'isLoadingItems', isLoadingRegions: 'isLoadingItems',
loadingRegionsError: 'loadingItemsError', loadingRegionsError: 'loadingItemsError',
}), }),
...mapVpcsState({
vpcs: 'items',
isLoadingVpcs: 'isLoadingItems',
loadingVpcsError: 'loadingItemsError',
}),
...mapSubnetsState({
subnets: 'items',
isLoadingSubnets: 'isLoadingItems',
loadingSubnetsError: 'loadingItemsError',
}),
    // The VPC dropdown stays disabled until a region is chosen,
    // because VPCs are fetched per region (see setRegionAndFetchVpcs).
    vpcDropdownDisabled() {
      return !this.selectedRegion;
    },
    // The subnet dropdown stays disabled until a VPC is chosen,
    // because subnets are fetched per VPC (see setVpcAndFetchSubnets).
    subnetDropdownDisabled() {
      return !this.selectedVpc;
    },
    // Help text with an inline link to the AWS VPC console.
    // The final `false` argument disables sprintf's HTML-escaping so the
    // anchor markup survives; the template renders this value with v-html.
    vpcDropdownHelpText() {
      return sprintf(
        s__(
          'ClusterIntegration|Select a VPC to use for your EKS Cluster resources. To use a new VPC, first create one on %{startLink}Amazon Web Services%{endLink}.',
        ),
        {
          startLink:
            '<a href="https://console.aws.amazon.com/vpc/home?#vpc" target="_blank" rel="noopener noreferrer">',
          endLink: '</a>',
        },
        false,
      );
    },
    // Same pattern as vpcDropdownHelpText, linking to the AWS subnets console.
    subnetDropdownHelpText() {
      return sprintf(
        s__(
          'ClusterIntegration|Choose the %{startLink}subnets%{endLink} in your VPC where your worker nodes will run.',
        ),
        {
          startLink:
            '<a href="https://console.aws.amazon.com/vpc/home?#subnets" target="_blank" rel="noopener noreferrer">',
          endLink: '</a>',
        },
        false,
      );
    },
}, },
mounted() { mounted() {
this.fetchRegions(); this.fetchRegions();
}, },
methods: { methods: {
...mapActions(['setRegion']), ...mapActions(['setRegion', 'setVpc', 'setSubnet']),
...mapRegionsActions({ ...mapRegionsActions({ fetchRegions: 'fetchItems' }),
fetchRegions: 'fetchItems', ...mapVpcActions({ fetchVpcs: 'fetchItems' }),
}), ...mapSubnetActions({ fetchSubnets: 'fetchItems' }),
    // Selecting a region invalidates the VPC choice upstream, so the VPC
    // list is refetched immediately for the new region.
    setRegionAndFetchVpcs(region) {
      this.setRegion({ region });
      this.fetchVpcs({ region });
    },
    // Selecting a VPC likewise drives the subnet list, so subnets are
    // refetched immediately for the new VPC.
    setVpcAndFetchSubnets(vpc) {
      this.setVpc({ vpc });
      this.fetchSubnets({ vpc });
    },
}, },
}; };
</script> </script>
...@@ -54,8 +107,52 @@ export default { ...@@ -54,8 +107,52 @@ export default {
:regions="regions" :regions="regions"
:error="loadingRegionsError" :error="loadingRegionsError"
:loading="isLoadingRegions" :loading="isLoadingRegions"
@input="setRegion({ region: $event })" @input="setRegionAndFetchVpcs($event)"
/>
</div>
    <div class="form-group">
      <label class="label-bold" name="eks-vpc" for="eks-vpc">{{
        s__('ClusterIntegration|VPC')
      }}</label>
      <!-- NOTE(review): the placeholder namespace 'ClusterIntergation' is misspelled,
           but it matches the existing locale msgids; fixing it requires updating the
           corresponding entries in the locale files as well — confirm before changing. -->
      <cluster-form-dropdown
        field-id="eks-vpc"
        field-name="eks-vpc"
        :input="selectedVpc"
        :items="vpcs"
        :loading="isLoadingVpcs"
        :disabled="vpcDropdownDisabled"
        :disabled-text="s__('ClusterIntegration|Select a region to choose a VPC')"
        :loading-text="s__('ClusterIntegration|Loading VPCs')"
        :placeholder="s__('ClusterIntergation|Select a VPC')"
        :search-field-placeholder="s__('ClusterIntegration|Search VPCs')"
        :empty-text="s__('ClusterIntegration|No VPCs found')"
        :has-errors="loadingVpcsError"
        :error-message="s__('ClusterIntegration|Could not load VPCs for the selected region')"
        @input="setVpcAndFetchSubnets($event)"
      />
      <!-- Help text carries anchor markup built in vpcDropdownHelpText, hence v-html. -->
      <p class="form-text text-muted" v-html="vpcDropdownHelpText"></p>
    </div>
    <div class="form-group">
      <label class="label-bold" name="eks-subnet" for="eks-subnet">{{
        s__('ClusterIntegration|Subnet')
      }}</label>
      <!-- NOTE(review): 'ClusterIntergation' in the placeholder is a pre-existing
           misspelled namespace shared with the locale files; verify before fixing. -->
      <cluster-form-dropdown
        field-id="eks-subnet"
        field-name="eks-subnet"
        :input="selectedSubnet"
        :items="subnets"
        :loading="isLoadingSubnets"
        :disabled="subnetDropdownDisabled"
        :disabled-text="s__('ClusterIntegration|Select a VPC to choose a subnet')"
        :loading-text="s__('ClusterIntegration|Loading subnets')"
        :placeholder="s__('ClusterIntergation|Select a subnet')"
        :search-field-placeholder="s__('ClusterIntegration|Search subnets')"
        :empty-text="s__('ClusterIntegration|No subnet found')"
        :has-errors="loadingSubnetsError"
        :error-message="s__('ClusterIntegration|Could not load subnets for the selected VPC')"
        @input="setSubnet({ subnet: $event })"
      />
      <!-- Help text carries anchor markup built in subnetDropdownHelpText, hence v-html. -->
      <p class="form-text text-muted" v-html="subnetDropdownHelpText"></p>
</div> </div>
</form> </form>
</template> </template>
...@@ -17,4 +17,45 @@ export const fetchRegions = () => ...@@ -17,4 +17,45 @@ export const fetchRegions = () =>
.send(); .send();
}); });
// Lists the VPCs available in the configured AWS account/region.
// Resolves with dropdown-ready items ({ id, name }), both derived from the
// VPC id; rejects with the raw AWS SDK error.
export const fetchVpcs = () =>
  new Promise((resolve, reject) => {
    const request = new EC2().describeVpcs();

    request.on('success', ({ data: { Vpcs: vpcs } }) => {
      resolve(vpcs.map(({ VpcId: vpcId }) => ({ id: vpcId, name: vpcId })));
    });
    request.on('error', reject);
    request.send();
  });
// Lists the subnets belonging to the given VPC.
// Resolves with dropdown-ready items ({ id, name }), both derived from the
// subnet id; rejects with the raw AWS SDK error.
export const fetchSubnets = ({ vpc }) =>
  new Promise((resolve, reject) => {
    const filters = [
      {
        Name: 'vpc-id',
        Values: [vpc.id],
      },
    ];
    const request = new EC2().describeSubnets({ Filters: filters });

    request.on('success', ({ data: { Subnets: subnets } }) => {
      resolve(subnets.map(({ SubnetId: subnetId }) => ({ id: subnetId, name: subnetId })));
    });
    request.on('error', reject);
    request.send();
  });
export default () => {}; export default () => {};
...@@ -4,4 +4,12 @@ export const setRegion = ({ commit }, payload) => { ...@@ -4,4 +4,12 @@ export const setRegion = ({ commit }, payload) => {
commit(types.SET_REGION, payload); commit(types.SET_REGION, payload);
}; };
// Vuex action: records the user's VPC selection in the store.
export const setVpc = (store, payload) => {
  store.commit(types.SET_VPC, payload);
};

// Vuex action: records the user's subnet selection in the store.
export const setSubnet = (store, payload) => {
  store.commit(types.SET_SUBNET, payload);
};
export default () => {}; export default () => {};
...@@ -4,10 +4,10 @@ export default fetchItems => ({ ...@@ -4,10 +4,10 @@ export default fetchItems => ({
requestItems: ({ commit }) => commit(types.REQUEST_ITEMS), requestItems: ({ commit }) => commit(types.REQUEST_ITEMS),
receiveItemsSuccess: ({ commit }, payload) => commit(types.RECEIVE_ITEMS_SUCCESS, payload), receiveItemsSuccess: ({ commit }, payload) => commit(types.RECEIVE_ITEMS_SUCCESS, payload),
receiveItemsError: ({ commit }, payload) => commit(types.RECEIVE_ITEMS_ERROR, payload), receiveItemsError: ({ commit }, payload) => commit(types.RECEIVE_ITEMS_ERROR, payload),
fetchItems: ({ dispatch }) => { fetchItems: ({ dispatch }, payload) => {
dispatch('requestItems'); dispatch('requestItems');
return fetchItems() return fetchItems(payload)
.then(items => dispatch('receiveItemsSuccess', { items })) .then(items => dispatch('receiveItemsSuccess', { items }))
.catch(error => dispatch('receiveItemsError', { error })); .catch(error => dispatch('receiveItemsError', { error }));
}, },
......
...@@ -19,6 +19,14 @@ const createStore = () => ...@@ -19,6 +19,14 @@ const createStore = () =>
namespaced: true, namespaced: true,
...clusterDropdownStore(awsServices.fetchRegions), ...clusterDropdownStore(awsServices.fetchRegions),
}, },
vpcs: {
namespaced: true,
...clusterDropdownStore(awsServices.fetchVpcs),
},
subnets: {
namespaced: true,
...clusterDropdownStore(awsServices.fetchSubnets),
},
}, },
}); });
......
// eslint-disable-next-line import/prefer-default-export
export const SET_REGION = 'SET_REGION'; export const SET_REGION = 'SET_REGION';
export const SET_VPC = 'SET_VPC';
export const SET_SUBNET = 'SET_SUBNET';
...@@ -4,4 +4,10 @@ export default { ...@@ -4,4 +4,10 @@ export default {
[types.SET_REGION](state, { region }) { [types.SET_REGION](state, { region }) {
state.selectedRegion = region; state.selectedRegion = region;
}, },
  // Stores the VPC chosen in the VPC dropdown.
  [types.SET_VPC](state, { vpc }) {
    state.selectedVpc = vpc;
  },
  // Stores the subnet chosen in the subnet dropdown.
  [types.SET_SUBNET](state, { subnet }) {
    state.selectedSubnet = subnet;
  },
}; };
...@@ -91,7 +91,7 @@ module UploadsActions ...@@ -91,7 +91,7 @@ module UploadsActions
upload_paths = uploader.upload_paths(params[:filename]) upload_paths = uploader.upload_paths(params[:filename])
upload = Upload.find_by(model: model, uploader: uploader_class.to_s, path: upload_paths) upload = Upload.find_by(model: model, uploader: uploader_class.to_s, path: upload_paths)
upload&.build_uploader upload&.retrieve_uploader
end end
# rubocop: enable CodeReuse/ActiveRecord # rubocop: enable CodeReuse/ActiveRecord
......
...@@ -86,3 +86,5 @@ module Ci ...@@ -86,3 +86,5 @@ module Ci
end end
end end
end end
Ci::PipelineSchedule.prepend_if_ee('EE::Ci::PipelineSchedule')
...@@ -45,3 +45,5 @@ module Ci ...@@ -45,3 +45,5 @@ module Ci
end end
end end
end end
Ci::Trigger.prepend_if_ee('EE::Ci::Trigger')
...@@ -64,3 +64,5 @@ module Clusters ...@@ -64,3 +64,5 @@ module Clusters
end end
end end
end end
Clusters::Concerns::ApplicationCore.prepend_if_ee('EE::Clusters::Concerns::ApplicationCore')
...@@ -72,8 +72,15 @@ class CommitCollection ...@@ -72,8 +72,15 @@ class CommitCollection
end.compact] end.compact]
# Replace the commits, keeping the same order # Replace the commits, keeping the same order
@commits = @commits.map do |c| @commits = @commits.map do |original_commit|
replacements.fetch(c.id, c) # Return the original instance: if it didn't need to be batchloaded, it was
# already enriched.
batch_loaded_commit = replacements.fetch(original_commit.id, original_commit)
# If batch loading the commit failed, fall back to the original commit.
# We need to explicitly check `.nil?` since otherwise a `BatchLoader` instance
# that looks like `nil` is returned.
batch_loaded_commit.nil? ? original_commit : batch_loaded_commit
end end
self self
......
...@@ -15,7 +15,7 @@ class Upload < ApplicationRecord ...@@ -15,7 +15,7 @@ class Upload < ApplicationRecord
scope :with_files_stored_remotely, -> { where(store: ObjectStorage::Store::REMOTE) } scope :with_files_stored_remotely, -> { where(store: ObjectStorage::Store::REMOTE) }
before_save :calculate_checksum!, if: :foreground_checksummable? before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable? after_commit :schedule_checksum, if: :needs_checksum?
# as the FileUploader is not mounted, the default CarrierWave ActiveRecord # as the FileUploader is not mounted, the default CarrierWave ActiveRecord
# hooks are not executed and the file will not be deleted # hooks are not executed and the file will not be deleted
...@@ -53,20 +53,41 @@ class Upload < ApplicationRecord ...@@ -53,20 +53,41 @@ class Upload < ApplicationRecord
def calculate_checksum! def calculate_checksum!
self.checksum = nil self.checksum = nil
return unless checksummable? return unless needs_checksum?
self.checksum = Digest::SHA256.file(absolute_path).hexdigest self.checksum = Digest::SHA256.file(absolute_path).hexdigest
end end
# Initialize the associated Uploader class with current model
#
# @param [String] mounted_as
# @return [GitlabUploader] one of the subclasses, defined at the model's uploader attribute
def build_uploader(mounted_as = nil) def build_uploader(mounted_as = nil)
uploader_class.new(model, mounted_as || mount_point).tap do |uploader| uploader_class.new(model, mounted_as || mount_point).tap do |uploader|
uploader.upload = self uploader.upload = self
end
end
# Initialize the associated Uploader class with current model and
# retrieve existing file from the store to a local cache
#
# @param [String] mounted_as
# @return [GitlabUploader] one of the subclasses, defined at the model's uploader attribute
def retrieve_uploader(mounted_as = nil)
build_uploader(mounted_as).tap do |uploader|
uploader.retrieve_from_store!(identifier) uploader.retrieve_from_store!(identifier)
end end
end end
# This checks for existence of the upload on storage
#
# @return [Boolean] whether upload exists on storage
def exist? def exist?
exist = File.exist?(absolute_path) exist = if local?
File.exist?(absolute_path)
else
retrieve_uploader.exists?
end
# Help sysadmins find missing upload files # Help sysadmins find missing upload files
if persisted? && !exist if persisted? && !exist
...@@ -91,18 +112,24 @@ class Upload < ApplicationRecord ...@@ -91,18 +112,24 @@ class Upload < ApplicationRecord
store == ObjectStorage::Store::LOCAL store == ObjectStorage::Store::LOCAL
end end
# Returns whether generating checksum is needed
#
# This takes into account whether file exists, if any checksum exists
# or if the storage has checksum generation code implemented
#
# @return [Boolean] whether generating a checksum is needed
def needs_checksum?
checksum.nil? && local? && exist?
end
private private
def delete_file! def delete_file!
build_uploader.remove! retrieve_uploader.remove!
end
def checksummable?
checksum.nil? && local? && exist?
end end
def foreground_checksummable? def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD needs_checksum? && size <= CHECKSUM_THRESHOLD
end end
def schedule_checksum def schedule_checksum
......
...@@ -19,7 +19,7 @@ class AvatarUploader < GitlabUploader ...@@ -19,7 +19,7 @@ class AvatarUploader < GitlabUploader
end end
def absolute_path def absolute_path
self.class.absolute_path(model.avatar.upload) self.class.absolute_path(upload)
end end
private private
......
...@@ -99,6 +99,17 @@ class GitlabUploader < CarrierWave::Uploader::Base ...@@ -99,6 +99,17 @@ class GitlabUploader < CarrierWave::Uploader::Base
end end
end end
# Used to replace an existing upload with another +file+ without modifying stored metadata
# Use this method only to repair/replace an existing upload, or to upload to a Geo secondary node
#
# @param [CarrierWave::SanitizedFile] file that will replace existing upload
# @return CarrierWave::SanitizedFile
def replace_file_without_saving!(file)
raise ArgumentError, 'should be a CarrierWave::SanitizedFile' unless file.is_a? CarrierWave::SanitizedFile
storage.store!(file)
end
private private
# Designed to be overridden by child uploaders that have a dynamic path # Designed to be overridden by child uploaders that have a dynamic path
......
...@@ -12,7 +12,7 @@ class ImportIssuesCsvWorker ...@@ -12,7 +12,7 @@ class ImportIssuesCsvWorker
@project = Project.find(project_id) @project = Project.find(project_id)
@upload = Upload.find(upload_id) @upload = Upload.find(upload_id)
importer = Issues::ImportCsvService.new(@user, @project, @upload.build_uploader) importer = Issues::ImportCsvService.new(@user, @project, @upload.retrieve_uploader)
importer.execute importer.execute
@upload.destroy @upload.destroy
......
...@@ -22,7 +22,7 @@ module ObjectStorage ...@@ -22,7 +22,7 @@ module ObjectStorage
def build_uploader(subject, mount_point) def build_uploader(subject, mount_point)
case subject case subject
when Upload then subject.build_uploader(mount_point) when Upload then subject.retrieve_uploader(mount_point)
else else
subject.send(mount_point) # rubocop:disable GitlabSecurity/PublicSend subject.send(mount_point) # rubocop:disable GitlabSecurity/PublicSend
end end
......
...@@ -119,7 +119,7 @@ module ObjectStorage ...@@ -119,7 +119,7 @@ module ObjectStorage
end end
def build_uploaders(uploads) def build_uploaders(uploads)
uploads.map { |upload| upload.build_uploader(@mounted_as) } uploads.map { |upload| upload.retrieve_uploader(@mounted_as) }
end end
def migrate(uploads) def migrate(uploads)
......
---
title: Add pull_mirror_branch_prefix column on projects table
merge_request: 17368
author:
type: added
---
title: Adds sorting of packages at the project level
merge_request: 15448
author:
type: added
---
title: Fix viewing merge request from a fork that's being deleted
merge_request: 17894
author:
type: fixed
---
title: Update Pages to v1.11.0
merge_request: 18010
author:
type: other
# frozen_string_literal: true

module Com
  module Gitlab
    module Patch
      module DrawRoute
        extend ::Gitlab::Utils::Override

        # Overrides the base no-op `draw_com` hook so that, when this module
        # is prepended (on GitLab.com only), route files located under
        # com/config/routes are also drawn alongside CE/EE routes.
        override :draw_com
        def draw_com(routes_name)
          draw_route(route_path("com/config/routes/#{routes_name}.rb"))
        end
      end
    end
  end
end
# frozen_string_literal: true

require 'fast_spec_helper'
require 'com_spec_helper'

describe Gitlab::Patch::DrawRoute do
  subject do
    # Anonymous host class so the patch can be exercised without the real
    # Rails routing context; route_path mirrors the production lookup.
    Class.new do
      include Gitlab::Patch::DrawRoute

      def route_path(route_name)
        File.expand_path("../../../../../#{route_name}", __dir__)
      end
    end.new
  end

  before do
    # Stub evaluation of route-file contents; only the found/not-found
    # behaviour of #draw is under test here.
    allow(subject).to receive(:instance_eval)
  end

  it 'raises an error when nothing is drawn' do
    expect { subject.draw(:non_existing) }
      .to raise_error(described_class::RoutesNotFound)
  end
end
...@@ -22,6 +22,7 @@ module Gitlab ...@@ -22,6 +22,7 @@ module Gitlab
require_dependency Rails.root.join('lib/gitlab/current_settings') require_dependency Rails.root.join('lib/gitlab/current_settings')
require_dependency Rails.root.join('lib/gitlab/middleware/read_only') require_dependency Rails.root.join('lib/gitlab/middleware/read_only')
require_dependency Rails.root.join('lib/gitlab/middleware/basic_health_check') require_dependency Rails.root.join('lib/gitlab/middleware/basic_health_check')
require_dependency Rails.root.join('config/light_settings')
# Settings in config/environments/* take precedence over those specified here. # Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers # Application configuration should go into files in config/initializers
...@@ -62,6 +63,15 @@ module Gitlab ...@@ -62,6 +63,15 @@ module Gitlab
config.paths['app/views'].unshift "#{config.root}/ee/app/views" config.paths['app/views'].unshift "#{config.root}/ee/app/views"
end end
if LightSettings.com?
com_paths = config.eager_load_paths.each_with_object([]) do |path, memo|
com_path = config.root.join('com', Pathname.new(path).relative_path_from(config.root))
memo << com_path.to_s
end
config.eager_load_paths.push(*com_paths)
end
# Rake tasks ignore the eager loading settings, so we need to set the # Rake tasks ignore the eager loading settings, so we need to set the
# autoload paths explicitly # autoload paths explicitly
config.autoload_paths = config.eager_load_paths.dup config.autoload_paths = config.eager_load_paths.dup
......
# frozen_string_literal: true

require 'active_support/inflector'

# Helper mixed into every Module (see the `Module.prepend` below): lets code
# conditionally pull in a GitLab.com-only counterpart by constant name, so
# the base code carries no hard references to `Com::` constants.
module InjectComModule
  # Prepends the module named by +constant+ when running on GitLab.com.
  # With +with_descendants: true+, also prepends it into every descendant.
  def prepend_if_com(constant, with_descendants: false)
    return unless Gitlab.com?

    com_module = constant.constantize
    prepend(com_module)

    if with_descendants
      descendants.each { |descendant| descendant.prepend(com_module) }
    end
  end

  # Extends the receiver with the named module when running on GitLab.com.
  def extend_if_com(constant)
    extend(constant.constantize) if Gitlab.com?
  end

  # Includes the named module when running on GitLab.com.
  def include_if_com(constant)
    include(constant.constantize) if Gitlab.com?
  end
end

Module.prepend(InjectComModule)
# frozen_string_literal: true

# Lightweight settings reader that answers "is this GitLab.com?" by reading
# config/gitlab.yml directly, without booting the full Settings stack.
class LightSettings
  # `||=` avoids "already initialized constant" warnings if this file is
  # loaded more than once (it is required from multiple entry points).
  GL_HOST ||= 'gitlab.com'
  GL_SUBDOMAIN_REGEX ||= %r{\A[a-z0-9]+\.gitlab\.com\z}.freeze

  class << self
    # Whether the configured host is gitlab.com or a *.gitlab.com subdomain.
    # The answer is memoized per thread via Thread.current; the nil-check
    # distinguishes "not computed yet" from a cached `false`.
    def com?
      return Thread.current[:is_com] unless Thread.current[:is_com].nil?

      Thread.current[:is_com] = host == GL_HOST || gl_subdomain?
    end

    private

    # Parsed gitlab.yml section for the current Rails environment.
    # `aliases: true` is needed because gitlab.yml relies on YAML anchors.
    def config
      YAML.safe_load(File.read(settings_path), aliases: true)[Rails.env]
    end

    def settings_path
      Rails.root.join('config', 'gitlab.yml')
    end

    # Configured external host name, e.g. "gitlab.example.com".
    def host
      config['gitlab']['host']
    end

    def gl_subdomain?
      GL_SUBDOMAIN_REGEX === host
    end
  end
end
# frozen_string_literal: true

# Adds projects.pull_mirror_branch_prefix (string, max 50 chars), used to
# prefix branches created by pull mirroring.
class AddPullMirrorBranchPrefixToProjects < ActiveRecord::Migration[5.2]
  # Adding a nullable column without a default is an online operation.
  DOWNTIME = false

  def change
    add_column :projects, :pull_mirror_branch_prefix, :string, limit: 50
  end
end
...@@ -2922,6 +2922,7 @@ ActiveRecord::Schema.define(version: 2019_09_27_074328) do ...@@ -2922,6 +2922,7 @@ ActiveRecord::Schema.define(version: 2019_09_27_074328) do
t.boolean "emails_disabled" t.boolean "emails_disabled"
t.integer "max_pages_size" t.integer "max_pages_size"
t.integer "max_artifacts_size" t.integer "max_artifacts_size"
t.string "pull_mirror_branch_prefix", limit: 50
t.index "lower((name)::text)", name: "index_projects_on_lower_name" t.index "lower((name)::text)", name: "index_projects_on_lower_name"
t.index ["archived", "pending_delete", "merge_requests_require_code_owner_approval"], name: "projects_requiring_code_owner_approval", where: "((pending_delete = false) AND (archived = false) AND (merge_requests_require_code_owner_approval = true))" t.index ["archived", "pending_delete", "merge_requests_require_code_owner_approval"], name: "projects_requiring_code_owner_approval", where: "((pending_delete = false) AND (archived = false) AND (merge_requests_require_code_owner_approval = true))"
t.index ["created_at"], name: "index_projects_on_created_at" t.index ["created_at"], name: "index_projects_on_created_at"
......
# frozen_string_literal: true # frozen_string_literal: true
require 'pathname' require 'pathname'
require_relative '../config/light_settings'
module Gitlab module Gitlab
def self.root def self.root
...@@ -37,24 +38,18 @@ module Gitlab ...@@ -37,24 +38,18 @@ module Gitlab
COM_URL = 'https://gitlab.com' COM_URL = 'https://gitlab.com'
APP_DIRS_PATTERN = %r{^/?(app|config|ee|lib|spec|\(\w*\))}.freeze APP_DIRS_PATTERN = %r{^/?(app|config|ee|lib|spec|\(\w*\))}.freeze
SUBDOMAIN_REGEX = %r{\Ahttps://[a-z0-9]+\.gitlab\.com\z}.freeze
VERSION = File.read(root.join("VERSION")).strip.freeze VERSION = File.read(root.join("VERSION")).strip.freeze
INSTALLATION_TYPE = File.read(root.join("INSTALLATION_TYPE")).strip.freeze INSTALLATION_TYPE = File.read(root.join("INSTALLATION_TYPE")).strip.freeze
HTTP_PROXY_ENV_VARS = %w(http_proxy https_proxy HTTP_PROXY HTTPS_PROXY).freeze HTTP_PROXY_ENV_VARS = %w(http_proxy https_proxy HTTP_PROXY HTTPS_PROXY).freeze
def self.com? def self.com?
# Check `gl_subdomain?` as well to keep parity with gitlab.com LightSettings.com?
Gitlab.config.gitlab.url == COM_URL || gl_subdomain?
end end
def self.org? def self.org?
Gitlab.config.gitlab.url == 'https://dev.gitlab.org' Gitlab.config.gitlab.url == 'https://dev.gitlab.org'
end end
def self.gl_subdomain?
SUBDOMAIN_REGEX === Gitlab.config.gitlab.url
end
def self.dev_env_org_or_com? def self.dev_env_org_or_com?
dev_env_or_com? || org? dev_env_or_com? || org?
end end
...@@ -79,6 +74,10 @@ module Gitlab ...@@ -79,6 +74,10 @@ module Gitlab
yield if ee? yield if ee?
end end
def self.com
yield if com?
end
def self.http_proxy_env? def self.http_proxy_env?
HTTP_PROXY_ENV_VARS.any? { |name| ENV[name] } HTTP_PROXY_ENV_VARS.any? { |name| ENV[name] }
end end
......
...@@ -92,7 +92,7 @@ module Gitlab ...@@ -92,7 +92,7 @@ module Gitlab
def legacy_file_uploader def legacy_file_uploader
strong_memoize(:legacy_file_uploader) do strong_memoize(:legacy_file_uploader) do
uploader = upload.build_uploader uploader = upload.retrieve_uploader
uploader.retrieve_from_store!(File.basename(upload.path)) uploader.retrieve_from_store!(File.basename(upload.path))
uploader uploader
end end
......
...@@ -139,6 +139,7 @@ excluded_attributes: ...@@ -139,6 +139,7 @@ excluded_attributes:
- :mirror_trigger_builds - :mirror_trigger_builds
- :only_mirror_protected_branches - :only_mirror_protected_branches
- :pull_mirror_available_overridden - :pull_mirror_available_overridden
- :pull_mirror_branch_prefix
- :mirror_overwrites_diverged_branches - :mirror_overwrites_diverged_branches
- :packages_enabled - :packages_enabled
- :mirror_last_update_at - :mirror_last_update_at
......
...@@ -68,7 +68,7 @@ module Gitlab ...@@ -68,7 +68,7 @@ module Gitlab
yield(@project.avatar) yield(@project.avatar)
else else
project_uploads_except_avatar(avatar_path).find_each(batch_size: UPLOADS_BATCH_SIZE) do |upload| project_uploads_except_avatar(avatar_path).find_each(batch_size: UPLOADS_BATCH_SIZE) do |upload|
yield(upload.build_uploader) yield(upload.retrieve_uploader)
end end
end end
end end
......
...@@ -6,11 +6,12 @@ module Gitlab ...@@ -6,11 +6,12 @@ module Gitlab
module Patch module Patch
module DrawRoute module DrawRoute
prepend_if_ee('EE::Gitlab::Patch::DrawRoute') # rubocop: disable Cop/InjectEnterpriseEditionModule prepend_if_ee('EE::Gitlab::Patch::DrawRoute') # rubocop: disable Cop/InjectEnterpriseEditionModule
prepend_if_com('Com::Gitlab::Patch::DrawRoute')
RoutesNotFound = Class.new(StandardError) RoutesNotFound = Class.new(StandardError)
def draw(routes_name) def draw(routes_name)
drawn_any = draw_ce(routes_name) | draw_ee(routes_name) drawn_any = draw_ce(routes_name) | draw_ee(routes_name) | draw_com(routes_name)
drawn_any || raise(RoutesNotFound.new("Cannot find #{routes_name}")) drawn_any || raise(RoutesNotFound.new("Cannot find #{routes_name}"))
end end
...@@ -23,6 +24,10 @@ module Gitlab ...@@ -23,6 +24,10 @@ module Gitlab
true true
end end
def draw_com(_)
false
end
def route_path(routes_name) def route_path(routes_name)
Rails.root.join(routes_name) Rails.root.join(routes_name)
end end
......
...@@ -68,7 +68,7 @@ module Gitlab ...@@ -68,7 +68,7 @@ module Gitlab
} }
relation.find_each(find_params) do |upload| relation.find_each(find_params) do |upload|
clean(upload.build_uploader, dry_run: dry_run) clean(upload.retrieve_uploader, dry_run: dry_run)
sleep sleep_time if sleep_time sleep sleep_time if sleep_time
rescue => err rescue => err
logger.error "failed to sanitize #{upload_ref(upload)}: #{err.message}" logger.error "failed to sanitize #{upload_ref(upload)}: #{err.message}"
......
...@@ -32,7 +32,7 @@ module Gitlab ...@@ -32,7 +32,7 @@ module Gitlab
end end
def remote_object_exists?(upload) def remote_object_exists?(upload)
upload.build_uploader.file.exists? upload.retrieve_uploader.file.exists?
end end
end end
end end
......
...@@ -2421,6 +2421,9 @@ msgstr "" ...@@ -2421,6 +2421,9 @@ msgstr ""
msgid "Branch not loaded - %{branchId}" msgid "Branch not loaded - %{branchId}"
msgstr "" msgstr ""
msgid "Branch prefix"
msgstr ""
msgid "BranchSwitcherPlaceholder|Search branches" msgid "BranchSwitcherPlaceholder|Search branches"
msgstr "" msgstr ""
...@@ -3351,6 +3354,9 @@ msgstr "" ...@@ -3351,6 +3354,9 @@ msgstr ""
msgid "ClusterIntegration|Choose a prefix to be used for your namespaces. Defaults to your project path." msgid "ClusterIntegration|Choose a prefix to be used for your namespaces. Defaults to your project path."
msgstr "" msgstr ""
msgid "ClusterIntegration|Choose the %{startLink}subnets%{endLink} in your VPC where your worker nodes will run."
msgstr ""
msgid "ClusterIntegration|Choose which applications to install on your Kubernetes cluster. Helm Tiller is required to install any of the following applications." msgid "ClusterIntegration|Choose which applications to install on your Kubernetes cluster. Helm Tiller is required to install any of the following applications."
msgstr "" msgstr ""
...@@ -3390,9 +3396,15 @@ msgstr "" ...@@ -3390,9 +3396,15 @@ msgstr ""
msgid "ClusterIntegration|Copy Service Token" msgid "ClusterIntegration|Copy Service Token"
msgstr "" msgstr ""
msgid "ClusterIntegration|Could not load VPCs for the selected region"
msgstr ""
msgid "ClusterIntegration|Could not load regions from your AWS account" msgid "ClusterIntegration|Could not load regions from your AWS account"
msgstr "" msgstr ""
msgid "ClusterIntegration|Could not load subnets for the selected VPC"
msgstr ""
msgid "ClusterIntegration|Create Kubernetes cluster" msgid "ClusterIntegration|Create Kubernetes cluster"
msgstr "" msgstr ""
...@@ -3579,6 +3591,12 @@ msgstr "" ...@@ -3579,6 +3591,12 @@ msgstr ""
msgid "ClusterIntegration|Loading Regions" msgid "ClusterIntegration|Loading Regions"
msgstr "" msgstr ""
msgid "ClusterIntegration|Loading VPCs"
msgstr ""
msgid "ClusterIntegration|Loading subnets"
msgstr ""
msgid "ClusterIntegration|Machine type" msgid "ClusterIntegration|Machine type"
msgstr "" msgstr ""
...@@ -3591,6 +3609,9 @@ msgstr "" ...@@ -3591,6 +3609,9 @@ msgstr ""
msgid "ClusterIntegration|No IAM Roles found" msgid "ClusterIntegration|No IAM Roles found"
msgstr "" msgstr ""
msgid "ClusterIntegration|No VPCs found"
msgstr ""
msgid "ClusterIntegration|No machine types matched your search" msgid "ClusterIntegration|No machine types matched your search"
msgstr "" msgstr ""
...@@ -3603,6 +3624,9 @@ msgstr "" ...@@ -3603,6 +3624,9 @@ msgstr ""
msgid "ClusterIntegration|No region found" msgid "ClusterIntegration|No region found"
msgstr "" msgstr ""
msgid "ClusterIntegration|No subnet found"
msgstr ""
msgid "ClusterIntegration|No zones matched your search" msgid "ClusterIntegration|No zones matched your search"
msgstr "" msgstr ""
...@@ -3672,6 +3696,9 @@ msgstr "" ...@@ -3672,6 +3696,9 @@ msgstr ""
msgid "ClusterIntegration|Search IAM Roles" msgid "ClusterIntegration|Search IAM Roles"
msgstr "" msgstr ""
msgid "ClusterIntegration|Search VPCs"
msgstr ""
msgid "ClusterIntegration|Search machine types" msgid "ClusterIntegration|Search machine types"
msgstr "" msgstr ""
...@@ -3681,12 +3708,24 @@ msgstr "" ...@@ -3681,12 +3708,24 @@ msgstr ""
msgid "ClusterIntegration|Search regions" msgid "ClusterIntegration|Search regions"
msgstr "" msgstr ""
msgid "ClusterIntegration|Search subnets"
msgstr ""
msgid "ClusterIntegration|Search zones" msgid "ClusterIntegration|Search zones"
msgstr "" msgstr ""
msgid "ClusterIntegration|See and edit the details for your Kubernetes cluster" msgid "ClusterIntegration|See and edit the details for your Kubernetes cluster"
msgstr "" msgstr ""
msgid "ClusterIntegration|Select a VPC to choose a subnet"
msgstr ""
msgid "ClusterIntegration|Select a VPC to use for your EKS Cluster resources. To use a new VPC, first create one on %{startLink}Amazon Web Services%{endLink}."
msgstr ""
msgid "ClusterIntegration|Select a region to choose a VPC"
msgstr ""
msgid "ClusterIntegration|Select machine type" msgid "ClusterIntegration|Select machine type"
msgstr "" msgstr ""
...@@ -3735,6 +3774,9 @@ msgstr "" ...@@ -3735,6 +3774,9 @@ msgstr ""
msgid "ClusterIntegration|Specifying a domain will allow you to use Auto Review Apps and Auto Deploy stages for %{auto_devops_start}Auto DevOps%{auto_devops_end}. The domain should have a wildcard DNS configured matching the domain." msgid "ClusterIntegration|Specifying a domain will allow you to use Auto Review Apps and Auto Deploy stages for %{auto_devops_start}Auto DevOps%{auto_devops_end}. The domain should have a wildcard DNS configured matching the domain."
msgstr "" msgstr ""
msgid "ClusterIntegration|Subnet"
msgstr ""
msgid "ClusterIntegration|The Kubernetes certificate used to authenticate to the cluster." msgid "ClusterIntegration|The Kubernetes certificate used to authenticate to the cluster."
msgstr "" msgstr ""
...@@ -3783,6 +3825,9 @@ msgstr "" ...@@ -3783,6 +3825,9 @@ msgstr ""
msgid "ClusterIntegration|Uses the Cloud Run, Istio, and HTTP Load Balancing addons for this cluster." msgid "ClusterIntegration|Uses the Cloud Run, Istio, and HTTP Load Balancing addons for this cluster."
msgstr "" msgstr ""
msgid "ClusterIntegration|VPC"
msgstr ""
msgid "ClusterIntegration|Validating project billing status" msgid "ClusterIntegration|Validating project billing status"
msgstr "" msgstr ""
...@@ -3831,9 +3876,15 @@ msgstr "" ...@@ -3831,9 +3876,15 @@ msgstr ""
msgid "ClusterIntegration|sign up" msgid "ClusterIntegration|sign up"
msgstr "" msgstr ""
msgid "ClusterIntergation|Select a VPC"
msgstr ""
msgid "ClusterIntergation|Select a region" msgid "ClusterIntergation|Select a region"
msgstr "" msgstr ""
msgid "ClusterIntergation|Select a subnet"
msgstr ""
msgid "ClusterIntergation|Select role name" msgid "ClusterIntergation|Select role name"
msgstr "" msgstr ""
...@@ -8640,6 +8691,9 @@ msgstr "" ...@@ -8640,6 +8691,9 @@ msgstr ""
msgid "Introducing Your Conversational Development Index" msgid "Introducing Your Conversational Development Index"
msgstr "" msgstr ""
msgid "Invalid Git ref"
msgstr ""
msgid "Invalid Insights config file detected" msgid "Invalid Insights config file detected"
msgstr "" msgstr ""
...@@ -10120,6 +10174,9 @@ msgstr "" ...@@ -10120,6 +10174,9 @@ msgstr ""
msgid "Mirror user" msgid "Mirror user"
msgstr "" msgstr ""
msgid "Mirrored branches will have this prefix. If you enabled 'Only mirror protected branches' you need to include this prefix on protected branches in this project or nothing will be mirrored."
msgstr ""
msgid "Mirrored repositories" msgid "Mirrored repositories"
msgstr "" msgstr ""
...@@ -14890,6 +14947,12 @@ msgstr "" ...@@ -14890,6 +14947,12 @@ msgstr ""
msgid "SortOptions|Start soon" msgid "SortOptions|Start soon"
msgstr "" msgstr ""
msgid "SortOptions|Type"
msgstr ""
msgid "SortOptions|Version"
msgstr ""
msgid "SortOptions|Weight" msgid "SortOptions|Weight"
msgstr "" msgstr ""
......
# frozen_string_literal: true
Settings.gitlab[:url] = "https://test.gitlab.com"
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
FactoryBot.define do FactoryBot.define do
factory :upload do factory :upload do
model { build(:project) } model { create(:project) }
size { 100.kilobytes } size { 100.kilobytes }
uploader { "AvatarUploader" } uploader { "AvatarUploader" }
mount_point { :avatar } mount_point { :avatar }
...@@ -11,23 +11,27 @@ FactoryBot.define do ...@@ -11,23 +11,27 @@ FactoryBot.define do
# we should build a mount agnostic upload by default # we should build a mount agnostic upload by default
transient do transient do
filename { 'myfile.jpg' } filename { 'avatar.jpg' }
end end
# this needs to comply with RecordsUpload::Concern#upload_path path do
path { File.join("uploads/-/system", model.class.underscore, mount_point.to_s, 'avatar.jpg') } uploader_instance = Object.const_get(uploader.to_s).new(model, mount_point)
File.join(uploader_instance.store_dir, filename)
end
trait :personal_snippet_upload do trait :personal_snippet_upload do
uploader { "PersonalFileUploader" } model { create(:personal_snippet) }
path { File.join(secret, filename) } path { File.join(secret, filename) }
model { build(:personal_snippet) } uploader { "PersonalFileUploader" }
secret { SecureRandom.hex } secret { SecureRandom.hex }
mount_point { nil }
end end
trait :issuable_upload do trait :issuable_upload do
uploader { "FileUploader" } uploader { "FileUploader" }
path { File.join(secret, filename) } path { File.join(secret, filename) }
secret { SecureRandom.hex } secret { SecureRandom.hex }
mount_point { nil }
end end
trait :with_file do trait :with_file do
...@@ -42,22 +46,23 @@ FactoryBot.define do ...@@ -42,22 +46,23 @@ FactoryBot.define do
end end
trait :namespace_upload do trait :namespace_upload do
model { build(:group) } model { create(:group) }
path { File.join(secret, filename) } path { File.join(secret, filename) }
uploader { "NamespaceFileUploader" } uploader { "NamespaceFileUploader" }
secret { SecureRandom.hex } secret { SecureRandom.hex }
mount_point { nil }
end end
trait :favicon_upload do trait :favicon_upload do
model { build(:appearance) } model { create(:appearance) }
path { File.join(secret, filename) }
uploader { "FaviconUploader" } uploader { "FaviconUploader" }
secret { SecureRandom.hex } secret { SecureRandom.hex }
mount_point { :favicon }
end end
trait :attachment_upload do trait :attachment_upload do
mount_point { :attachment } mount_point { :attachment }
model { build(:note) } model { create(:note) }
uploader { "AttachmentUploader" } uploader { "AttachmentUploader" }
end end
end end
......
...@@ -5,10 +5,12 @@ ENV['IN_MEMORY_APPLICATION_SETTINGS'] = 'true' ...@@ -5,10 +5,12 @@ ENV['IN_MEMORY_APPLICATION_SETTINGS'] = 'true'
require 'active_support/dependencies' require 'active_support/dependencies'
require_relative '../config/initializers/0_inject_enterprise_edition_module' require_relative '../config/initializers/0_inject_enterprise_edition_module'
require_relative '../config/initializers/0_inject_com_module'
require_relative '../config/settings' require_relative '../config/settings'
require_relative 'support/rspec' require_relative 'support/rspec'
require 'active_support/all' require 'active_support/all'
ActiveSupport::Dependencies.autoload_paths << 'lib' ActiveSupport::Dependencies.autoload_paths << 'lib'
ActiveSupport::Dependencies.autoload_paths << 'ee/lib' ActiveSupport::Dependencies.autoload_paths << 'ee/lib'
ActiveSupport::Dependencies.autoload_paths << 'com/lib'
ActiveSupport::XmlMini.backend = 'Nokogiri' ActiveSupport::XmlMini.backend = 'Nokogiri'
# frozen_string_literal: true
require 'spec_helper'
# This is a feature spec because the problems arrise when rendering the view for
# an actual project for which the repository is removed but the cached not
# updated.
# This can occur when the fork a merge request is created from is in the process
# of being destroyed.
describe 'User views merged merge request from deleted fork' do
include ProjectForksHelper
let(:project) { create(:project, :repository) }
let(:source_project) { fork_project(project, nil, repository: true) }
let(:user) { project.owner }
let!(:merge_request) { create(:merge_request, :merged, source_project: source_project, target_project: project) }
before do
sign_in user
fork_owner = source_project.namespace.owners.first
# Place the source_project in the weird in between state
source_project.update_attribute(:pending_delete, true)
Projects::DestroyService.new(source_project, fork_owner, {}).__send__(:trash_repositories!)
end
it 'correctly shows the merge request' do
visit(merge_request_path(merge_request))
expect(page).to have_content(merge_request.title)
end
end
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Commit pipeline status component when polling is not successful renders not found CI icon without loader 1`] = `
<div
class="ci-status-link"
>
<a>
<ciicon-stub
aria-label="Pipeline: not found"
cssclasses=""
data-container="body"
data-original-title="Pipeline: not found"
size="24"
status="[object Object]"
title=""
/>
</a>
</div>
`;
exports[`Commit pipeline status component when polling is successful renders CI icon without loader 1`] = `
<div
class="ci-status-link"
>
<a
href="/frontend-fixtures/pipelines-project/pipelines/47"
>
<ciicon-stub
aria-label="Pipeline: pending"
cssclasses=""
data-container="body"
data-original-title="Pipeline: pending"
size="24"
status="[object Object]"
title=""
/>
</a>
</div>
`;
...@@ -2,6 +2,7 @@ import Visibility from 'visibilityjs'; ...@@ -2,6 +2,7 @@ import Visibility from 'visibilityjs';
import { GlLoadingIcon } from '@gitlab/ui'; import { GlLoadingIcon } from '@gitlab/ui';
import Poll from '~/lib/utils/poll'; import Poll from '~/lib/utils/poll';
import flash from '~/flash'; import flash from '~/flash';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import CommitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue'; import CommitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue';
import { shallowMount } from '@vue/test-utils'; import { shallowMount } from '@vue/test-utils';
import { getJSONFixture } from '../helpers/fixtures'; import { getJSONFixture } from '../helpers/fixtures';
...@@ -36,6 +37,10 @@ describe('Commit pipeline status component', () => { ...@@ -36,6 +37,10 @@ describe('Commit pipeline status component', () => {
}); });
}; };
const findLoader = () => wrapper.find(GlLoadingIcon);
const findLink = () => wrapper.find('a');
const findCiIcon = () => findLink().find(CiIcon);
afterEach(() => { afterEach(() => {
wrapper.destroy(); wrapper.destroy();
wrapper = null; wrapper = null;
...@@ -111,14 +116,14 @@ describe('Commit pipeline status component', () => { ...@@ -111,14 +116,14 @@ describe('Commit pipeline status component', () => {
it('shows the loading icon at start', () => { it('shows the loading icon at start', () => {
createComponent(); createComponent();
expect(wrapper.find(GlLoadingIcon).exists()).toBe(true); expect(findLoader().exists()).toBe(true);
pollConfig.successCallback({ pollConfig.successCallback({
data: { pipelines: [] }, data: { pipelines: [] },
}); });
return wrapper.vm.$nextTick().then(() => { return wrapper.vm.$nextTick().then(() => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(false); expect(findLoader().exists()).toBe(false);
}); });
}); });
...@@ -130,8 +135,17 @@ describe('Commit pipeline status component', () => { ...@@ -130,8 +135,17 @@ describe('Commit pipeline status component', () => {
return wrapper.vm.$nextTick(); return wrapper.vm.$nextTick();
}); });
it('renders CI icon without loader', () => { it('does not render loader', () => {
expect(wrapper.element).toMatchSnapshot(); expect(findLoader().exists()).toBe(false);
});
it('renders link with href', () => {
expect(findLink().attributes('href')).toEqual(mockCiStatus.details_path);
});
it('renders CI icon', () => {
expect(findCiIcon().attributes('data-original-title')).toEqual('Pipeline: pending');
expect(findCiIcon().props('status')).toEqual(mockCiStatus);
}); });
}); });
...@@ -140,8 +154,21 @@ describe('Commit pipeline status component', () => { ...@@ -140,8 +154,21 @@ describe('Commit pipeline status component', () => {
pollConfig.errorCallback(); pollConfig.errorCallback();
}); });
it('renders not found CI icon without loader', () => { it('does not render loader', () => {
expect(wrapper.element).toMatchSnapshot(); expect(findLoader().exists()).toBe(false);
});
it('renders link with href', () => {
expect(findLink().attributes('href')).toBeUndefined();
});
it('renders not found CI icon', () => {
expect(findCiIcon().attributes('data-original-title')).toEqual('Pipeline: not found');
expect(findCiIcon().props('status')).toEqual({
text: 'not found',
icon: 'status_notfound',
group: 'notfound',
});
}); });
it('displays flash error message', () => { it('displays flash error message', () => {
......
...@@ -4,6 +4,7 @@ import Vue from 'vue'; ...@@ -4,6 +4,7 @@ import Vue from 'vue';
import EksClusterConfigurationForm from '~/create_cluster/eks_cluster/components/eks_cluster_configuration_form.vue'; import EksClusterConfigurationForm from '~/create_cluster/eks_cluster/components/eks_cluster_configuration_form.vue';
import RegionDropdown from '~/create_cluster/eks_cluster/components/region_dropdown.vue'; import RegionDropdown from '~/create_cluster/eks_cluster/components/region_dropdown.vue';
import eksClusterFormState from '~/create_cluster/eks_cluster/store/state';
import clusterDropdownStoreState from '~/create_cluster/eks_cluster/store/cluster_dropdown/state'; import clusterDropdownStoreState from '~/create_cluster/eks_cluster/store/cluster_dropdown/state';
const localVue = createLocalVue(); const localVue = createLocalVue();
...@@ -12,29 +13,59 @@ localVue.use(Vuex); ...@@ -12,29 +13,59 @@ localVue.use(Vuex);
describe('EksClusterConfigurationForm', () => { describe('EksClusterConfigurationForm', () => {
let store; let store;
let actions; let actions;
let state;
let regionsState; let regionsState;
let vpcsState;
let subnetsState;
let vpcsActions;
let regionsActions; let regionsActions;
let subnetsActions;
let vm; let vm;
beforeEach(() => { beforeEach(() => {
state = eksClusterFormState();
actions = { actions = {
setRegion: jest.fn(), setRegion: jest.fn(),
setVpc: jest.fn(), setVpc: jest.fn(),
setSubnet: jest.fn(),
}; };
regionsActions = { regionsActions = {
fetchItems: jest.fn(), fetchItems: jest.fn(),
}; };
vpcsActions = {
fetchItems: jest.fn(),
};
subnetsActions = {
fetchItems: jest.fn(),
};
regionsState = { regionsState = {
...clusterDropdownStoreState(), ...clusterDropdownStoreState(),
}; };
vpcsState = {
...clusterDropdownStoreState(),
};
subnetsState = {
...clusterDropdownStoreState(),
};
store = new Vuex.Store({ store = new Vuex.Store({
state,
actions, actions,
modules: { modules: {
vpcs: {
namespaced: true,
state: vpcsState,
actions: vpcsActions,
},
regions: { regions: {
namespaced: true, namespaced: true,
state: regionsState, state: regionsState,
actions: regionsActions, actions: regionsActions,
}, },
subnets: {
namespaced: true,
state: subnetsState,
actions: subnetsActions,
},
}, },
}); });
}); });
...@@ -51,6 +82,8 @@ describe('EksClusterConfigurationForm', () => { ...@@ -51,6 +82,8 @@ describe('EksClusterConfigurationForm', () => {
}); });
const findRegionDropdown = () => vm.find(RegionDropdown); const findRegionDropdown = () => vm.find(RegionDropdown);
const findVpcDropdown = () => vm.find('[field-id="eks-vpc"]');
const findSubnetDropdown = () => vm.find('[field-id="eks-subnet"]');
describe('when mounted', () => { describe('when mounted', () => {
it('fetches available regions', () => { it('fetches available regions', () => {
...@@ -62,16 +95,72 @@ describe('EksClusterConfigurationForm', () => { ...@@ -62,16 +95,72 @@ describe('EksClusterConfigurationForm', () => {
regionsState.isLoadingItems = true; regionsState.isLoadingItems = true;
return Vue.nextTick().then(() => { return Vue.nextTick().then(() => {
expect(findRegionDropdown().props('loading')).toEqual(regionsState.isLoadingItems); expect(findRegionDropdown().props('loading')).toBe(regionsState.isLoadingItems);
}); });
}); });
it('sets regions to RegionDropdown regions property', () => { it('sets regions to RegionDropdown regions property', () => {
expect(findRegionDropdown().props('regions')).toEqual(regionsState.items); expect(findRegionDropdown().props('regions')).toBe(regionsState.items);
}); });
it('sets loadingRegionsError to RegionDropdown error property', () => { it('sets loadingRegionsError to RegionDropdown error property', () => {
expect(findRegionDropdown().props('error')).toEqual(regionsState.loadingItemsError); expect(findRegionDropdown().props('error')).toBe(regionsState.loadingItemsError);
});
it('disables VpcDropdown when no region is selected', () => {
expect(findVpcDropdown().props('disabled')).toBe(true);
});
it('enables VpcDropdown when no region is selected', () => {
state.selectedRegion = { name: 'west-1 ' };
return Vue.nextTick().then(() => {
expect(findVpcDropdown().props('disabled')).toBe(false);
});
});
it('sets isLoadingVpcs to VpcDropdown loading property', () => {
vpcsState.isLoadingItems = true;
return Vue.nextTick().then(() => {
expect(findVpcDropdown().props('loading')).toBe(vpcsState.isLoadingItems);
});
});
it('sets vpcs to VpcDropdown items property', () => {
expect(findVpcDropdown().props('items')).toBe(vpcsState.items);
});
it('sets loadingVpcsError to VpcDropdown hasErrors property', () => {
expect(findVpcDropdown().props('hasErrors')).toBe(vpcsState.loadingItemsError);
});
it('disables SubnetDropdown when no vpc is selected', () => {
expect(findSubnetDropdown().props('disabled')).toBe(true);
});
it('enables SubnetDropdown when a vpc is selected', () => {
state.selectedVpc = { name: 'vpc-1 ' };
return Vue.nextTick().then(() => {
expect(findSubnetDropdown().props('disabled')).toBe(false);
});
});
it('sets isLoadingSubnets to SubnetDropdown loading property', () => {
subnetsState.isLoadingItems = true;
return Vue.nextTick().then(() => {
expect(findSubnetDropdown().props('loading')).toBe(subnetsState.isLoadingItems);
});
});
it('sets subnets to SubnetDropdown items property', () => {
expect(findSubnetDropdown().props('items')).toBe(subnetsState.items);
});
it('sets loadingSubnetsError to SubnetDropdown hasErrors property', () => {
expect(findSubnetDropdown().props('hasErrors')).toBe(subnetsState.loadingItemsError);
}); });
describe('when region is selected', () => { describe('when region is selected', () => {
...@@ -84,5 +173,37 @@ describe('EksClusterConfigurationForm', () => { ...@@ -84,5 +173,37 @@ describe('EksClusterConfigurationForm', () => {
it('dispatches setRegion action', () => { it('dispatches setRegion action', () => {
expect(actions.setRegion).toHaveBeenCalledWith(expect.anything(), { region }, undefined); expect(actions.setRegion).toHaveBeenCalledWith(expect.anything(), { region }, undefined);
}); });
it('fetches available vpcs', () => {
expect(vpcsActions.fetchItems).toHaveBeenCalledWith(expect.anything(), { region }, undefined);
});
});
describe('when vpc is selected', () => {
const vpc = { name: 'vpc-1' };
beforeEach(() => {
findVpcDropdown().vm.$emit('input', vpc);
});
it('dispatches setVpc action', () => {
expect(actions.setVpc).toHaveBeenCalledWith(expect.anything(), { vpc }, undefined);
});
it('dispatches fetchSubnets action', () => {
expect(subnetsActions.fetchItems).toHaveBeenCalledWith(expect.anything(), { vpc }, undefined);
});
});
describe('when a subnet is selected', () => {
const subnet = { name: 'subnet-1' };
beforeEach(() => {
findSubnetDropdown().vm.$emit('input', subnet);
});
it('dispatches setSubnet action', () => {
expect(actions.setSubnet).toHaveBeenCalledWith(expect.anything(), { subnet }, undefined);
});
}); });
}); });
import testAction from 'helpers/vuex_action_helper'; import testAction from 'helpers/vuex_action_helper';
import createState from '~/create_cluster/eks_cluster/store/state'; import createState from '~/create_cluster/eks_cluster/store/state';
import * as types from '~/create_cluster/eks_cluster/store/mutation_types';
import * as actions from '~/create_cluster/eks_cluster/store/actions'; import * as actions from '~/create_cluster/eks_cluster/store/actions';
import { SET_REGION, SET_VPC, SET_SUBNET } from '~/create_cluster/eks_cluster/store/mutation_types';
describe('EKS Cluster Store Actions', () => { describe('EKS Cluster Store Actions', () => {
describe('setRegion', () => { let region;
it(`commits ${types.SET_REGION} mutation`, () => { let vpc;
const region = { name: 'west-1' }; let subnet;
testAction(actions.setRegion, { region }, createState(), [ beforeEach(() => {
{ type: types.SET_REGION, payload: { region } }, region = { name: 'regions-1' };
]); vpc = { name: 'vpc-1' };
}); subnet = { name: 'subnet-1' };
});
it.each`
action | mutation | payload | payloadDescription
${'setRegion'} | ${SET_REGION} | ${{ region }} | ${'region'}
${'setVpc'} | ${SET_VPC} | ${{ vpc }} | ${'vpc'}
${'setSubnet'} | ${SET_SUBNET} | ${{ subnet }} | ${'subnet'}
`(`$action commits $mutation with $payloadDescription payload`, data => {
const { action, mutation, payload } = data;
testAction(actions[action], payload, createState(), [{ type: mutation, payload }]);
}); });
}); });
import { SET_REGION } from '~/create_cluster/eks_cluster/store/mutation_types'; import { SET_REGION, SET_VPC, SET_SUBNET } from '~/create_cluster/eks_cluster/store/mutation_types';
import createState from '~/create_cluster/eks_cluster/store/state'; import createState from '~/create_cluster/eks_cluster/store/state';
import mutations from '~/create_cluster/eks_cluster/store/mutations'; import mutations from '~/create_cluster/eks_cluster/store/mutations';
describe('Create EKS cluster store mutations', () => { describe('Create EKS cluster store mutations', () => {
let state; let state;
let region; let region;
let vpc;
let subnet;
beforeEach(() => { beforeEach(() => {
region = { name: 'regions-1' }; region = { name: 'regions-1' };
vpc = { name: 'vpc-1' };
subnet = { name: 'subnet-1' };
state = createState(); state = createState();
}); });
it.each` it.each`
mutation | mutatedProperty | payload | expectedValue | expectedValueDescription mutation | mutatedProperty | payload | expectedValue | expectedValueDescription
${SET_REGION} | ${'selectedRegion'} | ${{ region }} | ${region} | ${'selected region payload'} ${SET_REGION} | ${'selectedRegion'} | ${{ region }} | ${region} | ${'selected region payload'}
${SET_VPC} | ${'selectedVpc'} | ${{ vpc }} | ${vpc} | ${'selected vpc payload'}
${SET_SUBNET} | ${'selectedSubnet'} | ${{ subnet }} | ${subnet} | ${'selected sybnet payload'}
`(`$mutation sets $mutatedProperty to $expectedValueDescription`, data => { `(`$mutation sets $mutatedProperty to $expectedValueDescription`, data => {
const { mutation, mutatedProperty, payload, expectedValue } = data; const { mutation, mutatedProperty, payload, expectedValue } = data;
......
...@@ -32,7 +32,7 @@ describe Gitlab::BackgroundMigration::LegacyUploadMover do ...@@ -32,7 +32,7 @@ describe Gitlab::BackgroundMigration::LegacyUploadMover do
if with_file if with_file
upload = create(:upload, :with_file, :attachment_upload, params) upload = create(:upload, :with_file, :attachment_upload, params)
model.update(attachment: upload.build_uploader) model.update(attachment: upload.retrieve_uploader)
model.attachment.upload model.attachment.upload
else else
create(:upload, :attachment_upload, params) create(:upload, :attachment_upload, params)
......
...@@ -24,7 +24,7 @@ describe Gitlab::BackgroundMigration::LegacyUploadsMigrator do ...@@ -24,7 +24,7 @@ describe Gitlab::BackgroundMigration::LegacyUploadsMigrator do
if with_file if with_file
upload = create(:upload, :with_file, :attachment_upload, params) upload = create(:upload, :with_file, :attachment_upload, params)
model.update(attachment: upload.build_uploader) model.update(attachment: upload.retrieve_uploader)
model.attachment.upload model.attachment.upload
else else
create(:upload, :attachment_upload, params) create(:upload, :attachment_upload, params)
......
...@@ -83,7 +83,7 @@ describe Gitlab::ImportExport::UploadsManager do ...@@ -83,7 +83,7 @@ describe Gitlab::ImportExport::UploadsManager do
it 'restores the file' do it 'restores the file' do
manager.restore manager.restore
expect(project.uploads.map { |u| u.build_uploader.filename }).to include('dummy.txt') expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('dummy.txt')
end end
end end
end end
...@@ -27,7 +27,7 @@ describe Gitlab::ImportExport::UploadsRestorer do ...@@ -27,7 +27,7 @@ describe Gitlab::ImportExport::UploadsRestorer do
it 'copies the uploads to the project path' do it 'copies the uploads to the project path' do
subject.restore subject.restore
expect(project.uploads.map { |u| u.build_uploader.filename }).to include('dummy.txt') expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('dummy.txt')
end end
end end
...@@ -43,7 +43,7 @@ describe Gitlab::ImportExport::UploadsRestorer do ...@@ -43,7 +43,7 @@ describe Gitlab::ImportExport::UploadsRestorer do
it 'copies the uploads to the project path' do it 'copies the uploads to the project path' do
subject.restore subject.restore
expect(project.uploads.map { |u| u.build_uploader.filename }).to include('dummy.txt') expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('dummy.txt')
end end
end end
end end
......
...@@ -58,7 +58,7 @@ describe Gitlab::Sanitizers::Exif do ...@@ -58,7 +58,7 @@ describe Gitlab::Sanitizers::Exif do
end end
describe '#clean' do describe '#clean' do
let(:uploader) { create(:upload, :with_file, :issuable_upload).build_uploader } let(:uploader) { create(:upload, :with_file, :issuable_upload).retrieve_uploader }
context "no dry run" do context "no dry run" do
it "removes exif from the image" do it "removes exif from the image" do
......
...@@ -71,26 +71,30 @@ describe Gitlab do ...@@ -71,26 +71,30 @@ describe Gitlab do
end end
describe '.com?' do describe '.com?' do
before do
Thread.current[:is_com] = nil
end
it 'is true when on GitLab.com' do it 'is true when on GitLab.com' do
stub_config_setting(url: 'https://gitlab.com') allow(LightSettings).to receive(:host).and_return('gitlab.com')
expect(described_class.com?).to eq true expect(described_class.com?).to eq true
end end
it 'is true when on staging' do it 'is true when on staging' do
stub_config_setting(url: 'https://staging.gitlab.com') allow(LightSettings).to receive(:host).and_return('staging.gitlab.com')
expect(described_class.com?).to eq true expect(described_class.com?).to eq true
end end
it 'is true when on other gitlab subdomain' do it 'is true when on other gitlab subdomain' do
stub_config_setting(url: 'https://example.gitlab.com') allow(LightSettings).to receive(:host).and_return('example.gitlab.com')
expect(described_class.com?).to eq true expect(described_class.com?).to eq true
end end
it 'is false when not on GitLab.com' do it 'is false when not on GitLab.com' do
stub_config_setting(url: 'http://example.com') allow(LightSettings).to receive(:host).and_return('example.com')
expect(described_class.com?).to eq false expect(described_class.com?).to eq false
end end
......
...@@ -149,6 +149,17 @@ describe CommitCollection do ...@@ -149,6 +149,17 @@ describe CommitCollection do
collection.enrich! collection.enrich!
end end
it 'returns the original commit if the commit could not be lazy loaded' do
collection = described_class.new(project, [hash_commit])
unexisting_lazy_commit = Commit.lazy(project, Gitlab::Git::BLANK_SHA)
expect(Commit).to receive(:lazy).with(project, hash_commit.id).and_return(unexisting_lazy_commit)
collection.enrich!
expect(collection.commits).to contain_exactly(hash_commit)
end
end end
end end
......
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
require 'spec_helper' require 'spec_helper'
describe Upload do describe Upload do
describe 'assocations' do describe 'associations' do
it { is_expected.to belong_to(:model) } it { is_expected.to belong_to(:model) }
end end
...@@ -107,6 +107,52 @@ describe Upload do ...@@ -107,6 +107,52 @@ describe Upload do
end end
end end
describe '#build_uploader' do
it 'returns a uploader object with current upload associated with it' do
subject = build(:upload)
uploader = subject.build_uploader
expect(uploader.upload).to eq(subject)
expect(uploader.mounted_as).to eq(subject.send(:mount_point))
expect(uploader.file).to be_nil
end
end
describe '#retrieve_uploader' do
it 'returns a uploader object with current uploader associated with and cache retrieved' do
subject = build(:upload)
uploader = subject.retrieve_uploader
expect(uploader.upload).to eq(subject)
expect(uploader.mounted_as).to eq(subject.send(:mount_point))
expect(uploader.file).not_to be_nil
end
end
describe '#needs_checksum?' do
context 'with local storage' do
it 'returns true when no checksum exists' do
subject = create(:upload, :with_file, checksum: nil)
expect(subject.needs_checksum?).to be_truthy
end
it 'returns false when checksum is already present' do
subject = create(:upload, :with_file, checksum: 'something')
expect(subject.needs_checksum?).to be_falsey
end
end
context 'with remote storage' do
subject { build(:upload, :object_storage) }
it 'returns false' do
expect(subject.needs_checksum?).to be_falsey
end
end
end
describe '#exist?' do describe '#exist?' do
it 'returns true when the file exists' do it 'returns true when the file exists' do
upload = described_class.new(path: __FILE__, store: ObjectStorage::Store::LOCAL) upload = described_class.new(path: __FILE__, store: ObjectStorage::Store::LOCAL)
......
...@@ -44,7 +44,7 @@ describe Uploads::Fog do ...@@ -44,7 +44,7 @@ describe Uploads::Fog do
subject { data_store.delete_keys(keys) } subject { data_store.delete_keys(keys) }
before do before do
uploads.each { |upload| upload.build_uploader.migrate!(2) } uploads.each { |upload| upload.retrieve_uploader.migrate!(2) }
end end
it 'deletes multiple data' do it 'deletes multiple data' do
......
...@@ -104,7 +104,7 @@ shared_examples 'handle uploads' do ...@@ -104,7 +104,7 @@ shared_examples 'handle uploads' do
context "when neither the uploader nor the model exists" do context "when neither the uploader nor the model exists" do
before do before do
allow_any_instance_of(Upload).to receive(:build_uploader).and_return(nil) allow_any_instance_of(Upload).to receive(:retrieve_uploader).and_return(nil)
allow(controller).to receive(:find_model).and_return(nil) allow(controller).to receive(:find_model).and_return(nil)
end end
......
...@@ -41,7 +41,8 @@ shared_examples_for 'model with uploads' do |supports_fileuploads| ...@@ -41,7 +41,8 @@ shared_examples_for 'model with uploads' do |supports_fileuploads|
end end
it 'deletes remote files' do it 'deletes remote files' do
expect_any_instance_of(Uploads::Fog).to receive(:delete_keys).with(uploads.map(&:path)) expected_array = array_including(*uploads.map(&:path))
expect_any_instance_of(Uploads::Fog).to receive(:delete_keys).with(expected_array)
model_object.destroy model_object.destroy
end end
......
...@@ -3,7 +3,7 @@ require 'spec_helper' ...@@ -3,7 +3,7 @@ require 'spec_helper'
describe FileUploader do describe FileUploader do
let(:group) { create(:group, name: 'awesome') } let(:group) { create(:group, name: 'awesome') }
let(:project) { create(:project, :legacy_storage, namespace: group, name: 'project') } let(:project) { create(:project, :legacy_storage, namespace: group, name: 'project') }
let(:uploader) { described_class.new(project) } let(:uploader) { described_class.new(project, :avatar) }
let(:upload) { double(model: project, path: 'secret/foo.jpg') } let(:upload) { double(model: project, path: 'secret/foo.jpg') }
subject { uploader } subject { uploader }
...@@ -184,6 +184,14 @@ describe FileUploader do ...@@ -184,6 +184,14 @@ describe FileUploader do
end end
end end
describe '#replace_file_without_saving!' do
let(:replacement) { Tempfile.create('replacement.jpg') }
it 'replaces an existing file without changing its metadata' do
expect { subject.replace_file_without_saving! CarrierWave::SanitizedFile.new(replacement) }.not_to change { subject.upload }
end
end
context 'when remote file is used' do context 'when remote file is used' do
let(:temp_file) { Tempfile.new("test") } let(:temp_file) { Tempfile.new("test") }
......
...@@ -69,6 +69,16 @@ describe GitlabUploader do ...@@ -69,6 +69,16 @@ describe GitlabUploader do
end end
end end
describe '#replace_file_without_saving!' do
it 'allows file to be replaced without triggering any callbacks' do
new_file = CarrierWave::SanitizedFile.new(Tempfile.new)
expect(subject).not_to receive(:with_callbacks)
subject.replace_file_without_saving!(new_file)
end
end
describe '#open' do describe '#open' do
context 'when trace is stored in File storage' do context 'when trace is stored in File storage' do
context 'when file exists' do context 'when file exists' do
......
...@@ -42,33 +42,23 @@ describe ObjectStorage::MigrateUploadsWorker, :sidekiq do ...@@ -42,33 +42,23 @@ describe ObjectStorage::MigrateUploadsWorker, :sidekiq do
end end
describe '.sanity_check!' do describe '.sanity_check!' do
shared_examples 'raises a SanityCheckError' do shared_examples 'raises a SanityCheckError' do |expected_message|
let(:mount_point) { nil } let(:mount_point) { nil }
it do it do
expect { described_class.sanity_check!(uploads, model_class, mount_point) } expect { described_class.sanity_check!(uploads, model_class, mount_point) }
.to raise_error(described_class::SanityCheckError) .to raise_error(described_class::SanityCheckError).with_message(expected_message)
end end
end end
before do
stub_const("WrongModel", Class.new)
end
context 'uploader types mismatch' do context 'uploader types mismatch' do
let!(:outlier) { create(:upload, uploader: 'GitlabUploader') } let!(:outlier) { create(:upload, uploader: 'GitlabUploader') }
include_examples 'raises a SanityCheckError' include_examples 'raises a SanityCheckError', /Multiple uploaders found/
end
context 'model types mismatch' do
let!(:outlier) { create(:upload, model_type: 'WrongModel') }
include_examples 'raises a SanityCheckError'
end end
context 'mount point not found' do context 'mount point not found' do
include_examples 'raises a SanityCheckError' do include_examples 'raises a SanityCheckError', /Mount point [a-z:]+ not found in/ do
let(:mount_point) { :potato } let(:mount_point) { :potato }
end end
end end
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册