diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 715ab55a10094fa4dccee6e5940fdc2aaf0cf527..0056b329f40f660dd98bd210c10012c740e5e163 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -51,7 +51,6 @@ module Ci
     scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
     scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
-    scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
     scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
     scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
     scope :ref_protected, -> { where(protected: true) }
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index baec2cbf4f5effaa1f5a276c2caf1ff5b26c1003..5bf8208e0357da86160167664ff234827bc66c98 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -71,9 +71,9 @@ module Projects
     end
 
     def extract_archive!(temp_path)
-      if artifacts_filename.ends_with?('.tar.gz') || artifacts_filename.ends_with?('.tgz')
+      if artifacts.ends_with?('.tar.gz') || artifacts.ends_with?('.tgz')
         extract_tar_archive!(temp_path)
-      elsif artifacts_filename.ends_with?('.zip')
+      elsif artifacts.ends_with?('.zip')
         extract_zip_archive!(temp_path)
       else
         raise FailedToExtractError, 'unsupported artifacts format'
@@ -86,7 +86,7 @@ module Projects
                                  %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
                                  %W(tar -x -C #{temp_path} #{SITE_PATH}),
                                  err: '/dev/null')
-        raise 'pages failed to extract' unless results.compact.all?(&:success?)
+        raise FailedToExtractError, 'pages failed to extract' unless results.compact.all?(&:success?)
       end
     end
@@ -107,7 +107,7 @@ module Projects
       site_path = File.join(SITE_PATH, '*')
       build.artifacts_file.use_file do |artifacts_path|
         unless system(*%W(unzip -n #{artifacts_path} #{site_path} -d #{temp_path}))
-          raise 'pages failed to extract'
+          raise FailedToExtractError, 'pages failed to extract'
         end
       end
     end
@@ -139,10 +139,6 @@ module Projects
       1 + max_size / BLOCK_SIZE
     end
 
-    def artifacts_filename
-      build.artifacts_file.filename
-    end
-
     def max_size
       max_pages_size = Gitlab::CurrentSettings.max_pages_size.megabytes
@@ -171,6 +167,15 @@ module Projects
       build.ref
     end
 
+    def artifacts
+      build.artifacts_file.path
+    end
+
+    def delete_artifact!
+      build.reload # Reload stable object to prevent erase artifacts with old state
+      build.erase_artifacts! unless build.has_expiring_artifacts?
+    end
+
     def latest_sha
       project.commit(build.ref).try(:sha).to_s
     end
diff --git a/app/views/projects/jobs/_sidebar.html.haml b/app/views/projects/jobs/_sidebar.html.haml
index 2a341569fb597a3a60473f790c7bbd48cbcab7f4..a159a9ef08e4c60f81fa4d8632ebc48e1ae30199 100644
--- a/app/views/projects/jobs/_sidebar.html.haml
+++ b/app/views/projects/jobs/_sidebar.html.haml
@@ -35,9 +35,9 @@
         = link_to download_project_job_artifacts_path(@project, @build), rel: 'nofollow', download: '', class: 'btn btn-sm btn-default' do
           Download
-        - if @build.browsable_artifacts?
-          = link_to browse_project_job_artifacts_path(@project, @build), class: 'btn btn-sm btn-default' do
-            Browse
+        - if @build.browsable_artifacts? 
+          = link_to browse_project_job_artifacts_path(@project, @build), class: 'btn btn-sm btn-default' do
+            Browse
 
     - if @build.trigger_request
       .build-widget.block
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index cf3735460b2996e79ad6fcc38f9ad535d91a2f81..9a11cdb121e6b34d592c5e1d6526f15c820b661e 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -108,5 +108,3 @@
 - update_user_activity
 - upload_checksum
 - web_hook
-
-
diff --git a/doc/administration/geo/replication/object_storage.md b/doc/administration/geo/replication/object_storage.md
deleted file mode 100644
index 36d9cf7af834d5e62c5af873f624ea075ae4ea4a..0000000000000000000000000000000000000000
--- a/doc/administration/geo/replication/object_storage.md
+++ /dev/null
@@ -1,38 +0,0 @@
-# Geo with Object storage
-
-Geo can be used in combination with Object Storage (AWS S3, or
-other compatible object storage).
-
-## Configuration
-
-At this time it is required that if object storage is enabled on the
-primary, it must also be enabled on the secondary.
-
-The secondary nodes can use the same storage bucket as the primary, or
-they can use a replicated storage bucket. At this time GitLab does not
-take care of content replication in object storage.
-
-For LFS, follow the documentation to
-[set up LFS object storage](../../../workflow/lfs/lfs_administration.md#setting-up-s3-compatible-object-storage).
-
-For CI job artifacts, there is similar documentation to configure
-[jobs artifact object storage](../../job_artifacts.md#using-object-storage)
-
-Complete these steps on all nodes, primary **and** secondary.
-
-## Replication
-
-When using Amazon S3, you can use
-[CRR](https://docs.aws.amazon.com/AmazonS3/latest/dev/crr.html) to
-have automatic replication between the bucket used by the primary and
-the bucket used by the secondary.
-
-If you are using Google Cloud Storage, consider using
-[Multi-Regional Storage](https://cloud.google.com/storage/docs/storage-classes#multi-regional).
-Or you can use the [Storage Transfer Service](https://cloud.google.com/storage/transfer/),
-although this only supports daily synchronization.
-
-For manual synchronization, or scheduled by `cron`, please have a look at:
-
-- [`s3cmd sync`](http://s3tools.org/s3cmd-sync)
-- [`gsutil rsync`](https://cloud.google.com/storage/docs/gsutil/commands/rsync)
diff --git a/doc/gitlab-geo/object_storage.md b/doc/gitlab-geo/object_storage.md
deleted file mode 100644
index 272bc13c45095b8f8932eb2ce4c653729f1cce33..0000000000000000000000000000000000000000
--- a/doc/gitlab-geo/object_storage.md
+++ /dev/null
@@ -1 +0,0 @@
-This document was moved to [another location](../administration/geo/replication/object_storage.md).
diff --git a/doc/workflow/lfs/lfs_administration.md b/doc/workflow/lfs/lfs_administration.md
index fdf31a3bac731555b68090a6d24fa6fb7b28bfae..a3c9c878fe517c924c04a522613e22c007637556 100644
--- a/doc/workflow/lfs/lfs_administration.md
+++ b/doc/workflow/lfs/lfs_administration.md
@@ -47,7 +47,7 @@ In `config/gitlab.yml`:
 ## Setting up S3 compatible object storage
 
 > **Note:** [Introduced][ee-2760] in [GitLab Premium][eep] 10.0.
-> Available in [GitLab CE][ce] 10.6
+> Available in [GitLab CE][ce] 10.7
 
 It is possible to store LFS objects on remote object storage instead of on a local disk.
diff --git a/lib/tasks/gitlab/uploads/helpers.rb b/lib/tasks/gitlab/uploads/helpers.rb
deleted file mode 100644
index 239cba57297dbf9c3e6363539104f16131659e15..0000000000000000000000000000000000000000
--- a/lib/tasks/gitlab/uploads/helpers.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-module UploadTaskHelpers
-  def batch_size
-    ENV.fetch('BATCH', 200).to_i
-  end
-
-  def calculate_checksum(absolute_path)
-    Digest::SHA256.file(absolute_path).hexdigest
-  end
-
-  def check_checksum(upload)
-    checksum = calculate_checksum(upload.absolute_path)
-
-    if checksum != upload.checksum
-      puts " * File checksum (#{checksum}) does not match the one in the database (#{upload.checksum})".color(:red)
-    end
-  end
-
-  def uploads_batches(&block)
-    Upload.all.in_batches(of: batch_size, start: ENV['ID_FROM'], finish: ENV['ID_TO']) do |relation| # rubocop: disable Cop/InBatches
-      yield relation
-    end
-  end
-end