# Projects::UpdatePagesService — deploys GitLab Pages from CI build artifacts.
module Projects
  # Deploys the contents of a CI build's artifacts archive as the project's
  # GitLab Pages site.
  #
  # The service extracts the `public/` directory from the build's artifacts
  # archive (tar.gz/tgz or zip), verifies the deployment still corresponds to
  # the latest commit on the ref, and atomically swaps it into the project's
  # pages path. Progress is surfaced through a `pages:deploy` commit status.
  class UpdatePagesService < BaseService
    # dd copy block size used to bound the size of extracted tar streams.
    BLOCK_SIZE = 32.kilobytes
    # Hard upper bound on a pages site when no application setting applies.
    MAX_SIZE = 1.terabyte
    # Directory inside the artifacts archive that holds the static site.
    SITE_PATH = 'public/'.freeze

    attr_reader :build

    # @param project [Project] the project whose pages are being deployed
    # @param build [Ci::Build] the build that produced the pages artifacts
    def initialize(project, build)
      @project = project
      @build = build
    end

    # Runs the deployment. Returns the BaseService success/error hash; any
    # failure message is also recorded on the `pages:deploy` commit status.
    def execute
      # Create a status so the pipeline shows the pages deployment progress.
      @status = create_status
      @status.enqueue!
      @status.run!

      raise 'missing pages artifacts' unless build.artifacts_file?
      raise 'pages are outdated' unless latest?

      # Extract into a scratch directory under the pages root so the final
      # move stays on the same filesystem (and therefore cheap).
      FileUtils.mkdir_p(tmp_path)
      Dir.mktmpdir(nil, tmp_path) do |archive_path|
        extract_archive!(archive_path)

        # The archive must actually contain a public/ directory.
        archive_public_path = File.join(archive_path, 'public')
        raise 'pages miss the public folder' unless Dir.exist?(archive_public_path)

        # Re-check: another deployment may have superseded us while extracting.
        raise 'pages are outdated' unless latest?

        deploy_page!(archive_public_path)
        success
      end
    rescue => e
      error(e.message)
    ensure
      build.erase_artifacts! unless build.has_expiring_artifacts?
    end

    private

    def success
      @status.success
      super
    end

    def error(message, http_status = nil)
      log_error("Projects::UpdatePagesService: #{message}")
      # Outdated deployments are expected when builds race; don't fail the
      # pipeline for them.
      @status.allow_failure = !latest?
      @status.description = message
      @status.drop
      super
    end

    # Builds (but does not persist) the commit status used to surface the
    # deployment in the pipeline UI.
    def create_status
      GenericCommitStatus.new(
        project: project,
        pipeline: build.pipeline,
        user: build.user,
        ref: build.ref,
        stage: 'deploy',
        name: 'pages:deploy'
      )
    end

    # Dispatches to the extractor matching the artifacts archive format.
    def extract_archive!(temp_path)
      if artifacts.end_with?('.tar.gz', '.tgz')
        extract_tar_archive!(temp_path)
      elsif artifacts.end_with?('.zip')
        extract_zip_archive!(temp_path)
      else
        raise 'unsupported artifacts format'
      end
    end

    def extract_tar_archive!(temp_path)
      # dd caps the unpacked stream so a malicious archive cannot exhaust
      # disk space; only SITE_PATH is extracted from the tarball.
      results = Open3.pipeline(%W(gunzip -c #{artifacts}),
                               %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
                               %W(tar -x -C #{temp_path} #{SITE_PATH}),
                               err: '/dev/null')
      raise 'pages failed to extract' unless results.compact.all?(&:success?)
    end

    def extract_zip_archive!(temp_path)
      raise 'missing artifacts metadata' unless build.artifacts_metadata?

      # Zip extraction can't be size-capped mid-stream the way tar can, so
      # check the declared uncompressed size from the metadata up front.
      public_entry = build.artifacts_metadata_entry(SITE_PATH, recursive: true)

      if public_entry.total_size > max_size
        raise "artifacts for pages are too large: #{public_entry.total_size}"
      end

      # Requires UnZip at least 6.00 Info-ZIP.
      # -n  never overwrite existing files
      # We add * to end of SITE_PATH, because we want to extract SITE_PATH and all subdirectories
      site_path = File.join(SITE_PATH, '*')
      unless system(*%W(unzip -n #{artifacts} #{site_path} -d #{temp_path}))
        raise 'pages failed to extract'
      end
    end

    def deploy_page!(archive_public_path)
      # Do atomic move of pages
      # Move and removal may not be atomic, but they are significantly faster then extracting and removal
      # 1. We move deployed public to previous public path (file removal is slow)
      # 2. We move temporary public to be deployed public
      # 3. We remove previous public path
      FileUtils.mkdir_p(pages_path)
      begin
        FileUtils.move(public_path, previous_public_path)
      rescue
        # Best effort: there may be no previous deployment to move aside.
      end
      FileUtils.move(archive_public_path, public_path)
    ensure
      FileUtils.rm_r(previous_public_path, force: true)
    end

    def latest?
      # check if sha for the ref is still the most recent one
      # this helps in case when multiple deployments happens
      sha == latest_sha
    end

    # Number of BLOCK_SIZE blocks dd may copy; +1 so a site of exactly
    # max_size bytes still fits.
    def blocks
      1 + max_size / BLOCK_SIZE
    end

    # Maximum allowed pages size in bytes. Falls back to MAX_SIZE when the
    # application setting is zero/unset.
    #
    # NOTE: the previous `…megabytes || MAX_SIZE` could never fall back —
    # `0.megabytes` is 0, and 0 is truthy in Ruby — so an unset setting
    # silently capped sites at 0 bytes. An explicit zero check fixes that.
    def max_size
      configured = current_application_settings.max_pages_size.megabytes
      configured.zero? ? MAX_SIZE : configured
    end

    # Scratch area for extraction, kept under the pages root so that
    # deploy_page!'s moves stay on one filesystem.
    def tmp_path
      @tmp_path ||= File.join(::Settings.pages.path, 'tmp')
    end

    def pages_path
      @pages_path ||= project.pages_path
    end

    def public_path
      @public_path ||= File.join(pages_path, 'public')
    end

    # Unique path the previous deployment is parked at before removal.
    def previous_public_path
      @previous_public_path ||= File.join(pages_path, "public.#{SecureRandom.hex}")
    end

    def ref
      build.ref
    end

    def artifacts
      build.artifacts_file.path
    end

    def latest_sha
      project.commit(build.ref).try(:sha).to_s
    end

    def sha
      build.sha
    end
  end
end