Skip to content
Snippets Groups Projects
Unverified Commit a830614f authored by Z.J. van de Weg's avatar Z.J. van de Weg Committed by Kamil Trzcinski
Browse files

Use download! to fetch remote files

Also reintroduces the file size test
parent 7f278bb5
No related branches found
No related tags found
No related merge requests found
Loading
Loading
@@ -324,8 +324,6 @@ module Ci
end
 
def artifacts_metadata_entry(path, **options)
puts "artifacts metadata_entry for path: #{path} and options: #{options}"
metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
artifacts_metadata.path,
path,
Loading
Loading
require 'tempfile'
module Projects
class UpdatePagesService < BaseService
BLOCK_SIZE = 32.kilobytes
Loading
Loading
@@ -10,6 +8,9 @@ module Projects
 
def initialize(project, job)
  # Remember the project being deployed and the CI job whose
  # artifacts hold the pages site.
  @project = project
  @job = job
  # When artifacts live on object storage they must be fetched to
  # local disk before extraction can run.
  extractable_artifacts
end
 
def execute
Loading
Loading
@@ -24,7 +25,6 @@ module Projects
# Create temporary directory in which we will extract the artifacts
FileUtils.mkdir_p(tmp_path)
Dir.mktmpdir(nil, tmp_path) do |archive_path|
puts 'starting extraction'
extract_archive!(archive_path)
 
# Check if we did extract public directory
Loading
Loading
@@ -38,9 +38,7 @@ module Projects
rescue => e
error(e.message)
ensure
job.erase_artifacts! unless job.has_expiring_artifacts?
temp_file&.close
temp_file&.unlink
job.erase_artifacts! unless job.has_expiring_artifacts?
end
 
private
Loading
Loading
@@ -80,7 +78,7 @@ module Projects
end
 
def extract_tar_archive!(temp_path)
results = Open3.pipeline(%W(gunzip -c #{extractable_artifacts}),
results = Open3.pipeline(%W(gunzip -c #{artifacts}),
%W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
%W(tar -x -C #{temp_path} #{SITE_PATH}),
err: '/dev/null')
Loading
Loading
@@ -89,11 +87,20 @@ module Projects
end
 
# Extracts the pages site (SITE_PATH and everything below it) from the
# job's zip artifacts archive into +temp_path+.
#
# Raises if the artifacts metadata is missing, if the extracted site
# would exceed +max_size+, or if unzip exits non-zero.
def extract_zip_archive!(temp_path)
  raise 'missing artifacts metadata' unless job.artifacts_metadata?

  # Calculate page size after extract
  public_entry = job.artifacts_metadata_entry(SITE_PATH, recursive: true)

  if public_entry.total_size > max_size
    raise "artifacts for pages are too large: #{public_entry.total_size}"
  end

  # Requires UnZip at least 6.00 Info-ZIP.
  # -n never overwrite existing files
  # We add * to end of SITE_PATH, because we want to extract SITE_PATH and all subdirectories
  site_path = File.join(SITE_PATH, '*')
  unless system(*%W(unzip -n #{artifacts} #{site_path} -d #{temp_path}))
    raise 'pages failed to extract'
  end
end
Loading
Loading
@@ -153,24 +160,11 @@ module Projects
job.artifacts_file.path
end
 
# Path to an artifacts archive that is readable from local disk.
# With object storage (e.g. S3) enabled the data is first copied into a
# tempfile, because the remote artifacts may be garbage-collected;
# otherwise the locally stored archive path is used directly.
def extractable_artifacts
  return artifacts unless Gitlab.config.artifacts.object_store.enabled

  temp_file.path
end
# Memoizes a local Tempfile copy of the job's artifacts archive.
#
# NOTE(review): this body appears to be two revisions fused together —
# the older Tempfile-copy approach and the newer CarrierWave
# `download!` approach are both present (the commit message says
# "Use download! to fetch remote files"). Verify against the upstream
# history which variant is intended before relying on this method.
def temp_file
  @temp_file ||=
    begin
      # Name the tempfile after the job id, preserving the archive's
      # extension so external tools (unzip/gunzip) can identify it.
      file = Tempfile.new(["#{job.id}-pages-artifacts", File.extname(artifacts)])
      IO.binwrite(file, job.artifacts_file.read)
      # NOTE(review): `return` here exits the whole method (with nil)
      # when object storage is disabled, aborting memoization mid-begin
      # — presumably residue from the newer revision; confirm.
      return unless Gitlab.config.artifacts.object_store.enabled
 
      file
    end
  # NOTE(review): these download! calls make the method return the
  # result of the last download rather than @temp_file — likely the
  # added lines of the newer revision, not meant to live here.
  job.artifacts_file.download!(job.artifacts_file.url)
  job.artifacts_metadata.download!(job.artifacts_metadata.url)
end
 
def latest_sha
Loading
Loading
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment