Unverified commit 4440380f, authored by Z.J. van de Weg, committed by Kamil Trzcinski

No browsable artifacts when using object store

parent 65dfe7f9
@@ -315,11 +315,17 @@ module Ci
       !artifacts_expired? && artifacts_file.exists?
     end
 
+    def browsable_artifacts?
+      !Gitlab.config.artifacts.object_store.enabled && artifacts_metadata?
+    end
+
     def artifacts_metadata?
       artifacts? && artifacts_metadata.exists?
     end
 
     def artifacts_metadata_entry(path, **options)
puts "artifacts metadata_entry for path: #{path} and options: #{options}"
       metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
         artifacts_metadata.path,
         path,
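In effect, artifact browsing is now gated on the storage backend. A hypothetical console session, with assumed values that are not part of the commit:

    # With the object store enabled, the browse endpoint cannot read the
    # metadata remotely, so the build stays downloadable but not browsable:
    Gitlab.config.artifacts.object_store.enabled  # => true (assumed)
    build.artifacts_metadata?                     # => true
    build.browsable_artifacts?                    # => false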
+require 'tempfile'
+
 module Projects
   class UpdatePagesService < BaseService
     BLOCK_SIZE = 32.kilobytes
     MAX_SIZE = 1.terabyte
     SITE_PATH = 'public/'.freeze
 
-    attr_reader :build
+    attr_reader :job
 
-    def initialize(project, build)
-      @project, @build = project, build
+    def initialize(project, job)
+      @project, @job = project, job
     end
 
     def execute
@@ -16,7 +18,7 @@ module Projects
       @status.enqueue!
       @status.run!
 
-      raise 'missing pages artifacts' unless build.artifacts_file?
+      raise 'missing pages artifacts' unless job.artifacts_file?
       raise 'pages are outdated' unless latest?
 
       # Create temporary directory in which we will extract the artifacts
@@ -35,7 +37,9 @@ module Projects
     rescue => e
       error(e.message)
     ensure
-      build.erase_artifacts! unless build.has_expiring_artifacts?
+      job.erase_artifacts! unless job.has_expiring_artifacts?
+      # Use the ivar directly: calling the memoized temp_file method here
+      # would create (and download) the tempfile just to discard it.
+      @temp_file&.close
+      @temp_file&.unlink
     end
 
     private
@@ -56,9 +60,9 @@ module Projects
     def create_status
       GenericCommitStatus.new(
         project: project,
-        pipeline: build.pipeline,
-        user: build.user,
-        ref: build.ref,
+        pipeline: job.pipeline,
+        user: job.user,
+        ref: job.ref,
         stage: 'deploy',
         name: 'pages:deploy'
       )
@@ -75,28 +79,20 @@ module Projects
     end
 
     def extract_tar_archive!(temp_path)
-      results = Open3.pipeline(%W(gunzip -c #{artifacts}),
+      results = Open3.pipeline(%W(gunzip -c #{extractable_artifacts}),
                                %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
                                %W(tar -x -C #{temp_path} #{SITE_PATH}),
                                err: '/dev/null')
       raise 'pages failed to extract' unless results.compact.all?(&:success?)
     end
 
     def extract_zip_archive!(temp_path)
-      raise 'missing artifacts metadata' unless build.artifacts_metadata?
-
-      # Calculate page size after extract
-      public_entry = build.artifacts_metadata_entry(SITE_PATH, recursive: true)
-
-      if public_entry.total_size > max_size
-        raise "artifacts for pages are too large: #{public_entry.total_size}"
-      end
-
       # Requires UnZip 6.00 (Info-ZIP) or later.
       # -n never overwrite existing files
       # We add * to the end of SITE_PATH, because we want to extract SITE_PATH
       # and all subdirectories
       site_path = File.join(SITE_PATH, '*')
-      unless system(*%W(unzip -n #{artifacts} #{site_path} -d #{temp_path}))
+      unless system(*%W(unzip -n #{extractable_artifacts} #{site_path} -d #{temp_path}))
         raise 'pages failed to extract'
       end
     end
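A note on the invocation above, with a hypothetical sketch of the argv it builds (paths invented for illustration):

    # Roughly what Kernel#system receives:
    %W(unzip -n /shared/artifacts/archive.zip public/* -d /tmp/pages123)
    # Because system() is given an argument vector rather than a shell string,
    # "public/*" is never shell-expanded; unzip itself matches it against
    # member names inside the archive, extracting public/ and everything below.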
@@ -149,19 +145,39 @@ module Projects
     end
 
     def ref
-      build.ref
+      job.ref
     end
 
     def artifacts
-      build.artifacts_file.path
+      job.artifacts_file.path
     end
 
+    # If we're using S3 for storage, we first need to read all the data,
+    # since the artifacts file is remote. This is done using a tempfile,
+    # as the artifacts would otherwise be GC'ed.
+    def extractable_artifacts
+      if Gitlab.config.artifacts.object_store.enabled
+        temp_file.path
+      else
+        artifacts
+      end
+    end
+
+    def temp_file
+      @temp_file ||=
+        begin
+          file = Tempfile.new("pages-artifacts-#{job.id}")
+          file.binmode
+          file.write(job.artifacts_file.read)
+          file.flush
+          file
+        end
+    end
 
     def latest_sha
-      project.commit(build.ref).try(:sha).to_s
+      project.commit(job.ref).try(:sha).to_s
     end
 
     def sha
-      build.sha
+      job.sha
     end
   end
 end
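A rough sketch of the intended flow when object storage is enabled; a hypothetical walk-through, not part of the commit, with `project` and `job` assumed to exist:

    # job.artifacts_file is fog-backed here, so its #path is a remote key,
    # not a local file the unzip/tar commands could read:
    Projects::UpdatePagesService.new(project, job).execute
    # 1. extractable_artifacts sees object_store.enabled and picks temp_file
    # 2. temp_file downloads the archive once into a local Tempfile
    # 3. extraction runs against the local copy
    # 4. the ensure block closes and unlinks the Tempfile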
 class ArtifactUploader < GitlabUploader
-  storage Gitlab.config.artifacts.object_store ? :fog : :file
-
   attr_accessor :job, :field
 
   def self.artifacts_path
@@ -15,6 +13,12 @@ class ArtifactUploader < GitlabUploader
     File.join(self.artifacts_path, 'tmp/cache/')
   end
 
+  def self.object_store_options
+    Gitlab.config.artifacts.object_store
+  end
+
+  storage object_store_options.enabled ? :fog : :file
+
   def initialize(job, field)
     @job, @field = job, field
   end
@@ -27,6 +31,25 @@ class ArtifactUploader < GitlabUploader
     File.join(self.class.artifacts_cache_path, job.artifacts_path)
   end
 
+  # Override the fog directory
+  def fog_directory
+    return super unless use_object_store?
+
+    self.class.object_store_options.bucket
+  end
+
+  # Override the credentials
+  def fog_credentials
+    return super unless use_object_store?
+
+    {
+      provider:              object_store_options.provider,
+      aws_access_key_id:     object_store_options.access_key_id,
+      aws_secret_access_key: object_store_options.secret_access_key,
+      region:                object_store_options.region,
+      path_style:            true
+    }
+  end
+
   def filename
     file.try(:filename)
   end
@@ -34,4 +57,14 @@ class ArtifactUploader < GitlabUploader
   def exists?
     file.try(:exists?)
   end
 
+  private
+
+  def object_store_options
+    self.class.object_store_options
+  end
+
+  def use_object_store?
+    object_store_options.enabled
+  end
 end
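Taken together, the uploader expects `Gitlab.config.artifacts.object_store` to expose `enabled`, `provider`, `bucket`, `access_key_id`, `secret_access_key` and `region`. A hypothetical Rails console check, with invented values:

    opts = Gitlab.config.artifacts.object_store
    opts.enabled   # => true
    opts.provider  # => "AWS"              (handed to fog as-is)
    opts.bucket    # => "gitlab-artifacts" (used as the fog directory)
    opts.region    # => "eu-central-1"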
@@ -36,7 +36,7 @@
         = link_to download_namespace_project_build_artifacts_path(@project.namespace, @project, @build), rel: 'nofollow', download: '', class: 'btn btn-sm btn-default' do
           Download
 
-      - if @build.artifacts_metadata?
+      - if @build.browsable_artifacts?
         = link_to browse_namespace_project_build_artifacts_path(@project.namespace, @project, @build), class: 'btn btn-sm btn-default' do
           Browse
 
@@ -7,12 +7,14 @@ class PagesWorker
     send(action, *arg)
   end
 
-  def deploy(build_id)
-    build = Ci::Build.find_by(id: build_id)
-    result = Projects::UpdatePagesService.new(build.project, build).execute
+  def deploy(job_id)
+    job = Ci::Build.find_by(id: job_id)
+    result = Projects::UpdatePagesService.new(job.project, job).execute
+
     if result[:status] == :success
-      result = Projects::UpdatePagesConfigurationService.new(build.project).execute
+      result = Projects::UpdatePagesConfigurationService.new(job.project).execute
     end
+
     result
   end
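Since `perform` simply `send`s its first argument, a pages deploy would be enqueued with something like the following; the call site is hypothetical, inferred from the dispatch pattern above:

    # Enqueue the deploy for a finished pages job:
    PagesWorker.perform_async(:deploy, job.id)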
 
@@ -89,7 +89,9 @@ _The artifacts are stored by default in
 The previously mentioned methods use the local disk to store artifacts. However,
 there is the option to use object stores like AWS' S3. To do this, set the
 `object_store` flag to true in your `gitlab.rb`. This relies on valid AWS
-credentials to be configured already.
+credentials being configured already. Please note that enabling this feature
+means artifacts are _not_ browsable through the web interface anymore.
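The `gitlab.rb` change referred to above might look roughly as follows. The attribute names are hypothetical, mirroring the keys the new uploader reads; the final Omnibus names may differ:

    # Hypothetical sketch, not part of this commit:
    gitlab_rails['artifacts_object_store_enabled'] = true
    gitlab_rails['artifacts_object_store_provider'] = 'AWS'
    gitlab_rails['artifacts_object_store_bucket'] = 'gitlab-artifacts'
    gitlab_rails['artifacts_object_store_region'] = 'eu-central-1'
    gitlab_rails['artifacts_object_store_access_key_id'] = 'AKIA...'
    gitlab_rails['artifacts_object_store_secret_access_key'] = '...'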
 
## Set the maximum file size of the artifacts
 
@@ -60,7 +60,7 @@ module Gitlab
       begin
         path = read_string(gz).force_encoding('UTF-8')
         meta = read_string(gz).force_encoding('UTF-8')
+        next unless path.valid_encoding? && meta.valid_encoding?
         next unless path =~ match_pattern
         next if path =~ INVALID_PATH_PATTERN
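The new guard is needed because `force_encoding` only relabels the bytes; it does not validate them. A short Ruby illustration:

    s = "\xFF\xFE".force_encoding('UTF-8')
    s.encoding        # => #<Encoding:UTF-8>
    s.valid_encoding? # => false
    s =~ /foo/        # would raise ArgumentError (invalid byte sequence),
                      # which is exactly what the `next unless` avoids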