Unverified Commit 757742de authored by Kamil Trzciński

Support cache storage

parent f3eb2314
@@ -316,7 +316,7 @@ module Ci
    end

    def browsable_artifacts?
-     !Gitlab.config.artifacts.object_store.enabled && artifacts_metadata?
+     !artifacts_file.remote_storage? && artifacts_metadata?
    end

    def artifacts_metadata?
@@ -324,12 +324,14 @@ module Ci
    end

    def artifacts_metadata_entry(path, **options)
-     metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
-       artifacts_metadata.path,
-       path,
-       **options)
+     artifacts_metadata.use_file do |metadata_path|
+       metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
+         metadata_path,
+         path,
+         **options)

-     metadata.to_entry
+       metadata.to_entry
+     end
    end

    def erase_artifacts!
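With this change artifacts_metadata_entry no longer assumes the metadata archive sits on local disk: access goes through use_file (added to the ObjectStoreable concern further down), which yields a readable path whether storage is local or remote. A minimal call-site sketch, with a hypothetical directory path inside the archive:

    # 'coverage/' is a placeholder path; options are forwarded to the Metadata parser
    entry = build.artifacts_metadata_entry('coverage/')
    entry.exists?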
@@ -8,9 +8,6 @@ module Projects
 
  def initialize(project, job)
    @project, @job = project, job
-   # If we store artifacts on object store, we need to get them local
-   extractable_artifacts
  end

  def execute
@@ -39,7 +36,6 @@ module Projects
    error(e.message)
  ensure
    job.erase_artifacts! unless job.has_expiring_artifacts?
-   FileUtils.rm_rf(artifacts) if Gitlab.config.artifacts.object_store.enabled
  end

  private
@@ -90,8 +86,11 @@ module Projects
    # -n never overwrite existing files
    # We add * to end of SITE_PATH, because we want to extract SITE_PATH and all subdirectories
    site_path = File.join(SITE_PATH, '*')
-   unless system(*%W(unzip -n #{artifacts} #{site_path} -d #{temp_path}))
-     raise 'pages failed to extract'
+   job.artifacts_file.use_file do |artifacts_path|
+     unless system(*%W(unzip -n #{artifacts_path} #{site_path} -d #{temp_path}))
+       raise 'pages failed to extract'
+     end
    end
  end

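The unzip call now receives whatever local path use_file yields rather than job.artifacts_file.path, so it keeps working when artifacts live in object storage. For illustration, this is how the %W literal and the splatted system call behave (all values below are placeholders):

    artifacts_path = '/tmp/artifacts.zip'
    site_path      = 'public/*'
    temp_path      = '/tmp/pages'
    argv = %W(unzip -n #{artifacts_path} #{site_path} -d #{temp_path})
    # => ["unzip", "-n", "/tmp/artifacts.zip", "public/*", "-d", "/tmp/pages"]
    system(*argv) # runs unzip directly, without going through a shell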
@@ -146,17 +145,6 @@ module Projects
    job.ref
  end

- def artifacts
-   job.artifacts_file.path
- end
- def extractable_artifacts
-   return unless Gitlab.config.artifacts.object_store.enabled
-   job.artifacts_file.download!(job.artifacts_file.url)
-   job.artifacts_metadata.download!(job.artifacts_metadata.url)
- end
  def latest_sha
    project.commit(job.ref).try(:sha).to_s
  end
@@ -3,84 +3,39 @@ class ArtifactUploader < GitlabUploader
 
  storage_options Gitlab.config.artifacts

- def self.artifacts_path
-   if object_store_options.enabled
-     ""
-   else
-     storage_options.path + "/"
-   end
- end
- def artifacts_path
-   self.class.artifacts_path
- end
- def self.artifacts_upload_path
-   self.artifacts_path + 'tmp/uploads'
- end
- def self.artifacts_cache_path
-   self.artifacts_path + 'tmp/cache'
- end
- attr_accessor :job, :field
- def self.object_store_options
-   Gitlab.config.artifacts.object_store
- end
- if object_store_options.enabled
-   storage :fog
- else
-   storage :file
- end
+ attr_reader :job, :field

  def initialize(job, field)
    @job, @field = job, field
  end

  def store_dir
-   self.class.artifacts_path + job.artifacts_path
+   if remote_cache_storage?
+     job.artifacts_path
+   else
+     File.join(storage_options.artifacts_path, job.artifacts_path)
+   end
  end

  def cache_dir
-   self.class.artifacts_cache_path + job.artifacts_path
- end
- def fog_directory
-   return super unless use_object_store?
-   self.class.object_store_options.bucket
- end
- # Override the credentials
- def fog_credentials
-   return super unless use_object_store?
-   {
-     provider: object_store_options.provider,
-     aws_access_key_id: object_store_options.access_key_id,
-     aws_secret_access_key: object_store_options.secret_access_key,
-     region: object_store_options.region,
-     endpoint: object_store_options.endpoint,
-     path_style: true
-   }
+   if remote_cache_storage?
+     File.join('tmp/cache', job.artifacts_path)
+   else
+     File.join(storage_options.artifacts_path, 'tmp/cache', job.artifacts_path)
+   end
  end

  def exists?
    file.try(:exists?)
  end

- def fog_public
-   false
- end
  def upload_authorize
-   result = { TempPath: ArtifactUploader.artifacts_upload_path }
+   self.cache_id = CarrierWave.generate_cache_id
+   self.original_filename = SecureRandom.hex
+   result = { TempPath: cache_path }

-   use_cache_object_storage do
-     self.cache_id = CarrierWave.generate_cache_id
-     self.original_filename = SecureRandom.hex
    expire_at = ::Fog::Time.now + fog_authenticated_url_expiration
    result[:UploadPath] = cache_name
    result[:UploadURL] = storage.connection.put_object_url(
@@ -90,53 +45,32 @@ class ArtifactUploader < GitlabUploader
    result
  end

- def upload_cache_path(path = nil)
-   File.join(cache_dir, path)
- end
  def cache!(new_file = nil)
-   use_cache_object_storage do
-     retrieve_from_cache!(new_file.upload_path)
-     @filename = new_file.original_filename
-     store_path
-     return
-   end if new_file&.upload_path
-   super
+   unless retrive_uploaded_file!(new_file&.upload_path, new_file.original_filename)
+     super
+   end
  end

  private

- def object_store_options
-   self.class.object_store_options
- end
- def use_object_store?
-   object_store_options.enabled
- end
  def cache_storage
-   if @use_storage_for_cache
+   if @use_storage_for_cache || cached? && remote_file?
      storage
    else
      super
    end
  end

- def use_cache_object_storage
+ def retrive_uploaded_file!(identifier, filename)
+   return unless identifier
+   return unless filename
    return unless use_object_store?

    @use_storage_for_cache = true
-   yield
+   retrieve_from_cache!(identifier)
+   @filename = filename
  ensure
    @use_storage_for_cache = false
  end
- def move_to_store
-   storage.is_a?(CarrierWave::Storage::File)
- end
- def move_to_cache
-   cache_storage.is_a?(CarrierWave::Storage::File)
- end
end
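Taken together, upload_authorize and the reworked cache! sketch a two-phase upload: the API hands out a pre-signed URL, the client uploads the artifact body itself, and the final request only has to point the uploader at the already-stored object. A hedged sketch of that flow, where the glue code around the uploader is hypothetical:

    uploader = ArtifactUploader.new(job, :artifacts_file)

    # 1. Ask for upload parameters. With object storage enabled this includes
    #    an UploadPath and a pre-signed UploadURL built via put_object_url.
    params = uploader.upload_authorize

    # 2. The client PUTs the artifact body to params[:UploadURL].

    # 3. On the final request, cache! sees new_file.upload_path is set and
    #    retrieves the already-uploaded object instead of re-caching it locally.
    uploader.cache!(uploaded_file)
    uploader.store!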
@@ -10,7 +10,7 @@ module ObjectStoreable
      @storage_options = options

      class_eval do
-       storage @storage_options.object_store.enabled ? :fog : :file
+       storage use_object_store? ? :fog : :file
      end
    end
  end
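For context, the storage_options class method seen in ArtifactUploader above is what drives this class_eval: the uploader passes its configuration in and the concern switches the CarrierWave storage engine accordingly. A hedged sketch of the wiring, assuming the uploader includes this concern:

    class ArtifactUploader < GitlabUploader
      include ObjectStoreable

      # Picks storage :fog when artifacts.object_store.enabled is set,
      # storage :file otherwise.
      storage_options Gitlab.config.artifacts
    end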
@@ -30,6 +30,7 @@ module ObjectStoreable
        aws_access_key_id: @storage_options.access_key_id,
        aws_secret_access_key: @storage_options.secret_access_key,
        region: @storage_options.region,
+       endpoint: @storage_options.endpoint,
        path_style: true
      }
    end
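Adding endpoint to fog_credentials lets the same AWS provider settings target any S3-compatible server rather than only AWS itself. Roughly the connection Fog builds from this hash (all values below are placeholders):

    require 'fog/aws'

    connection = Fog::Storage.new(
      provider: 'AWS',
      aws_access_key_id: 'ACCESS_KEY',
      aws_secret_access_key: 'SECRET',
      region: 'us-east-1',
      endpoint: 'http://127.0.0.1:9000', # e.g. a local MinIO server
      path_style: true                   # bucket goes in the path, not the hostname
    )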
@@ -41,4 +42,37 @@ module ObjectStoreable
  def use_object_store?
    @storage_options.object_store.enabled
  end
+ def move_to_store
+   !use_object_store?
+ end
+ def move_to_cache
+   !use_object_store?
+ end
+ def remote_file?
+   file&.is_a?(CarrierWave::Storage::Fog::File)
+ end
+ def remote_storage?
+   storage.is_a?(CarrierWave::Storage::Fog)
+ end
+ def remote_cache_storage?
+   cache_storage.is_a?(CarrierWave::Storage::Fog)
+ end
+ def use_file
+   if use_object_store?
+     return yield path
+   end
+   begin
+     cache_stored_file!
+     yield cache_path
+   ensure
+     cache_storage.delete_dir!(cache_path(nil))
+   end
+ end
end
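use_file is the helper the earlier hunks rely on: callers get a path they can read without knowing whether the uploader is backed by local files or by Fog, and any copy staged through the CarrierWave cache for the block is deleted afterwards. A minimal sketch of the contract:

    # The block only ever sees a filesystem path.
    job.artifacts_file.use_file do |local_path|
      File.size(local_path)
    end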
@@ -2,19 +2,16 @@ CarrierWave::SanitizedFile.sanitize_regexp = /[^[:word:]\.\-\+]/
 
aws_file = Rails.root.join('config', 'aws.yml')

- CarrierWave.configure do |config|
-   config.fog_provider = "fog/rackspace/storage"
-   config.fog_credentials = {
-     provider: 'AWS', # required
-     aws_access_key_id: 'ddd',
-     aws_secret_access_key: 'ccc',
-   }
- end
if File.exist?(aws_file)
  AWS_CONFIG = YAML.load(File.read(aws_file))[Rails.env]

  CarrierWave.configure do |config|
    config.fog_credentials = {
      provider: 'AWS', # required
      aws_access_key_id: AWS_CONFIG['access_key_id'], # required
      aws_secret_access_key: AWS_CONFIG['secret_access_key'], # required
      region: AWS_CONFIG['region'], # optional, defaults to 'us-east-1'
    }

    # required
    config.fog_directory = AWS_CONFIG['bucket']
@@ -223,8 +223,7 @@ module API
        Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in

        if job.save
-         status :created
-         #present job, with: Entities::JobRequest::Response
+         present job, with: Entities::JobRequest::Response
        else
          render_validation_error!(job)
        end
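Re-enabling the presenter means the saved job is rendered through a Grape entity again instead of returning a bare :created status. For reference, the Grape pattern looks like this (the exposed fields here are illustrative, not the real Entities::JobRequest::Response):

    module Entities
      module JobRequest
        class Response < Grape::Entity
          expose :id
          expose :token
        end
      end
    end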
@@ -60,7 +60,7 @@ module Gitlab
      begin
        path = read_string(gz).force_encoding('UTF-8')
        meta = read_string(gz).force_encoding('UTF-8')
+       next unless path.valid_encoding? && meta.valid_encoding?
        next unless path =~ match_pattern
        next if path =~ INVALID_PATH_PATTERN