Commit c4c8ca04 authored by Kamil Trzciński, committed by James Edwards-Jones

Added support for zip archives in pages

The ZIP archive size is calculated from the artifacts metadata, which should be uploaded for new artifacts.
parent 6e70870a
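In rough terms, the service reads the size of the public/ entry from the uploaded artifacts metadata before touching the archive, rejects the deployment if that size exceeds the configured limit, and only then extracts public/* with unzip -n into a temporary directory. Below is a minimal, self-contained sketch of that flow in plain Ruby; the method name, the hard-coded limit and the public_size argument are illustrative stand-ins, while the actual diff that follows uses build.artifacts_metadata_entry and the application settings.

# --- illustrative sketch, not part of the commit ---
MAX_PAGES_SIZE = 1024**4 # stand-in for the configurable max pages size (~1 TB)

# public_size would come from the artifacts metadata: the total size of the
# public/ entry, known without decompressing the archive.
def extract_pages_zip!(archive_path, temp_path, public_size)
  raise "artifacts for pages are too large: #{public_size}" if public_size > MAX_PAGES_SIZE

  # -n: never overwrite existing files; 'public/*' restricts extraction to the
  # site directory and all of its subdirectories.
  extracted = system('unzip', '-n', archive_path, 'public/*', '-d', temp_path)
  raise 'pages failed to extract' unless extracted
end

Checking the size from the metadata means an oversized site is rejected before any decompression happens, which is why the commit requires artifacts metadata to be present for ZIP archives.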
@@ -2,6 +2,7 @@ module Projects
class UpdatePagesService < BaseService
BLOCK_SIZE = 32.kilobytes
MAX_SIZE = 1.terabyte
SITE_PATH = 'public/'
 
attr_reader :build
 
@@ -60,13 +61,42 @@ module Projects
end
 
def extract_archive!(temp_path)
if artifacts.ends_with?('.tar.gz') || artifacts.ends_with?('.tgz')
extract_tar_archive!(temp_path)
elsif artifacts.ends_with?('.zip')
extract_zip_archive!(temp_path)
else
raise 'unsupported artifacts format'
end
end

def extract_tar_archive!(temp_path)
results = Open3.pipeline(%W(gunzip -c #{artifacts}),
%W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
%W(tar -x -C #{temp_path} #{SITE_PATH}),
err: '/dev/null')
raise 'pages failed to extract' unless results.compact.all?(&:success?)
end
 
def extract_zip_archive!(temp_path)
raise 'missing artifacts metadata' unless build.artifacts_metadata?
# Calculate the size of the pages site after extraction
public_entry = build.artifacts_metadata_entry(SITE_PATH, recursive: true)
if public_entry.total_size > max_size
raise "artifacts for pages are too large: #{total_size}"
end
# Requires Info-ZIP UnZip version 6.00 or later.
# -n  never overwrite existing files
# We append * to SITE_PATH so that SITE_PATH and all of its subdirectories are extracted
site_path = File.join(SITE_PATH, '*')
unless system(*%W(unzip -n #{artifacts} #{site_path} -d #{temp_path}))
raise 'pages failed to extract'
end
end

def deploy_page!(archive_public_path)
# Do atomic move of pages
# Move and removal may not be atomic, but they are significantly faster than extracting and removal
@@ -91,10 +121,11 @@ module Projects
 
def blocks
# Calculate dd parameters: we limit the size of pages
1 + max_size / BLOCK_SIZE
end

def max_size
current_application_settings.max_pages_size.megabytes || MAX_SIZE
end
 
def tmp_path
File added
File added
File added
File added
@@ -4,9 +4,7 @@ describe Projects::UpdatePagesService do
let(:project) { create :project }
let(:commit) { create :ci_commit, project: project, sha: project.commit('HEAD').sha }
let(:build) { create :ci_build, commit: commit, ref: 'HEAD' }
let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png') }
subject { described_class.new(project, build) }
 
@@ -14,27 +12,50 @@ describe Projects::UpdatePagesService do
project.remove_pages
end
 
%w(tar.gz zip).each do |format|
context "for valid #{format}" do
let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{format}") }
let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{format}") }
let(:metadata) do
filename = Rails.root + "spec/fixtures/pages.#{format}.meta"
fixture_file_upload(filename) if File.exist?(filename)
end
 
before do
build.update_attributes(artifacts_file: file)
build.update_attributes(artifacts_metadata: metadata)
end
 
it 'succeeds' do
expect(project.pages_url).to be_nil
expect(execute).to eq(:success)
expect(project.pages_url).to_not be_nil
end

it 'limits pages size' do
stub_application_setting(max_pages_size: 1)
expect(execute).to_not eq(:success)
end
 
it 'removes pages after destroy' do
expect(PagesWorker).to receive(:perform_in)
expect(project.pages_url).to be_nil
expect(execute).to eq(:success)
expect(project.pages_url).to_not be_nil
project.destroy
expect(Dir.exist?(project.public_pages_path)).to be_falsey
end

it 'fails if sha on branch is not latest' do
commit.update_attributes(sha: 'old_sha')
build.update_attributes(artifacts_file: file)
expect(execute).to_not eq(:success)
end

it 'fails for empty file' do
build.update_attributes(artifacts_file: empty_file)
expect(execute).to_not eq(:success)
end
end
end
 
@@ -48,21 +69,10 @@ describe Projects::UpdatePagesService do
expect(execute).to_not eq(:success)
end
 
it 'fails for invalid archive' do
build.update_attributes(artifacts_file: invalid_file)
expect(execute).to_not eq(:success)
end
def execute
subject.execute[:status]