Commit 810866ec authored by Tiago Botelho

Backports changed import logic from the pull mirroring feature into CE

parent f07aee72
Showing 143 additions and 71 deletions
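
At its core, the change introduces a scheduled import state: callers now invoke import_schedule, which enqueues the import job once the transaction commits, and the Sidekiq workers move the project to started themselves, raising typed errors (ForkError, ImportError) when something goes wrong. A minimal, self-contained sketch of that lifecycle in plain Ruby (not the state_machine DSL the model actually uses; transitions simplified):

# Illustrative sketch only (not part of the commit): models the new import
# lifecycle without the state_machine gem or Sidekiq.
class ImportLifecycle
  InvalidTransition = Class.new(StandardError)

  attr_reader :state

  def initialize
    @state = :none
  end

  # import_schedule: allowed from none/finished/failed; the block stands in
  # for run_after_commit { add_import_job } in the real model.
  def import_schedule(&enqueue_job)
    transition(%i[none finished failed], :scheduled)
    enqueue_job&.call
  end

  # import_start: the worker flips the project to started when it picks the job up.
  def import_start
    transition(%i[none finished scheduled], :started)
  end

  def import_finish
    transition(%i[started], :finished)
  end

  def import_fail
    transition(%i[scheduled started], :failed)
  end

  private

  def transition(from, to)
    raise InvalidTransition, "#{@state} -> #{to}" unless from.include?(@state)
    @state = to
  end
end

# Example run: schedule, start, fail, then retry by scheduling again.
lifecycle = ImportLifecycle.new
lifecycle.import_schedule { puts 'enqueue RepositoryImportWorker' }
lifecycle.import_start
lifecycle.import_fail
lifecycle.import_schedule { puts 'enqueue again (retry)' }
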
@@ -56,6 +56,8 @@
if (job.import_status === 'finished') {
job_item.removeClass("active").addClass("success");
return status_field.html('<span><i class="fa fa-check"></i> done</span>');
} else if (job.import_status === 'scheduled') {
return status_field.html("<i class='fa fa-spinner fa-spin'></i> scheduled");
} else if (job.import_status === 'started') {
return status_field.html("<i class='fa fa-spinner fa-spin'></i> started");
} else {
@@ -14,14 +14,7 @@ class Projects::ImportsController < Projects::ApplicationController
@project.import_url = params[:project][:import_url]
 
if @project.save
@project.reload
if @project.import_failed?
@project.import_retry
else
@project.import_start
@project.add_import_job
end
@project.reload.import_schedule
end
 
redirect_to namespace_project_import_path(@project.namespace, @project)
@@ -199,7 +199,7 @@ class ApplicationSetting < ActiveRecord::Base
ApplicationSetting.define_attribute_methods
end
 
def self.defaults_ce
def self.defaults
{
after_sign_up_text: nil,
akismet_enabled: false,
@@ -250,10 +250,6 @@ class ApplicationSetting < ActiveRecord::Base
}
end
 
def self.defaults
defaults_ce
end
def self.create_from_defaults
create(defaults)
end
@@ -165,7 +165,7 @@ class Project < ActiveRecord::Base
has_many :todos, dependent: :destroy
has_many :notification_settings, dependent: :destroy, as: :source
 
has_one :import_data, dependent: :destroy, class_name: "ProjectImportData"
has_one :import_data, dependent: :delete, class_name: "ProjectImportData"
has_one :project_feature, dependent: :destroy
has_one :statistics, class_name: 'ProjectStatistics', dependent: :delete
has_many :container_repositories, dependent: :destroy
@@ -298,8 +298,16 @@ class Project < ActiveRecord::Base
scope :excluding_project, ->(project) { where.not(id: project) }
 
state_machine :import_status, initial: :none do
event :import_schedule do
transition [:none, :finished, :failed] => :scheduled
end
event :force_import_start do
transition [:none, :finished, :failed] => :started
end
event :import_start do
transition [:none, :finished] => :started
transition scheduled: :started
end
 
event :import_finish do
@@ -307,18 +315,23 @@ class Project < ActiveRecord::Base
end
 
event :import_fail do
transition started: :failed
transition [:scheduled, :started] => :failed
end
 
event :import_retry do
transition failed: :started
end
 
state :scheduled
state :started
state :finished
state :failed
 
after_transition any => :finished, do: :reset_cache_and_import_attrs
after_transition [:none, :finished, :failed] => :scheduled do |project, _|
project.run_after_commit { add_import_job }
end
after_transition started: :finished, do: :reset_cache_and_import_attrs
end
 
class << self
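
The after_transition hook into :scheduled wraps add_import_job in run_after_commit, which defers enqueueing the Sidekiq job until the surrounding database transaction commits, so the worker does not pick the job up before the project row is visible. A rough, self-contained sketch of that deferral idea in plain Ruby (the real model gets run_after_commit from an after-commit queue concern, not from this toy class):

# Sketch only: a tiny after-commit queue showing why the job is enqueued in
# run_after_commit rather than directly inside the transition.
class FakeRecord
  def initialize
    @after_commit_queue = []
  end

  def run_after_commit(&block)
    @after_commit_queue << block
  end

  # Pretend transaction: callbacks collected during the block only run after
  # the "commit", mirroring how the row must exist before the worker runs.
  def transaction
    yield
    @after_commit_queue.each { |cb| instance_exec(&cb) }
  ensure
    @after_commit_queue.clear
  end

  def add_import_job
    puts 'RepositoryImportWorker.perform_async(project.id)'
  end
end

record = FakeRecord.new
record.transaction do
  record.run_after_commit { add_import_job }
  puts 'still inside the transaction, nothing enqueued yet'
end
# add_import_job runs here, after "commit".
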
@@ -530,9 +543,17 @@ class Project < ActiveRecord::Base
end
 
def import_in_progress?
import_started? || import_scheduled?
end
def import_started?
import? && import_status == 'started'
end
 
def import_scheduled?
import_status == 'scheduled'
end
def import_failed?
import_status == 'failed'
end
@@ -48,15 +48,14 @@ module Projects
 
save_project_and_import_data(import_data)
 
@project.import_start if @project.import?
after_create_actions if @project.persisted?
 
if @project.errors.empty?
@project.add_import_job if @project.import?
@project.import_schedule if @project.import?
else
fail(error: @project.errors.full_messages.join(', '))
end
@project
rescue ActiveRecord::RecordInvalid => e
message = "Unable to save #{e.record.type}: #{e.record.errors.full_messages.join(", ")} "
class RepositoryForkWorker
ForkError = Class.new(StandardError)
include Sidekiq::Worker
include Gitlab::ShellAdapter
include DedicatedSidekiqQueue
@@ -8,29 +10,31 @@ class RepositoryForkWorker
source_path: source_path,
target_path: target_path)
 
project = Project.find_by_id(project_id)
unless project.present?
logger.error("Project #{project_id} no longer exists!")
return
end
project = Project.find(project_id)
project.import_start
 
result = gitlab_shell.fork_repository(forked_from_repository_storage_path, source_path,
project.repository_storage_path, target_path)
unless result
logger.error("Unable to fork project #{project_id} for repository #{source_path} -> #{target_path}")
project.mark_import_as_failed('The project could not be forked.')
return
end
raise ForkError, "Unable to fork project #{project_id} for repository #{source_path} -> #{target_path}" unless result
 
project.repository.after_import
unless project.valid_repo?
logger.error("Project #{project_id} had an invalid repository after fork")
project.mark_import_as_failed('The forked repository is invalid.')
return
end
raise ForkError, "Project #{project_id} had an invalid repository after fork" unless project.valid_repo?
 
project.import_finish
rescue ForkError => ex
fail_fork(project, ex.message)
raise
rescue => ex
return unless project
fail_fork(project, ex.message)
raise ForkError, "#{ex.class} #{ex.message}"
end
private
def fail_fork(project, message)
Rails.logger.error(message)
project.mark_import_as_failed(message)
end
end
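
Both workers now follow the same error-handling shape: failures raise a typed error, the rescue block records the failure on the project, and the error is re-raised so the job system still sees the job as failed. A stripped-down sketch of that pattern, assuming nothing beyond plain Ruby (no Sidekiq, project modelled as a hash):

# Sketch only: the raise / mark-as-failed / re-raise pattern used by
# RepositoryForkWorker and RepositoryImportWorker.
class FakeWorker
  WorkError = Class.new(StandardError)

  def initialize(project)
    @project = project
  end

  def perform
    @project[:import_status] = 'started'

    raise WorkError, 'unable to fork repository' unless do_work

    @project[:import_status] = 'finished'
  rescue WorkError => ex
    fail_work(ex.message)
    raise                                         # re-raise so the job is recorded as failed
  rescue => ex
    fail_work(ex.message)
    raise WorkError, "#{ex.class} #{ex.message}"  # wrap unexpected errors in the typed error
  end

  private

  def do_work
    false # simulate a failed fork
  end

  def fail_work(message)
    @project[:import_status] = 'failed'
    @project[:import_error]  = message
  end
end

project = { import_status: 'scheduled' }
begin
  FakeWorker.new(project).perform
rescue FakeWorker::WorkError => ex
  puts "job failed: #{ex.message}, import_status=#{project[:import_status]}"
end
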
class RepositoryImportWorker
ImportError = Class.new(StandardError)
include Sidekiq::Worker
include DedicatedSidekiqQueue
 
@@ -10,6 +12,8 @@ class RepositoryImportWorker
@project = Project.find(project_id)
@current_user = @project.creator
 
project.import_start
Gitlab::Metrics.add_event(:import_repository,
import_url: @project.import_url,
path: @project.path_with_namespace)
@@ -17,13 +21,23 @@ class RepositoryImportWorker
project.update_columns(import_jid: self.jid, import_error: nil)
 
result = Projects::ImportService.new(project, current_user).execute
if result[:status] == :error
project.mark_import_as_failed(result[:message])
return
end
raise ImportError, result[:message] if result[:status] == :error
 
project.repository.after_import
project.import_finish
rescue ImportError => ex
fail_import(project, ex.message)
raise
rescue => ex
return unless project
fail_import(project, ex.message)
raise ImportError, "#{ex.class} #{ex.message}"
end
private
def fail_import(project, message)
project.mark_import_as_failed(message)
end
end
@@ -37,7 +37,7 @@ class GithubImport
end
 
def import!
@project.import_start
@project.force_import_start
 
timings = Benchmark.measure do
Github::Import.new(@project, @options).execute
@@ -7,5 +7,9 @@ FactoryGirl.define do
link.forked_from_project.reload
link.forked_to_project.reload
end
trait :forked_to_empty_project do
association :forked_to_project, factory: :empty_project
end
end
end
@@ -24,6 +24,22 @@ FactoryGirl.define do
visibility_level Gitlab::VisibilityLevel::PRIVATE
end
 
trait :import_scheduled do
import_status :scheduled
end
trait :import_started do
import_status :started
end
trait :import_finished do
import_status :finished
end
trait :import_failed do
import_status :failed
end
trait :archived do
archived true
end
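
The new import_* traits let specs set up a project directly in a given import state; the worker specs further down use them, for example (variable names here are just illustrative):

let(:scheduled_project) { create(:project, :repository, :import_scheduled) }
let(:failed_project)    { create(:empty_project, :import_failed) }
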
@@ -68,9 +68,14 @@ feature 'Diffs URL', js: true, feature: true do
let(:merge_request) { create(:merge_request_with_diffs, source_project: forked_project, target_project: project, author: author_user) }
let(:changelog_id) { Digest::SHA1.hexdigest("CHANGELOG") }
 
before do
forked_project.repository.after_import
end
context 'as author' do
it 'shows direct edit link' do
login_as(author_user)
visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)
 
# Throws `Capybara::Poltergeist::InvalidSelector` if we try to use `#hash` syntax
@@ -81,6 +86,7 @@ feature 'Diffs URL', js: true, feature: true do
context 'as user who needs to fork' do
it 'shows fork/cancel confirmation' do
login_as(user)
visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)
 
# Throws `Capybara::Poltergeist::InvalidSelector` if we try to use `#hash` syntax
@@ -50,7 +50,7 @@ describe Project, models: true do
it { is_expected.to have_one(:external_wiki_service).dependent(:destroy) }
it { is_expected.to have_one(:project_feature).dependent(:destroy) }
it { is_expected.to have_one(:statistics).class_name('ProjectStatistics').dependent(:delete) }
it { is_expected.to have_one(:import_data).class_name('ProjectImportData').dependent(:destroy) }
it { is_expected.to have_one(:import_data).class_name('ProjectImportData').dependent(:delete) }
it { is_expected.to have_one(:last_event).class_name('Event') }
it { is_expected.to have_one(:forked_from_project).through(:forked_project_link) }
it { is_expected.to have_many(:commit_statuses) }
@@ -1446,16 +1446,13 @@ describe Project, models: true do
end
 
describe 'Project import job' do
let(:project) { create(:empty_project) }
let(:mirror) { false }
let(:project) { create(:empty_project, import_url: generate(:url)) }
 
before do
allow_any_instance_of(Gitlab::Shell).to receive(:import_repository)
.with(project.repository_storage_path, project.path_with_namespace, project.import_url)
.and_return(true)
 
allow(project).to receive(:repository_exists?).and_return(true)
expect_any_instance_of(Repository).to receive(:after_import)
.and_call_original
end
@@ -1463,8 +1460,7 @@ describe Project, models: true do
it 'imports a project' do
expect_any_instance_of(RepositoryImportWorker).to receive(:perform).and_call_original
 
project.import_start
project.add_import_job
project.import_schedule
 
expect(project.reload.import_status).to eq('finished')
end
@@ -1551,7 +1547,7 @@ describe Project, models: true do
 
describe '#add_import_job' do
context 'forked' do
let(:forked_project_link) { create(:forked_project_link) }
let(:forked_project_link) { create(:forked_project_link, :forked_to_empty_project) }
let(:forked_from_project) { forked_project_link.forked_from_project }
let(:project) { forked_project_link.forked_to_project }
 
@@ -1565,9 +1561,9 @@ describe Project, models: true do
end
 
context 'not forked' do
let(:project) { create(:empty_project) }
it 'schedules a RepositoryImportWorker job' do
project = create(:empty_project, import_url: generate(:url))
expect(RepositoryImportWorker).to receive(:perform_async).with(project.id)
 
project.add_import_job
@@ -161,15 +161,13 @@ describe Projects::CreateService, '#execute', services: true do
end
 
context 'when a bad service template is created' do
before do
create(:service, type: 'DroneCiService', project: nil, template: true, active: true)
end
it 'reports an error in the imported project' do
opts[:import_url] = 'http://www.gitlab.com/gitlab-org/gitlab-ce'
create(:service, type: 'DroneCiService', project: nil, template: true, active: true)
project = create_project(user, opts)
 
expect(project.errors.full_messages_for(:base).first).to match /Unable to save project. Error: Unable to save DroneCiService/
expect(project.errors.full_messages_for(:base).first).to match(/Unable to save project. Error: Unable to save DroneCiService/)
expect(project.services.count).to eq 0
end
end
require 'spec_helper'
 
describe RepositoryForkWorker do
let(:project) { create(:project, :repository) }
let(:project) { create(:project, :repository, :import_scheduled) }
let(:fork_project) { create(:project, :repository, forked_from_project: project) }
let(:shell) { Gitlab::Shell.new }
 
@@ -46,15 +46,27 @@ describe RepositoryForkWorker do
end
 
it "handles bad fork" do
source_path = project.full_path
target_path = fork_project.namespace.full_path
error_message = "Unable to fork project #{project.id} for repository #{source_path} -> #{target_path}"
expect(shell).to receive(:fork_repository).and_return(false)
 
expect(subject.logger).to receive(:error)
expect do
subject.perform(project.id, '/test/path', source_path, target_path)
end.to raise_error(RepositoryForkWorker::ForkError, error_message)
end
 
subject.perform(
project.id,
'/test/path',
project.full_path,
fork_project.namespace.full_path)
it 'handles unexpected error' do
source_path = project.full_path
target_path = fork_project.namespace.full_path
allow_any_instance_of(Gitlab::Shell).to receive(:fork_repository).and_raise(RuntimeError)
expect do
subject.perform(project.id, '/test/path', source_path, target_path)
end.to raise_error(RepositoryForkWorker::ForkError)
expect(project.reload.import_status).to eq('failed')
end
end
end
require 'spec_helper'
 
describe RepositoryImportWorker do
let(:project) { create(:empty_project) }
let(:project) { create(:empty_project, :import_scheduled) }
 
subject { described_class.new }
 
@@ -21,15 +21,26 @@ describe RepositoryImportWorker do
context 'when the import has failed' do
it 'hide the credentials that were used in the import URL' do
error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found }
expect_any_instance_of(Projects::ImportService).to receive(:execute).
and_return({ status: :error, message: error })
allow(subject).to receive(:jid).and_return('123')
 
subject.perform(project.id)
expect_any_instance_of(Projects::ImportService).to receive(:execute).and_return({ status: :error, message: error })
allow(subject).to receive(:jid).and_return('123')
 
expect(project.reload.import_error).to include("https://*****:*****@test.com/root/repoC.git/")
expect do
subject.perform(project.id)
end.to raise_error(RepositoryImportWorker::ImportError, error)
expect(project.reload.import_jid).not_to be_nil
end
end
context 'with unexpected error' do
it 'marks import as failed' do
allow_any_instance_of(Projects::ImportService).to receive(:execute).and_raise(RuntimeError)
expect do
subject.perform(project.id)
end.to raise_error(RepositoryImportWorker::ImportError)
expect(project.reload.import_status).to eq('failed')
end
end
end
end