diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 095a346f337e429259c1a59cdcd620c95af2a63e..da8e66e5f6e5524713a531ce9f1200b1e4500391 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -457,7 +457,7 @@ module Ci
       build_data = Gitlab::DataBuilder::Build.build(self)
       project.execute_hooks(build_data.dup, :build_hooks)
       project.execute_services(build_data.dup, :build_hooks)
-      UpdatePagesService.new(build_data).execute
+      PagesService.new(build_data).execute
       project.running_or_pending_build_count(force: true)
     end
 
diff --git a/app/services/update_pages_service.rb b/app/services/pages_service.rb
similarity index 92%
rename from app/services/update_pages_service.rb
rename to app/services/pages_service.rb
index 39f08b2a03dece4175478f72a9e682511000ac34..446eeb34d3b3f903b1ad52cd808957e53a7bda76 100644
--- a/app/services/update_pages_service.rb
+++ b/app/services/pages_service.rb
@@ -1,4 +1,4 @@
-class UpdatePagesService
+class PagesService
   attr_reader :data
 
   def initialize(data)
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
new file mode 100644
index 0000000000000000000000000000000000000000..e1bb4c92e405cac08507463c5db6bc546ae23646
--- /dev/null
+++ b/app/services/projects/update_pages_service.rb
@@ -0,0 +1,132 @@
+module Projects
+  class UpdatePagesService < BaseService
+    BLOCK_SIZE = 32.kilobytes
+    MAX_SIZE = 1.terabyte
+
+    attr_reader :build
+
+    def initialize(project, build)
+      @project, @build = project, build
+    end
+
+    def execute
+      # Create status notifying the deployment of pages
+      @status = create_status
+      @status.run!
+
+      raise 'missing pages artifacts' unless build.artifacts_file?
+      raise 'pages are outdated' unless latest?
+
+      # Create temporary directory in which we will extract the artifacts
+      FileUtils.mkdir_p(tmp_path)
+      Dir.mktmpdir(nil, tmp_path) do |archive_path|
+        extract_archive!(archive_path)
+
+        # Check if we did extract public directory
+        archive_public_path = File.join(archive_path, 'public')
+        raise 'pages miss the public folder' unless Dir.exist?(archive_public_path)
+        raise 'pages are outdated' unless latest?
+
+        deploy_page!(archive_public_path)
+        success
+      end
+    rescue => e
+      error(e.message)
+    end
+
+    private
+
+    def success
+      @status.success
+      super
+    end
+
+    def error(message, http_status = nil)
+      @status.allow_failure = !latest?
+      @status.description = message
+      @status.drop
+      super
+    end
+
+    def create_status
+      GenericCommitStatus.new(
+        project: project,
+        commit: build.commit,
+        user: build.user,
+        ref: build.ref,
+        stage: 'deploy',
+        name: 'pages:deploy'
+      )
+    end
+
+    def extract_archive!(temp_path)
+      results = Open3.pipeline(%W(gunzip -c #{artifacts}),
+                               %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
+                               %W(tar -x -C #{temp_path} public/),
+                               err: '/dev/null')
+      raise 'pages failed to extract' unless results.compact.all?(&:success?)
+    end
+
+    def deploy_page!(archive_public_path)
+      # Do atomic move of pages
+      # Move and removal may not be atomic, but they are significantly faster then extracting and removal
+      # 1. We move deployed public to previous public path (file removal is slow)
+      # 2. We move temporary public to be deployed public
+      # 3. We remove previous public path
+      FileUtils.mkdir_p(pages_path)
+      begin
+        FileUtils.move(public_path, previous_public_path)
+      rescue
+      end
+      FileUtils.move(archive_public_path, public_path)
+    ensure
+      FileUtils.rm_r(previous_public_path, force: true)
+    end
+
+    def latest?
+      # check if sha for the ref is still the most recent one
+      # this helps in case when multiple deployments happens
+      sha == latest_sha
+    end
+
+    def blocks
+      # Calculate dd parameters: we limit the size of pages
+      max_size = current_application_settings.max_pages_size.to_i.megabytes
+      max_size = MAX_SIZE if max_size.zero?
+      blocks = 1 + max_size / BLOCK_SIZE
+      blocks
+    end
+
+    def tmp_path
+      @tmp_path ||= File.join(Settings.pages.path, 'tmp')
+    end
+
+    def pages_path
+      @pages_path ||= project.pages_path
+    end
+
+    def public_path
+      @public_path ||= File.join(pages_path, 'public')
+    end
+
+    def previous_public_path
+      @previous_public_path ||= File.join(pages_path, "public.#{SecureRandom.hex}")
+    end
+
+    def ref
+      build.ref
+    end
+
+    def artifacts
+      build.artifacts_file.path
+    end
+
+    def latest_sha
+      project.commit(build.ref).try(:sha).to_s
+    end
+
+    def sha
+      build.sha
+    end
+  end
+end
diff --git a/app/workers/pages_worker.rb b/app/workers/pages_worker.rb
index ff765a6c13c898526a8ecb05acc846ace70281ac..8c99e8dbe763c31f951e757d7db2b78e67bb40c1 100644
--- a/app/workers/pages_worker.rb
+++ b/app/workers/pages_worker.rb
@@ -1,9 +1,5 @@
 class PagesWorker
   include Sidekiq::Worker
-  include Gitlab::CurrentSettings
-
-  BLOCK_SIZE = 32.kilobytes
-  MAX_SIZE = 1.terabyte
 
   sidekiq_options queue: :pages, retry: false
 
@@ -12,137 +8,12 @@ class PagesWorker
   end
 
   def deploy(build_id)
-    @build_id = build_id
-    return unless valid?
-
-    # Create status notifying the deployment of pages
-    @status = create_status
-    @status.run!
-
-    raise 'pages are outdated' unless latest?
-
-    # Create temporary directory in which we will extract the artifacts
-    FileUtils.mkdir_p(tmp_path)
-    Dir.mktmpdir(nil, tmp_path) do |archive_path|
-      extract_archive!(archive_path)
-
-      # Check if we did extract public directory
-      archive_public_path = File.join(archive_path, 'public')
-      raise 'pages miss the public folder' unless Dir.exists?(archive_public_path)
-      raise 'pages are outdated' unless latest?
-
-      deploy_page!(archive_public_path)
-
-      @status.success
-    end
-  rescue => e
-    fail(e.message, !latest?)
-    return false
+    build = Ci::Build.find_by(id: build_id)
+    Projects::UpdatePagesService.new(build.project, build).execute if build
   end
 
   def remove(namespace_path, project_path)
     full_path = File.join(Settings.pages.path, namespace_path, project_path)
     FileUtils.rm_r(full_path, force: true)
   end
-
-  private
-
-  def create_status
-    GenericCommitStatus.new(
-      project: project,
-      commit: build.commit,
-      user: build.user,
-      ref: build.ref,
-      stage: 'deploy',
-      name: 'pages:deploy'
-    )
-  end
-
-  def extract_archive!(temp_path)
-    results = Open3.pipeline(%W(gunzip -c #{artifacts}),
-                             %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
-                             %W(tar -x -C #{temp_path} public/),
-                             err: '/dev/null')
-    raise 'pages failed to extract' unless results.compact.all?(&:success?)
-  end
-
-  def deploy_page!(archive_public_path)
-    # Do atomic move of pages
-    # Move and removal may not be atomic, but they are significantly faster then extracting and removal
-    # 1. We move deployed public to previous public path (file removal is slow)
-    # 2. We move temporary public to be deployed public
-    # 3. We remove previous public path
-    FileUtils.mkdir_p(pages_path)
-    begin
-      FileUtils.move(public_path, previous_public_path)
-    rescue
-    end
-    FileUtils.move(archive_public_path, public_path)
-  ensure
-    FileUtils.rm_r(previous_public_path, force: true)
-  end
-
-  def fail(message, allow_failure = true)
-    @status.allow_failure = allow_failure
-    @status.description = message
-    @status.drop
-  end
-
-  def valid?
-    build && build.artifacts_file?
-  end
-
-  def latest?
-    # check if sha for the ref is still the most recent one
-    # this helps in case when multiple deployments happens
-    sha == latest_sha
-  end
-
-  def blocks
-    # Calculate dd parameters: we limit the size of pages
-    max_size = current_application_settings.max_pages_size.megabytes
-    max_size ||= MAX_SIZE
-    blocks = 1 + max_size / BLOCK_SIZE
-    blocks
-  end
-
-  def build
-    @build ||= Ci::Build.find_by(id: @build_id)
-  end
-
-  def project
-    @project ||= build.project
-  end
-
-  def tmp_path
-    @tmp_path ||= File.join(Settings.pages.path, 'tmp')
-  end
-
-  def pages_path
-    @pages_path ||= project.pages_path
-  end
-
-  def public_path
-    @public_path ||= File.join(pages_path, 'public')
-  end
-
-  def previous_public_path
-    @previous_public_path ||= File.join(pages_path, "public.#{SecureRandom.hex}")
-  end
-
-  def ref
-    build.ref
-  end
-
-  def artifacts
-    build.artifacts_file.path
-  end
-
-  def latest_sha
-    project.commit(build.ref).try(:sha).to_s
-  end
-
-  def sha
-    build.sha
-  end
 end
diff --git a/spec/services/update_pages_service_spec.rb b/spec/services/pages_service_spec.rb
similarity index 91%
rename from spec/services/update_pages_service_spec.rb
rename to spec/services/pages_service_spec.rb
index cf1ca15da4449363c2041fe89d6fb1ec5972a93e..e6ad93358a00760e5e6ee851afd1375d57dac95e 100644
--- a/spec/services/update_pages_service_spec.rb
+++ b/spec/services/pages_service_spec.rb
@@ -1,9 +1,9 @@
 require 'spec_helper'
 
-describe UpdatePagesService, services: true do
+describe PagesService, services: true do
   let(:build) { create(:ci_build) }
   let(:data) { Gitlab::BuildDataBuilder.build(build) }
-  let(:service) { UpdatePagesService.new(data) }
+  let(:service) { PagesService.new(data) }
 
   before do
     allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
diff --git a/spec/workers/pages_worker_spec.rb b/spec/services/projects/update_pages_service_spec.rb
similarity index 80%
rename from spec/workers/pages_worker_spec.rb
rename to spec/services/projects/update_pages_service_spec.rb
index 85592154598cd1c38d94937add0d90a6a8a584f1..0607c025b9eefdb46b5df4f6922b3a21c60ed61f 100644
--- a/spec/workers/pages_worker_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -1,13 +1,14 @@
 require "spec_helper"
 
-describe PagesWorker do
+describe Projects::UpdatePagesService do
   let(:project) { create :project }
   let(:commit) { create :ci_commit, project: project, sha: project.commit('HEAD').sha }
   let(:build) { create :ci_build, commit: commit, ref: 'HEAD' }
-  let(:worker) { PagesWorker.new }
   let(:file) { fixture_file_upload(Rails.root + 'spec/fixtures/pages.tar.gz', 'application/octet-stream') }
   let(:empty_file) { fixture_file_upload(Rails.root + 'spec/fixtures/pages_empty.tar.gz', 'application/octet-stream') }
   let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'application/octet-stream') }
+
+  subject { described_class.new(project, build) }
 
   before do
     project.remove_pages
@@ -18,19 +19,19 @@ describe PagesWorker do
 
     it 'succeeds' do
       expect(project.pages_url).to be_nil
-      expect(worker.deploy(build.id)).to be_truthy
+      expect(execute).to eq(:success)
       expect(project.pages_url).to_not be_nil
     end
 
     it 'limits pages size' do
       stub_application_setting(max_pages_size: 1)
-      expect(worker.deploy(build.id)).to_not be_truthy
+      expect(execute).to_not eq(:success)
     end
 
     it 'removes pages after destroy' do
       expect(PagesWorker).to receive(:perform_in)
       expect(project.pages_url).to be_nil
-      expect(worker.deploy(build.id)).to be_truthy
+      expect(execute).to eq(:success)
       expect(project.pages_url).to_not be_nil
       project.destroy
       expect(Dir.exist?(project.public_pages_path)).to be_falsey
@@ -44,22 +45,26 @@ describe PagesWorker do
   end
 
   it 'fails if no artifacts' do
-    expect(worker.deploy(build.id)).to_not be_truthy
+    expect(execute).to_not eq(:success)
   end
 
   it 'fails for empty file fails' do
     build.update_attributes(artifacts_file: empty_file)
-    expect(worker.deploy(build.id)).to_not be_truthy
+    expect(execute).to_not eq(:success)
   end
 
   it 'fails for invalid archive' do
     build.update_attributes(artifacts_file: invalid_file)
-    expect(worker.deploy(build.id)).to_not be_truthy
+    expect(execute).to_not eq(:success)
   end
 
   it 'fails if sha on branch is not latest' do
     commit.update_attributes(sha: 'old_sha')
     build.update_attributes(artifacts_file: file)
-    expect(worker.deploy(build.id)).to_not be_truthy
+    expect(execute).to_not eq(:success)
+  end
+
+  def execute
+    subject.execute[:status]
   end
 end