Verified Commit ffb9b3ef authored by Yorick Peterse

Refactor cache refreshing/expiring

This refactors repository caching so it's possible to selectively
refresh certain caches, instead of just expiring and refreshing
everything.

To allow this, the various cached methods (e.g. "tag_count" and
"readme") now follow a shared pattern that makes expiring and refreshing
their data much easier.
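
For example, each cached method is now defined as a plain method followed
by a "cache_method" declaration. A simplified excerpt of the pattern, as
used for tag_count in the Repository changes below:

    def tag_count
      raw_repository.rugged.tags.count
    end
    cache_method :tag_count, fallback: 0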

In this new setup, caches are refreshed as follows:

1. After a commit (but before running ProjectCacheWorker) we expire some
   basic caches such as the commit count and repository size.

2. ProjectCacheWorker will recalculate the commit count and repository
   size, then refresh a specific set of caches based on the list of
   files changed in the push payload.
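
A condensed sketch of step 2, taken from the GitPushService and
ProjectCacheWorker changes in this commit:

    # GitPushService maps the paths touched by the push to file types...
    types = Gitlab::FileDetector.types_in_paths(paths.to_a)
    ProjectCacheWorker.perform_async(@project.id, types)

    # ...and the worker refreshes only the matching method caches.
    project.repository.refresh_method_caches(refresh.map(&:to_sym))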

This requires a number of changes to the various methods that may be
cached. For one, data should not be cached if the branch being used or
the entire repository does not exist. To prevent every method from
having to handle this manually, this is taken care of in
Repository#cache_method_output. Some methods still manually check for
the existence of the repository, but this result is also cached.
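
Condensed from the new Repository#cache_method_output (the real method
also memoizes the result in an instance variable):

    def cache_method_output(key, fallback: nil, &block)
      cache.fetch(key, &block)
    rescue Rugged::ReferenceError, Gitlab::Git::Repository::NoRepository
      # When e.g. HEAD or the whole repository is missing, nothing is
      # cached and the fallback value is returned instead.
      fallback
    end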

With selective flushing implemented, ProjectCacheWorker no longer uses
an exclusive lease for all of its work. Instead, the worker only uses a
lease to limit how often the repository size is updated, as this is a
fairly expensive operation.
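
A condensed view of the narrower lease, from the ProjectCacheWorker
changes below:

    def update_repository_size(project)
      # The lease ensures the (expensive) size recalculation runs at most
      # once per LEASE_TIMEOUT per project.
      return unless try_obtain_lease_for(project.id, :update_repository_size)

      project.update_repository_size
    end

    def try_obtain_lease_for(project_id, section)
      Gitlab::ExclusiveLease.
        new("project_cache_worker:#{project_id}:#{section}", timeout: LEASE_TIMEOUT).
        try_obtain
    end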
parent 6f393877
@@ -1086,7 +1086,7 @@ class Project < ActiveRecord::Base
"refs/heads/#{branch}",
force: true)
repository.copy_gitattributes(branch)
repository.expire_avatar_cache(branch)
repository.expire_avatar_cache
reload_default_branch
end
 
require 'securerandom'
 
class Repository
include Gitlab::ShellAdapter
attr_accessor :path_with_namespace, :project
class CommitError < StandardError; end
 
# Files to use as a project avatar in case no avatar was uploaded via the web
# UI.
AVATAR_FILES = %w{logo.png logo.jpg logo.gif}
# Methods that cache data from the Git repository.
#
# Each entry in this Array should have a corresponding method with the exact
# same name. The cache key used by those methods must also match method's
# name.
#
# For example, for entry `:readme` there's a method called `readme` which
# stores its data in the `readme` cache key.
CACHED_METHODS = %i(size commit_count readme version contribution_guide
changelog license_blob license_key gitignore koding_yml
gitlab_ci_yml branch_names tag_names branch_count
tag_count avatar exists? empty? root_ref)
# Certain method caches should be refreshed when certain types of files are
# changed. This Hash maps file types (as returned by Gitlab::FileDetector) to
# the corresponding methods to call for refreshing caches.
METHOD_CACHES_FOR_FILE_TYPES = {
readme: :readme,
changelog: :changelog,
license: %i(license_blob license_key),
contributing: :contribution_guide,
version: :version,
gitignore: :gitignore,
koding: :koding_yml,
gitlab_ci: :gitlab_ci_yml,
avatar: :avatar
}
# Wraps around the given method and caches its output in Redis and an instance
# variable.
#
# This only works for methods that do not take any arguments.
def self.cache_method(name, fallback: nil)
original = :"_uncached_#{name}"
 
include Gitlab::ShellAdapter
alias_method(original, name)
 
attr_accessor :path_with_namespace, :project
define_method(name) do
cache_method_output(name, fallback: fallback) { __send__(original) }
end
end
 
def self.storages
Gitlab.config.repositories.storages
@@ -37,20 +75,6 @@ class Repository
)
end
 
def exists?
return @exists unless @exists.nil?
@exists = cache.fetch(:exists?) do
refs_directory_exists?
end
end
def empty?
return @empty unless @empty.nil?
@empty = cache.fetch(:empty?) { raw_repository.empty? }
end
#
# Git repository can contains some hidden refs like:
# /refs/notes/*
@@ -217,10 +241,6 @@ class Repository
branch_names + tag_names
end
 
def branch_names
@branch_names ||= cache.fetch(:branch_names) { branches.map(&:name) }
end
def branch_exists?(branch_name)
branch_names.include?(branch_name)
end
@@ -270,34 +290,6 @@ class Repository
ref_exists?(keep_around_ref_name(sha))
end
 
def tag_names
cache.fetch(:tag_names) { raw_repository.tag_names }
end
def commit_count
cache.fetch(:commit_count) do
begin
raw_repository.commit_count(self.root_ref)
rescue
0
end
end
end
def branch_count
@branch_count ||= cache.fetch(:branch_count) { branches.size }
end
def tag_count
@tag_count ||= cache.fetch(:tag_count) { raw_repository.rugged.tags.count }
end
# Return repo size in megabytes
# Cached in redis
def size
cache.fetch(:size) { raw_repository.size }
end
def diverging_commit_counts(branch)
root_ref_hash = raw_repository.rev_parse_target(root_ref).oid
cache.fetch(:"diverging_commit_counts_#{branch.name}") do
@@ -313,48 +305,55 @@ class Repository
end
end
 
# Keys for data that can be affected for any commit push.
def cache_keys
%i(size commit_count
readme version contribution_guide changelog
license_blob license_key gitignore koding_yml gitlab_ci_yml)
def expire_tags_cache
expire_method_caches(%i(tag_names tag_count))
@tags = nil
end
 
# Keys for data on branch/tag operations.
def cache_keys_for_branches_and_tags
%i(branch_names tag_names branch_count tag_count)
def expire_branches_cache
expire_method_caches(%i(branch_names branch_count))
@local_branches = nil
end
 
def build_cache
(cache_keys + cache_keys_for_branches_and_tags).each do |key|
unless cache.exist?(key)
send(key)
end
end
def expire_statistics_caches
expire_method_caches(%i(size commit_count))
end
 
def expire_tags_cache
cache.expire(:tag_names)
@tags = nil
def expire_all_method_caches
expire_method_caches(CACHED_METHODS)
end
 
def expire_branches_cache
cache.expire(:branch_names)
@branch_names = nil
@local_branches = nil
# Expires the caches of a specific set of methods
def expire_method_caches(methods)
methods.each do |key|
cache.expire(key)
ivar = cache_instance_variable_name(key)
remove_instance_variable(ivar) if instance_variable_defined?(ivar)
end
end
 
def expire_cache(branch_name = nil, revision = nil)
cache_keys.each do |key|
cache.expire(key)
def expire_avatar_cache
expire_method_caches(%i(avatar))
end
# Refreshes the method caches of this repository.
#
# types - An Array of file types (e.g. `:readme`) used to refresh extra
# caches.
def refresh_method_caches(types)
to_refresh = []
types.each do |type|
methods = METHOD_CACHES_FOR_FILE_TYPES[type.to_sym]
to_refresh.concat(Array(methods)) if methods
end
 
expire_branch_cache(branch_name)
expire_avatar_cache(branch_name, revision)
expire_method_caches(to_refresh)
 
# This ensures this particular cache is flushed after the first commit to a
# new repository.
expire_emptiness_caches if empty?
to_refresh.each { |method| send(method) }
end
 
def expire_branch_cache(branch_name = nil)
@@ -373,15 +372,14 @@ class Repository
end
 
def expire_root_ref_cache
cache.expire(:root_ref)
@root_ref = nil
expire_method_caches(%i(root_ref))
end
 
# Expires the cache(s) used to determine if a repository is empty or not.
def expire_emptiness_caches
cache.expire(:empty?)
@empty = nil
return unless empty?
 
expire_method_caches(%i(empty?))
expire_has_visible_content_cache
end
 
@@ -390,51 +388,22 @@ class Repository
@has_visible_content = nil
end
 
def expire_branch_count_cache
cache.expire(:branch_count)
@branch_count = nil
end
def expire_tag_count_cache
cache.expire(:tag_count)
@tag_count = nil
end
def lookup_cache
@lookup_cache ||= {}
end
 
def expire_avatar_cache(branch_name = nil, revision = nil)
# Avatars are pulled from the default branch, thus if somebody pushes to a
# different branch there's no need to expire anything.
return if branch_name && branch_name != root_ref
# We don't want to flush the cache if the commit didn't actually make any
# changes to any of the possible avatar files.
if revision && commit = self.commit(revision)
return unless commit.raw_diffs(deltas_only: true).
any? { |diff| AVATAR_FILES.include?(diff.new_path) }
end
cache.expire(:avatar)
@avatar = nil
end
def expire_exists_cache
cache.expire(:exists?)
@exists = nil
expire_method_caches(%i(exists?))
end
 
# expire cache that doesn't depend on repository data (when expiring)
def expire_content_cache
expire_tags_cache
expire_tag_count_cache
expire_branches_cache
expire_branch_count_cache
expire_root_ref_cache
expire_emptiness_caches
expire_exists_cache
expire_statistics_caches
end
 
# Runs code after a repository has been created.
@@ -449,9 +418,8 @@ class Repository
# Runs code just before a repository is deleted.
def before_delete
expire_exists_cache
expire_cache if exists?
expire_all_method_caches
expire_branch_cache if exists?
expire_content_cache
 
repository_event(:remove_repository)
@@ -468,9 +436,9 @@ class Repository
 
# Runs code before pushing (= creating or removing) a tag.
def before_push_tag
expire_cache
expire_statistics_caches
expire_emptiness_caches
expire_tags_cache
expire_tag_count_cache
 
repository_event(:push_tag)
end
@@ -478,7 +446,7 @@ class Repository
# Runs code before removing a tag.
def before_remove_tag
expire_tags_cache
expire_tag_count_cache
expire_statistics_caches
 
repository_event(:remove_tag)
end
@@ -490,12 +458,14 @@ class Repository
# Runs code after a repository has been forked/imported.
def after_import
expire_content_cache
build_cache
expire_tags_cache
expire_branches_cache
end
 
# Runs code after a new commit has been pushed.
def after_push_commit(branch_name, revision)
expire_cache(branch_name, revision)
def after_push_commit(branch_name)
expire_statistics_caches
expire_branch_cache(branch_name)
 
repository_event(:push_commit, branch: branch_name)
end
@@ -504,7 +474,6 @@ class Repository
def after_create_branch
expire_branches_cache
expire_has_visible_content_cache
expire_branch_count_cache
 
repository_event(:push_branch)
end
@@ -519,7 +488,6 @@ class Repository
# Runs code after an existing branch has been removed.
def after_remove_branch
expire_has_visible_content_cache
expire_branch_count_cache
expire_branches_cache
end
 
@@ -546,82 +514,127 @@ class Repository
Gitlab::Git::Blob.raw(self, oid)
end
 
def root_ref
if raw_repository
raw_repository.root_ref
else
# When the repo does not exist we raise this error so no data is cached.
raise Rugged::ReferenceError
end
end
cache_method :root_ref
def exists?
refs_directory_exists?
end
cache_method :exists?
def empty?
raw_repository.empty?
end
cache_method :empty?
# The size of this repository in megabytes.
def size
exists? ? raw_repository.size : 0.0
end
cache_method :size, fallback: 0.0
def commit_count
root_ref ? raw_repository.commit_count(root_ref) : 0
end
cache_method :commit_count, fallback: 0
def branch_names
branches.map(&:name)
end
cache_method :branch_names, fallback: []
def tag_names
raw_repository.tag_names
end
cache_method :tag_names, fallback: []
def branch_count
branches.size
end
cache_method :branch_count, fallback: 0
def tag_count
raw_repository.rugged.tags.count
end
cache_method :tag_count, fallback: 0
def avatar
if tree = file_on_head(:avatar)
tree.path
end
end
cache_method :avatar
def readme
cache.fetch(:readme) { tree(:head).readme }
if head = tree(:head)
head.readme
end
end
cache_method :readme
 
def version
cache.fetch(:version) do
file_on_head(:version)
end
file_on_head(:version)
end
cache_method :version
 
def contribution_guide
cache.fetch(:contribution_guide) do
file_on_head(:contributing)
end
file_on_head(:contributing)
end
cache_method :contribution_guide
 
def changelog
cache.fetch(:changelog) do
file_on_head(:changelog)
end
file_on_head(:changelog)
end
cache_method :changelog
 
def license_blob
return nil unless head_exists?
cache.fetch(:license_blob) do
file_on_head(:license)
end
file_on_head(:license)
end
cache_method :license_blob
 
def license_key
return nil unless head_exists?
return unless exists?
 
cache.fetch(:license_key) do
Licensee.license(path).try(:key)
end
Licensee.license(path).try(:key)
end
cache_method :license_key
 
def gitignore
return nil if !exists? || empty?
cache.fetch(:gitignore) do
file_on_head(:gitignore)
end
file_on_head(:gitignore)
end
cache_method :gitignore
 
def koding_yml
return nil unless head_exists?
cache.fetch(:koding_yml) do
file_on_head(:koding)
end
file_on_head(:koding)
end
cache_method :koding_yml
 
def gitlab_ci_yml
return nil unless head_exists?
@gitlab_ci_yml ||= cache.fetch(:gitlab_ci_yml) do
file_on_head(:gitlab_ci)
end
rescue Rugged::ReferenceError
# For unknow reason spinach scenario "Scenario: I change project path"
# lead to "Reference 'HEAD' not found" exception from Repository#empty?
nil
file_on_head(:gitlab_ci)
end
cache_method :gitlab_ci_yml
 
def head_commit
@head_commit ||= commit(self.root_ref)
end
 
def head_tree
@head_tree ||= Tree.new(self, head_commit.sha, nil)
if head_commit
@head_tree ||= Tree.new(self, head_commit.sha, nil)
end
end
 
def tree(sha = :head, path = nil, recursive: false)
if sha == :head
return unless head_commit
if path.nil?
return head_tree
else
@@ -771,10 +784,6 @@ class Repository
@tags ||= raw_repository.tags
end
 
def root_ref
@root_ref ||= cache.fetch(:root_ref) { raw_repository.root_ref }
end
def commit_dir(user, path, message, branch, author_email: nil, author_name: nil)
update_branch_with_hooks(user, branch) do |ref|
options = {
@@ -1132,12 +1141,41 @@ class Repository
end
end
 
def avatar
return nil unless exists?
# Caches the supplied block both in a cache and in an instance variable.
#
# The cache key and instance variable are named the same way as the value of
# the `key` argument.
#
# This method will return `nil` if the corresponding instance variable is also
# set to `nil`. This ensures we don't keep yielding the block when it returns
# `nil`.
#
# key - The name of the key to cache the data in.
# fallback - A value to fall back to in the event of a Git error.
def cache_method_output(key, fallback: nil, &block)
ivar = cache_instance_variable_name(key)
if instance_variable_defined?(ivar)
instance_variable_get(ivar)
else
begin
instance_variable_set(ivar, cache.fetch(key, &block))
rescue Rugged::ReferenceError, Gitlab::Git::Repository::NoRepository
# if e.g. HEAD or the entire repository doesn't exist we want to
# gracefully handle this and not cache anything.
fallback
end
end
end
 
@avatar ||= cache.fetch(:avatar) do
AVATAR_FILES.find do |file|
blob_at_branch(root_ref, file)
def cache_instance_variable_name(key)
:"@#{key.to_s.tr('?!', '')}"
end
def file_on_head(type)
if head = tree(:head)
head.blobs.find do |file|
Gitlab::FileDetector.type_of(file.name) == type
end
end
end
@@ -1154,16 +1192,6 @@ class Repository
@cache ||= RepositoryCache.new(path_with_namespace, @project.id)
end
 
def head_exists?
exists? && !empty? && !rugged.head_unborn?
end
def file_on_head(type)
tree(:head).blobs.find do |file|
Gitlab::FileDetector.type_of(file.name) == type
end
end
def tags_sorted_by_committed_date
tags.sort_by { |tag| tag.dereferenced_target.committed_date }
end
@@ -18,7 +18,7 @@ class GitPushService < BaseService
#
def execute
@project.repository.after_create if @project.empty_repo?
@project.repository.after_push_commit(branch_name, params[:newrev])
@project.repository.after_push_commit(branch_name)
 
if push_remove_branch?
@project.repository.after_remove_branch
@@ -51,12 +51,32 @@ class GitPushService < BaseService
 
execute_related_hooks
perform_housekeeping
update_caches
end
 
def update_gitattributes
@project.repository.copy_gitattributes(params[:ref])
end
 
def update_caches
if is_default_branch?
paths = Set.new
@push_commits.each do |commit|
commit.raw_diffs(deltas_only: true).each do |diff|
paths << diff.new_path
end
end
types = Gitlab::FileDetector.types_in_paths(paths.to_a)
else
types = []
end
ProjectCacheWorker.perform_async(@project.id, types)
end
protected
 
def execute_related_hooks
@@ -70,7 +90,6 @@ class GitPushService < BaseService
@project.execute_hooks(build_push_data.dup, :push_hooks)
@project.execute_services(build_push_data.dup, :push_hooks)
Ci::CreatePipelineService.new(@project, current_user, build_push_data).execute
ProjectCacheWorker.perform_async(@project.id)
 
if push_remove_branch?
AfterBranchDeleteService
# Worker for updating any project specific caches.
#
# This worker runs at most once every 15 minutes per project. This is to ensure
# that multiple instances of jobs for this worker don't hammer the underlying
# storage engine as much.
class ProjectCacheWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
 
LEASE_TIMEOUT = 15.minutes.to_i
 
def self.lease_for(project_id)
Gitlab::ExclusiveLease.
new("project_cache_worker:#{project_id}", timeout: LEASE_TIMEOUT)
end
# project_id - The ID of the project for which to flush the cache.
# refresh - An Array containing extra types of data to refresh such as
# `:readme` to flush the README and `:changelog` to flush the
# CHANGELOG.
def perform(project_id, refresh = [])
project = Project.find_by(id: project_id)
 
# Overwrite Sidekiq's implementation so we only schedule when actually needed.
def self.perform_async(project_id)
# If a lease for this project is still being held there's no point in
# scheduling a new job.
super unless lease_for(project_id).exists?
end
return unless project && project.repository.exists?
 
def perform(project_id)
if try_obtain_lease_for(project_id)
Rails.logger.
info("Obtained ProjectCacheWorker lease for project #{project_id}")
else
Rails.logger.
info("Could not obtain ProjectCacheWorker lease for project #{project_id}")
return
end
update_repository_size(project)
project.update_commit_count
 
update_caches(project_id)
project.repository.refresh_method_caches(refresh.map(&:to_sym))
end
 
def update_caches(project_id)
project = Project.find(project_id)
def update_repository_size(project)
return unless try_obtain_lease_for(project.id, :update_repository_size)
 
return unless project.repository.exists?
Rails.logger.info("Updating repository size for project #{project.id}")
 
project.update_repository_size
project.update_commit_count
if project.repository.root_ref
project.repository.build_cache
end
end
 
def try_obtain_lease_for(project_id)
self.class.lease_for(project_id).try_obtain
private
def try_obtain_lease_for(project_id, section)
Gitlab::ExclusiveLease.
new("project_cache_worker:#{project_id}:#{section}", timeout: LEASE_TIMEOUT).
try_obtain
end
end
@@ -1572,7 +1572,7 @@ describe Project, models: true do
end
 
it 'expires the avatar cache' do
expect(project.repository).to receive(:expire_avatar_cache).with(project.default_branch)
expect(project.repository).to receive(:expire_avatar_cache)
project.change_head(project.default_branch)
end
 
This diff is collapsed.
@@ -14,7 +14,7 @@ describe API::API, api: true do
 
describe "GET /projects/:id/repository/branches" do
it "returns an array of project branches" do
project.repository.expire_cache
project.repository.expire_all_method_caches
 
get api("/projects/#{project.id}/repository/branches", user)
expect(response).to have_http_status(200)
@@ -27,27 +27,14 @@ describe GitPushService, services: true do
 
it { is_expected.to be_truthy }
 
it 'flushes general cached data' do
expect(project.repository).to receive(:expire_cache).
with('master', newrev)
it 'calls the after_push_commit hook' do
expect(project.repository).to receive(:after_push_commit).with('master')
 
subject
end
 
it 'flushes the visible content cache' do
expect(project.repository).to receive(:expire_has_visible_content_cache)
subject
end
it 'flushes the branches cache' do
expect(project.repository).to receive(:expire_branches_cache)
subject
end
it 'flushes the branch count cache' do
expect(project.repository).to receive(:expire_branch_count_cache)
it 'calls the after_create_branch hook' do
expect(project.repository).to receive(:after_create_branch)
 
subject
end
@@ -56,21 +43,8 @@ describe GitPushService, services: true do
context 'existing branch' do
it { is_expected.to be_truthy }
 
it 'flushes general cached data' do
expect(project.repository).to receive(:expire_cache).
with('master', newrev)
subject
end
it 'does not flush the branches cache' do
expect(project.repository).not_to receive(:expire_branches_cache)
subject
end
it 'does not flush the branch count cache' do
expect(project.repository).not_to receive(:expire_branch_count_cache)
it 'calls the after_push_commit hook' do
expect(project.repository).to receive(:after_push_commit).with('master')
 
subject
end
@@ -81,27 +55,14 @@ describe GitPushService, services: true do
 
it { is_expected.to be_truthy }
 
it 'flushes the visible content cache' do
expect(project.repository).to receive(:expire_has_visible_content_cache)
subject
end
it 'flushes the branches cache' do
expect(project.repository).to receive(:expire_branches_cache)
subject
end
it 'flushes the branch count cache' do
expect(project.repository).to receive(:expire_branch_count_cache)
it 'calls the after_push_commit hook' do
expect(project.repository).to receive(:after_push_commit).with('master')
 
subject
end
 
it 'flushes general cached data' do
expect(project.repository).to receive(:expire_cache).
with('master', newrev)
it 'calls the after_remove_branch hook' do
expect(project.repository).to receive(:after_remove_branch)
 
subject
end
@@ -598,6 +559,51 @@ describe GitPushService, services: true do
end
end
 
describe '#update_caches' do
let(:service) do
described_class.new(project,
user,
oldrev: sample_commit.parent_id,
newrev: sample_commit.id,
ref: 'refs/heads/master')
end
context 'on the default branch' do
before do
allow(service).to receive(:is_default_branch?).and_return(true)
end
it 'flushes the caches of any special files that have been changed' do
commit = double(:commit)
diff = double(:diff, new_path: 'README.md')
expect(commit).to receive(:raw_diffs).with(deltas_only: true).
and_return([diff])
service.push_commits = [commit]
expect(ProjectCacheWorker).to receive(:perform_async).
with(project.id, %i(readme))
service.update_caches
end
end
context 'on a non-default branch' do
before do
allow(service).to receive(:is_default_branch?).and_return(false)
end
it 'does not flush any conditional caches' do
expect(ProjectCacheWorker).to receive(:perform_async).
with(project.id, []).
and_call_original
service.update_caches
end
end
end
def execute_service(project, user, oldrev, newrev, ref)
service = described_class.new(project, user, oldrev: oldrev, newrev: newrev, ref: ref )
service.execute
@@ -18,7 +18,7 @@ describe GitTagPushService, services: true do
end
 
it 'flushes general cached data' do
expect(project.repository).to receive(:expire_cache)
expect(project.repository).to receive(:before_push_tag)
 
subject
end
@@ -28,12 +28,6 @@ describe GitTagPushService, services: true do
 
subject
end
it 'flushes the tag count cache' do
expect(project.repository).to receive(:expire_tag_count_cache)
subject
end
end
 
describe "Git Tag Push Data" do
@@ -2,62 +2,78 @@ require 'spec_helper'
 
describe ProjectCacheWorker do
let(:project) { create(:project) }
let(:worker) { described_class.new }
 
subject { described_class.new }
describe '.perform_async' do
it 'schedules the job when no lease exists' do
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:exists?).
and_return(false)
describe '#perform' do
before do
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).
and_return(true)
end
 
expect_any_instance_of(described_class).to receive(:perform)
context 'with a non-existing project' do
it 'does nothing' do
expect(worker).not_to receive(:update_repository_size)
 
described_class.perform_async(project.id)
worker.perform(-1)
end
end
 
it 'does not schedule the job when a lease exists' do
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:exists?).
and_return(true)
context 'with an existing project without a repository' do
it 'does nothing' do
allow_any_instance_of(Repository).to receive(:exists?).and_return(false)
 
expect_any_instance_of(described_class).not_to receive(:perform)
expect(worker).not_to receive(:update_repository_size)
 
described_class.perform_async(project.id)
worker.perform(project.id)
end
end
end
 
describe '#perform' do
context 'when an exclusive lease can be obtained' do
before do
allow(subject).to receive(:try_obtain_lease_for).with(project.id).
and_return(true)
end
context 'with an existing project' do
it 'updates the repository size' do
expect(worker).to receive(:update_repository_size).and_call_original
 
it 'updates project cache data' do
expect_any_instance_of(Repository).to receive(:size)
expect_any_instance_of(Repository).to receive(:commit_count)
worker.perform(project.id)
end
 
expect_any_instance_of(Project).to receive(:update_repository_size)
expect_any_instance_of(Project).to receive(:update_commit_count)
it 'updates the commit count' do
expect_any_instance_of(Project).to receive(:update_commit_count).
and_call_original
 
subject.perform(project.id)
worker.perform(project.id)
end
 
it 'handles missing repository data' do
expect_any_instance_of(Repository).to receive(:exists?).and_return(false)
expect_any_instance_of(Repository).not_to receive(:size)
it 'refreshes the method caches' do
expect_any_instance_of(Repository).to receive(:refresh_method_caches).
with(%i(readme)).
and_call_original
 
subject.perform(project.id)
worker.perform(project.id, %i(readme))
end
end
end
 
context 'when an exclusive lease can not be obtained' do
it 'does nothing' do
allow(subject).to receive(:try_obtain_lease_for).with(project.id).
describe '#update_repository_size' do
context 'when a lease could not be obtained' do
it 'does not update the repository size' do
allow(worker).to receive(:try_obtain_lease_for).
with(project.id, :update_repository_size).
and_return(false)
 
expect(subject).not_to receive(:update_caches)
expect(project).not_to receive(:update_repository_size)
worker.update_repository_size(project)
end
end
context 'when a lease could be obtained' do
it 'updates the repository size' do
allow(worker).to receive(:try_obtain_lease_for).
with(project.id, :update_repository_size).
and_return(true)
expect(project).to receive(:update_repository_size).and_call_original
 
subject.perform(project.id)
worker.update_repository_size(project)
end
end
end