-rw-r--r--  .gitlab-ci.yml  2
-rw-r--r--  Gemfile  2
-rw-r--r--  Gemfile.lock  2
-rw-r--r--  app/assets/javascripts/vue_shared/components/icon.vue  11
-rw-r--r--  app/finders/runner_jobs_finder.rb  22
-rw-r--r--  app/models/concerns/has_variable.rb  4
-rw-r--r--  app/models/project.rb  5
-rw-r--r--  app/models/storage/hashed_project.rb  1
-rw-r--r--  app/policies/group_policy.rb  2
-rw-r--r--  app/policies/namespace_policy.rb  1
-rw-r--r--  app/services/projects/hashed_storage/migrate_attachments_service.rb  54
-rw-r--r--  app/services/projects/hashed_storage/migrate_repository_service.rb  70
-rw-r--r--  app/services/projects/hashed_storage_migration_service.rb  62
-rw-r--r--  app/uploaders/file_uploader.rb  11
-rw-r--r--  app/workers/project_migrate_hashed_storage_worker.rb  26
-rw-r--r--  changelogs/unreleased/40561-environment-scope-value-is-not-trimmed.yml  5
-rw-r--r--  changelogs/unreleased/40568-bump-seed-fu-to-2-3-7.yml  5
-rw-r--r--  changelogs/unreleased/default-values-for-mr-states.yml  5
-rw-r--r--  changelogs/unreleased/hashed-storage-attachments-migration-path.yml  5
-rw-r--r--  changelogs/unreleased/tm-feature-list-runners-jobs-api.yml  5
-rw-r--r--  changelogs/unreleased/tm-feature-namespace-by-id-api.yml  5
-rw-r--r--  db/migrate/20171124125042_add_default_values_to_merge_request_states.rb  19
-rw-r--r--  db/migrate/20171124125748_populate_missing_merge_request_statuses.rb  50
-rw-r--r--  db/migrate/20171124132536_make_merge_request_statuses_not_null.rb  14
-rw-r--r--  db/schema.rb  6
-rw-r--r--  doc/administration/raketasks/storage.md  97
-rw-r--r--  doc/api/namespaces.md  52
-rw-r--r--  doc/api/runners.md  85
-rw-r--r--  lib/api/entities.rb  26
-rw-r--r--  lib/api/helpers.rb  22
-rw-r--r--  lib/api/namespaces.rb  10
-rw-r--r--  lib/api/runners.rb  23
-rw-r--r--  lib/tasks/gitlab/storage.rake  85
-rw-r--r--  spec/finders/runner_jobs_finder_spec.rb  39
-rw-r--r--  spec/javascripts/vue_shared/components/icon_spec.js  12
-rw-r--r--  spec/lib/api/helpers_spec.rb  109
-rw-r--r--  spec/models/concerns/has_variable_spec.rb  18
-rw-r--r--  spec/policies/group_policy_spec.rb  13
-rw-r--r--  spec/policies/namespace_policy_spec.rb  38
-rw-r--r--  spec/requests/api/namespaces_spec.rb  123
-rw-r--r--  spec/requests/api/runners_spec.rb  134
-rw-r--r--  spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb  63
-rw-r--r--  spec/services/projects/hashed_storage/migrate_repository_service_spec.rb  76
-rw-r--r--  spec/services/projects/hashed_storage_migration_service_spec.rb  66
-rw-r--r--  spec/workers/project_migrate_hashed_storage_worker_spec.rb  52
45 files changed, 1355 insertions, 182 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 52e21152dd2..60a2b5d5b5b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -579,7 +579,7 @@ codequality:
script:
- cp .rubocop.yml .rubocop.yml.bak
- grep -v "rubocop-gitlab-security" .rubocop.yml.bak > .rubocop.yml
- - docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate:0.69.0 analyze -f json > raw_codeclimate.json
+ - docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate analyze -f json > raw_codeclimate.json
- cat raw_codeclimate.json | docker run -i stedolan/jq -c 'map({check_name,fingerprint,location})' > codeclimate.json
- mv .rubocop.yml.bak .rubocop.yml
artifacts:
diff --git a/Gemfile b/Gemfile
index a422cee38e3..b6580c28eb7 100644
--- a/Gemfile
+++ b/Gemfile
@@ -111,7 +111,7 @@ gem 'google-api-client', '~> 0.13.6'
gem 'unf', '~> 0.1.4'
# Seed data
-gem 'seed-fu', '~> 2.3.5'
+gem 'seed-fu', '~> 2.3.7'
# Markdown and HTML processing
gem 'html-pipeline', '~> 1.11.0'
diff --git a/Gemfile.lock b/Gemfile.lock
index cfad693f933..16fd440279b 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -815,7 +815,7 @@ GEM
rake (>= 0.9, < 13)
sass (~> 3.4.20)
securecompare (1.0.0)
- seed-fu (2.3.6)
+ seed-fu (2.3.7)
activerecord (>= 3.1)
activesupport (>= 3.1)
select2-rails (3.5.9.3)
diff --git a/app/assets/javascripts/vue_shared/components/icon.vue b/app/assets/javascripts/vue_shared/components/icon.vue
index 8f116233e72..4216660da8c 100644
--- a/app/assets/javascripts/vue_shared/components/icon.vue
+++ b/app/assets/javascripts/vue_shared/components/icon.vue
@@ -12,6 +12,9 @@
/>
*/
+ // only allow classes in images.scss e.g. s12
+ const validSizes = [8, 12, 16, 18, 24, 32, 48, 72];
+
export default {
props: {
name: {
@@ -22,7 +25,10 @@
size: {
type: Number,
required: false,
- default: 0,
+ default: 16,
+ validator(value) {
+ return validSizes.includes(value);
+ },
},
cssClasses: {
@@ -42,10 +48,11 @@
},
};
</script>
+
<template>
<svg
:class="[iconSizeClass, cssClasses]">
- <use
+ <use
v-bind="{'xlink:href':spriteHref}"/>
</svg>
</template>
diff --git a/app/finders/runner_jobs_finder.rb b/app/finders/runner_jobs_finder.rb
new file mode 100644
index 00000000000..52340f94523
--- /dev/null
+++ b/app/finders/runner_jobs_finder.rb
@@ -0,0 +1,22 @@
+class RunnerJobsFinder
+ attr_reader :runner, :params
+
+ def initialize(runner, params = {})
+ @runner = runner
+ @params = params
+ end
+
+ def execute
+ items = @runner.builds
+ items = by_status(items)
+ items
+ end
+
+ private
+
+ def by_status(items)
+ return items unless HasStatus::AVAILABLE_STATUSES.include?(params[:status])
+
+ items.where(status: params[:status])
+ end
+end
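
For context, a minimal usage sketch of the finder added above, assuming a Rails console on an instance that includes this class; the runner ID is purely illustrative and not taken from the diff:

```ruby
# Hypothetical console session exercising RunnerJobsFinder (illustrative IDs).
runner = Ci::Runner.find(42)

# No params: every build executed by this runner.
all_jobs = RunnerJobsFinder.new(runner).execute

# A valid status adds a WHERE clause on builds.status.
failed_jobs = RunnerJobsFinder.new(runner, status: 'failed').execute

# An unrecognised status is ignored, because by_status only filters when the
# value appears in HasStatus::AVAILABLE_STATUSES.
unfiltered = RunnerJobsFinder.new(runner, status: 'bogus').execute
```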
diff --git a/app/models/concerns/has_variable.rb b/app/models/concerns/has_variable.rb
index 9585b5583dc..8a241e4374a 100644
--- a/app/models/concerns/has_variable.rb
+++ b/app/models/concerns/has_variable.rb
@@ -16,6 +16,10 @@ module HasVariable
key: Gitlab::Application.secrets.db_key_base,
algorithm: 'aes-256-cbc'
+ def key=(new_key)
+ super(new_key.to_s.strip)
+ end
+
def to_runner_variable
{ key: key, value: value, public: false }
end
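
The `key=` override above normalizes input before it reaches the encrypted attribute. A small sketch of the resulting behavior, assuming a console where `Ci::Variable` includes `HasVariable` (as it does in this codebase):

```ruby
variable = Ci::Variable.new

variable.key = '  MY_KEY  '
variable.key  # => "MY_KEY"  (surrounding whitespace stripped)

variable.key = nil
variable.key  # => ""        (nil coerced to an empty string by to_s)
```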
diff --git a/app/models/project.rb b/app/models/project.rb
index e276bd2422d..85d580fe0fa 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -273,8 +273,9 @@ class Project < ActiveRecord::Base
scope :pending_delete, -> { where(pending_delete: true) }
scope :without_deleted, -> { where(pending_delete: false) }
- scope :with_hashed_storage, -> { where('storage_version >= 1') }
- scope :with_legacy_storage, -> { where(storage_version: [nil, 0]) }
+ scope :with_storage_feature, ->(feature) { where('storage_version >= :version', version: HASHED_STORAGE_FEATURES[feature]) }
+ scope :without_storage_feature, ->(feature) { where('storage_version < :version OR storage_version IS NULL', version: HASHED_STORAGE_FEATURES[feature]) }
+ scope :with_unmigrated_storage, -> { where('storage_version < :version OR storage_version IS NULL', version: LATEST_STORAGE_VERSION) }
scope :sorted_by_activity, -> { reorder(last_activity_at: :desc) }
scope :sorted_by_stars, -> { reorder('projects.star_count DESC') }
diff --git a/app/models/storage/hashed_project.rb b/app/models/storage/hashed_project.rb
index f025f40994e..fae1b64961a 100644
--- a/app/models/storage/hashed_project.rb
+++ b/app/models/storage/hashed_project.rb
@@ -4,7 +4,6 @@ module Storage
delegate :gitlab_shell, :repository_storage_path, to: :project
ROOT_PATH_PREFIX = '@hashed'.freeze
- STORAGE_VERSION = 1
def initialize(project)
@project = project
diff --git a/app/policies/group_policy.rb b/app/policies/group_policy.rb
index 8af9738d75c..a2518bc1080 100644
--- a/app/policies/group_policy.rb
+++ b/app/policies/group_policy.rb
@@ -34,6 +34,8 @@ class GroupPolicy < BasePolicy
rule { admin } .enable :read_group
rule { has_projects } .enable :read_group
+ rule { has_access }.enable :read_namespace
+
rule { developer }.enable :admin_milestones
rule { reporter }.enable :admin_label
diff --git a/app/policies/namespace_policy.rb b/app/policies/namespace_policy.rb
index 92213f0155e..eb01218eb0a 100644
--- a/app/policies/namespace_policy.rb
+++ b/app/policies/namespace_policy.rb
@@ -8,6 +8,7 @@ class NamespacePolicy < BasePolicy
rule { owner | admin }.policy do
enable :create_projects
enable :admin_namespace
+ enable :read_namespace
end
rule { personal_project & ~can_create_personal_project }.prevent :create_projects
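
Together with the `read_namespace` rule added to `GroupPolicy` above, this makes the new ability checkable through the standard policy interface. A hedged sketch, assuming a console with an existing user and group; the lookups are illustrative:

```ruby
user  = User.find_by(username: 'some-user')   # illustrative
group = Group.find_by(full_path: 'group1')    # illustrative

# True for members of the group (guest or above), owners of a personal
# namespace, or admins -- per the rules introduced in this change.
Ability.allowed?(user, :read_namespace, group)
```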
diff --git a/app/services/projects/hashed_storage/migrate_attachments_service.rb b/app/services/projects/hashed_storage/migrate_attachments_service.rb
new file mode 100644
index 00000000000..f8aaec8a9c0
--- /dev/null
+++ b/app/services/projects/hashed_storage/migrate_attachments_service.rb
@@ -0,0 +1,54 @@
+module Projects
+ module HashedStorage
+ AttachmentMigrationError = Class.new(StandardError)
+
+ class MigrateAttachmentsService < BaseService
+ attr_reader :logger, :old_path, :new_path
+
+ def initialize(project, logger = nil)
+ @project = project
+ @logger = logger || Rails.logger
+ end
+
+ def execute
+ @old_path = project.full_path
+ @new_path = project.disk_path
+
+ origin = FileUploader.dynamic_path_segment(project)
+ project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
+ target = FileUploader.dynamic_path_segment(project)
+
+ result = move_folder!(origin, target)
+ project.save!
+
+ if result && block_given?
+ yield
+ end
+
+ result
+ end
+
+ private
+
+ def move_folder!(old_path, new_path)
+ unless File.directory?(old_path)
+ logger.info("Skipped attachments migration from '#{old_path}' to '#{new_path}', source path doesn't exist or is not a directory (PROJECT_ID=#{project.id})")
+ return
+ end
+
+ if File.exist?(new_path)
+ logger.error("Cannot migrate attachments from '#{old_path}' to '#{new_path}', target path already exists (PROJECT_ID=#{project.id})")
+ raise AttachmentMigrationError, "Target path '#{new_path}' already exists"
+ end
+
+ # Create hashed storage base path folder
+ FileUtils.mkdir_p(File.dirname(new_path))
+
+ FileUtils.mv(old_path, new_path)
+ logger.info("Migrated project attachments from '#{old_path}' to '#{new_path}' (PROJECT_ID=#{project.id})")
+
+ true
+ end
+ end
+ end
+end
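
A minimal invocation sketch for the attachments migration service, assuming a Rails console; the project ID and logger are illustrative, not taken from the diff:

```ruby
project = Project.find(123)          # illustrative ID
logger  = Logger.new(STDOUT)

service = Projects::HashedStorage::MigrateAttachmentsService.new(project, logger)

# Returns true when the folder was moved, nil when there was nothing to move,
# and raises AttachmentMigrationError when the target folder already exists.
service.execute do
  logger.info("attachments for project #{project.id} migrated")
end
```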
diff --git a/app/services/projects/hashed_storage/migrate_repository_service.rb b/app/services/projects/hashed_storage/migrate_repository_service.rb
new file mode 100644
index 00000000000..7212e7524ab
--- /dev/null
+++ b/app/services/projects/hashed_storage/migrate_repository_service.rb
@@ -0,0 +1,70 @@
+module Projects
+ module HashedStorage
+ class MigrateRepositoryService < BaseService
+ include Gitlab::ShellAdapter
+
+ attr_reader :old_disk_path, :new_disk_path, :old_wiki_disk_path, :old_storage_version, :logger
+
+ def initialize(project, logger = nil)
+ @project = project
+ @logger = logger || Rails.logger
+ end
+
+ def execute
+ @old_disk_path = project.disk_path
+ has_wiki = project.wiki.repository_exists?
+
+ @old_storage_version = project.storage_version
+ project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:repository]
+ project.ensure_storage_path_exists
+
+ @new_disk_path = project.disk_path
+
+ result = move_repository(@old_disk_path, @new_disk_path)
+
+ if has_wiki
+ @old_wiki_disk_path = "#{@old_disk_path}.wiki"
+ result &&= move_repository("#{@old_wiki_disk_path}", "#{@new_disk_path}.wiki")
+ end
+
+ unless result
+ rollback_folder_move
+ project.storage_version = nil
+ end
+
+ project.repository_read_only = false
+ project.save!
+
+ if result && block_given?
+ yield
+ end
+
+ result
+ end
+
+ private
+
+ def move_repository(from_name, to_name)
+ from_exists = gitlab_shell.exists?(project.repository_storage_path, "#{from_name}.git")
+ to_exists = gitlab_shell.exists?(project.repository_storage_path, "#{to_name}.git")
+
+ # If we don't find the repository on either the original or the target path, we log it, as that could be an
+ # issue if the project was not originally empty.
+ if !from_exists && !to_exists
+ logger.warn "Can't find a repository on either source or target paths for #{project.full_path} (ID=#{project.id}) ..."
+ return false
+ elsif !from_exists
+ # Repository has already been moved.
+ return true
+ end
+
+ gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
+ end
+
+ def rollback_folder_move
+ move_repository(@new_disk_path, @old_disk_path)
+ move_repository("#{@new_disk_path}.wiki", "#{@old_disk_path}.wiki")
+ end
+ end
+ end
+end
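
Likewise, a hedged sketch of driving the repository migration service directly from a console; the ID is illustrative:

```ruby
project = Project.find(123)   # illustrative ID

service = Projects::HashedStorage::MigrateRepositoryService.new(project)

# Moves <disk_path>.git (and the .wiki repository when one exists) to the
# hashed location, rolling the move back and clearing storage_version on
# failure; the block only runs when the move succeeded.
service.execute do
  puts "repository now stored at #{project.disk_path}"
end
```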
diff --git a/app/services/projects/hashed_storage_migration_service.rb b/app/services/projects/hashed_storage_migration_service.rb
index f5945f3b87f..662702c1db5 100644
--- a/app/services/projects/hashed_storage_migration_service.rb
+++ b/app/services/projects/hashed_storage_migration_service.rb
@@ -1,68 +1,22 @@
module Projects
class HashedStorageMigrationService < BaseService
- include Gitlab::ShellAdapter
-
- attr_reader :old_disk_path, :new_disk_path
+ attr_reader :logger
def initialize(project, logger = nil)
@project = project
- @logger ||= Rails.logger
+ @logger = logger || Rails.logger
end
def execute
- return if project.hashed_storage?(:repository)
-
- @old_disk_path = project.disk_path
- has_wiki = project.wiki.repository_exists?
-
- project.storage_version = Storage::HashedProject::STORAGE_VERSION
- project.ensure_storage_path_exists
-
- @new_disk_path = project.disk_path
-
- result = move_repository(@old_disk_path, @new_disk_path)
-
- if has_wiki
- result &&= move_repository("#{@old_disk_path}.wiki", "#{@new_disk_path}.wiki")
- end
-
- unless result
- rollback_folder_move
- return
+ # Migrate repository from Legacy to Hashed Storage
+ unless project.hashed_storage?(:repository)
+ return unless HashedStorage::MigrateRepositoryService.new(project, logger).execute
end
- project.repository_read_only = false
- project.save!
-
- block_given? ? yield : result
- end
-
- private
-
- def move_repository(from_name, to_name)
- from_exists = gitlab_shell.exists?(project.repository_storage_path, "#{from_name}.git")
- to_exists = gitlab_shell.exists?(project.repository_storage_path, "#{to_name}.git")
-
- # If we don't find the repository on either original or target we should log that as it could be an issue if the
- # project was not originally empty.
- if !from_exists && !to_exists
- logger.warn "Can't find a repository on either source or target paths for #{project.full_path} (ID=#{project.id}) ..."
- return false
- elsif !from_exists
- # Repository have been moved already.
- return true
+ # Migrate attachments from Legacy to Hashed Storage
+ unless project.hashed_storage?(:attachments)
+ HashedStorage::MigrateAttachmentsService.new(project, logger).execute
end
-
- gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
- end
-
- def rollback_folder_move
- move_repository(@new_disk_path, @old_disk_path)
- move_repository("#{@new_disk_path}.wiki", "#{@old_disk_path}.wiki")
- end
-
- def logger
- @logger
end
end
end
diff --git a/app/uploaders/file_uploader.rb b/app/uploaders/file_uploader.rb
index f4a5cf75018..71658df5b41 100644
--- a/app/uploaders/file_uploader.rb
+++ b/app/uploaders/file_uploader.rb
@@ -31,12 +31,19 @@ class FileUploader < GitlabUploader
# Returns a String without a trailing slash
def self.dynamic_path_segment(project)
if project.hashed_storage?(:attachments)
- File.join(CarrierWave.root, base_dir, project.disk_path)
+ dynamic_path_builder(project.disk_path)
else
- File.join(CarrierWave.root, base_dir, project.full_path)
+ dynamic_path_builder(project.full_path)
end
end
+ # Auxiliary method to build dynamic path segment when not using a project model
+ #
+ # Prefer `.dynamic_path_segment`, as it includes Hashed Storage-specific logic
+ def self.dynamic_path_builder(path)
+ File.join(CarrierWave.root, base_dir, path)
+ end
+
attr_accessor :model
attr_reader :secret
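
To show the relation between the two class methods, a small sketch (assuming a console; the project lookup is illustrative):

```ruby
project = Project.find(123)   # illustrative ID

# Storage-aware: picks project.disk_path or project.full_path depending on
# whether the project already uses hashed storage for attachments.
FileUploader.dynamic_path_segment(project)

# Storage-agnostic helper added above: joins any path under CarrierWave.root
# and the uploader's base_dir, as the hashed-storage spec below also does.
FileUploader.dynamic_path_builder(project.full_path)
```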
diff --git a/app/workers/project_migrate_hashed_storage_worker.rb b/app/workers/project_migrate_hashed_storage_worker.rb
index ca276d7801c..127aa6b9d7d 100644
--- a/app/workers/project_migrate_hashed_storage_worker.rb
+++ b/app/workers/project_migrate_hashed_storage_worker.rb
@@ -2,10 +2,34 @@ class ProjectMigrateHashedStorageWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
+ LEASE_TIMEOUT = 30.seconds.to_i
+
def perform(project_id)
project = Project.find_by(id: project_id)
return if project.nil? || project.pending_delete?
- ::Projects::HashedStorageMigrationService.new(project, logger).execute
+ uuid = lease_for(project_id).try_obtain
+ if uuid
+ ::Projects::HashedStorageMigrationService.new(project, logger).execute
+ else
+ false
+ end
+ rescue => ex
+ cancel_lease_for(project_id, uuid) if uuid
+ raise ex
+ end
+
+ def lease_for(project_id)
+ Gitlab::ExclusiveLease.new(lease_key(project_id), timeout: LEASE_TIMEOUT)
+ end
+
+ private
+
+ def lease_key(project_id)
+ "project_migrate_hashed_storage_worker:#{project_id}"
+ end
+
+ def cancel_lease_for(project_id, uuid)
+ Gitlab::ExclusiveLease.cancel(lease_key(project_id), uuid)
end
end
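
A short sketch of how the worker is typically scheduled and how the lease added above guards it, assuming a console; the project is illustrative:

```ruby
project = Project.find(123)   # illustrative ID

# Enqueue the migration; Sidekiq will call #perform with the project ID.
ProjectMigrateHashedStorageWorker.perform_async(project.id)

# Inside #perform, an exclusive lease keyed on the project ID stops two
# workers from migrating the same project within LEASE_TIMEOUT (30 s):
# try_obtain returns nil when another worker already holds the lease, in
# which case this run does nothing.
```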
diff --git a/changelogs/unreleased/40561-environment-scope-value-is-not-trimmed.yml b/changelogs/unreleased/40561-environment-scope-value-is-not-trimmed.yml
new file mode 100644
index 00000000000..e0e3ddbdaa8
--- /dev/null
+++ b/changelogs/unreleased/40561-environment-scope-value-is-not-trimmed.yml
@@ -0,0 +1,5 @@
+---
+title: Strip leading & trailing whitespaces in CI/CD secret variable keys
+merge_request: 15615
+author:
+type: fixed
diff --git a/changelogs/unreleased/40568-bump-seed-fu-to-2-3-7.yml b/changelogs/unreleased/40568-bump-seed-fu-to-2-3-7.yml
new file mode 100644
index 00000000000..708269d5c83
--- /dev/null
+++ b/changelogs/unreleased/40568-bump-seed-fu-to-2-3-7.yml
@@ -0,0 +1,5 @@
+---
+title: Upgrade seed-fu to 2.3.7
+merge_request: 15607
+author: Takuya Noguchi
+type: other
diff --git a/changelogs/unreleased/default-values-for-mr-states.yml b/changelogs/unreleased/default-values-for-mr-states.yml
new file mode 100644
index 00000000000..f873a5335d0
--- /dev/null
+++ b/changelogs/unreleased/default-values-for-mr-states.yml
@@ -0,0 +1,5 @@
+---
+title: Fix defaults for MR states and merge statuses
+merge_request:
+author:
+type: fixed
diff --git a/changelogs/unreleased/hashed-storage-attachments-migration-path.yml b/changelogs/unreleased/hashed-storage-attachments-migration-path.yml
new file mode 100644
index 00000000000..32535437046
--- /dev/null
+++ b/changelogs/unreleased/hashed-storage-attachments-migration-path.yml
@@ -0,0 +1,5 @@
+---
+title: Hashed Storage migration script now supports migrating project attachments
+merge_request: 15352
+author:
+type: added
diff --git a/changelogs/unreleased/tm-feature-list-runners-jobs-api.yml b/changelogs/unreleased/tm-feature-list-runners-jobs-api.yml
new file mode 100644
index 00000000000..d75a2b68c30
--- /dev/null
+++ b/changelogs/unreleased/tm-feature-list-runners-jobs-api.yml
@@ -0,0 +1,5 @@
+---
+title: New API endpoint - list jobs for a specified runner
+merge_request: 15432
+author:
+type: added
diff --git a/changelogs/unreleased/tm-feature-namespace-by-id-api.yml b/changelogs/unreleased/tm-feature-namespace-by-id-api.yml
new file mode 100644
index 00000000000..bc4a8949d28
--- /dev/null
+++ b/changelogs/unreleased/tm-feature-namespace-by-id-api.yml
@@ -0,0 +1,5 @@
+---
+title: Add new API endpoint - get a namespace by ID
+merge_request: 15442
+author:
+type: added
diff --git a/db/migrate/20171124125042_add_default_values_to_merge_request_states.rb b/db/migrate/20171124125042_add_default_values_to_merge_request_states.rb
new file mode 100644
index 00000000000..d08863c3b78
--- /dev/null
+++ b/db/migrate/20171124125042_add_default_values_to_merge_request_states.rb
@@ -0,0 +1,19 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class AddDefaultValuesToMergeRequestStates < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ def up
+ change_column_default :merge_requests, :state, :opened
+ change_column_default :merge_requests, :merge_status, :unchecked
+ end
+
+ def down
+ change_column_default :merge_requests, :state, nil
+ change_column_default :merge_requests, :merge_status, nil
+ end
+end
diff --git a/db/migrate/20171124125748_populate_missing_merge_request_statuses.rb b/db/migrate/20171124125748_populate_missing_merge_request_statuses.rb
new file mode 100644
index 00000000000..72fbab59f4c
--- /dev/null
+++ b/db/migrate/20171124125748_populate_missing_merge_request_statuses.rb
@@ -0,0 +1,50 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class PopulateMissingMergeRequestStatuses < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ class MergeRequest < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'merge_requests'
+ end
+
+ def up
+ say 'Populating missing merge_requests.state values'
+
+ # GitLab.com has no rows where "state" is NULL, and technically this should
+ # never happen. However it doesn't hurt to be 100% certain.
+ MergeRequest.where(state: nil).each_batch do |batch|
+ batch.update_all(state: 'opened')
+ end
+
+ say 'Populating missing merge_requests.merge_status values. ' \
+ 'This will take a few minutes...'
+
+ # GitLab.com has 66 880 rows where "merge_status" is NULL, dating back all
+ # the way to 2011.
+ MergeRequest.where(merge_status: nil).each_batch(of: 10_000) do |batch|
+ batch.update_all(merge_status: 'unchecked')
+
+ # We want to give PostgreSQL some time to vacuum any dead tuples. In
+ # production we see it takes roughly 1 minute for a vacuuming run to clear
+ # out 10-20k dead tuples, so we'll wait for 90 seconds between every
+ # batch.
+ sleep(90) if sleep?
+ end
+ end
+
+ def down
+ # Reverting this makes no sense.
+ end
+
+ def sleep?
+ Rails.env.staging? || Rails.env.production?
+ end
+end
diff --git a/db/migrate/20171124132536_make_merge_request_statuses_not_null.rb b/db/migrate/20171124132536_make_merge_request_statuses_not_null.rb
new file mode 100644
index 00000000000..4bb09126036
--- /dev/null
+++ b/db/migrate/20171124132536_make_merge_request_statuses_not_null.rb
@@ -0,0 +1,14 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class MakeMergeRequestStatusesNotNull < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ # Set this constant to true if this migration requires downtime.
+ DOWNTIME = false
+
+ def change
+ change_column_null :merge_requests, :state, false
+ change_column_null :merge_requests, :merge_status, false
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index d10561099b7..804bc8d6e37 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema.define(version: 20171121144800) do
+ActiveRecord::Schema.define(version: 20171124132536) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
@@ -1049,8 +1049,8 @@ ActiveRecord::Schema.define(version: 20171121144800) do
t.datetime "created_at"
t.datetime "updated_at"
t.integer "milestone_id"
- t.string "state"
- t.string "merge_status"
+ t.string "state", default: "opened", null: false
+ t.string "merge_status", default: "unchecked", null: false
t.integer "target_project_id", null: false
t.integer "iid"
t.text "description"
diff --git a/doc/administration/raketasks/storage.md b/doc/administration/raketasks/storage.md
index bac8fa4bd9d..6ec5baeb6e3 100644
--- a/doc/administration/raketasks/storage.md
+++ b/doc/administration/raketasks/storage.md
@@ -1,10 +1,43 @@
# Repository Storage Rake Tasks
This is a collection of rake tasks you can use to help you list and migrate
-existing projects from Legacy storage to the new Hashed storage type.
+existing projects, and the attachments associated with them, from Legacy storage to
+the new Hashed storage type.
You can read more about the storage types [here][storage-types].
+## Migrate existing projects to Hashed storage
+
+Before migrating your existing projects, you should
+[enable hashed storage][storage-migration] for the new projects as well.
+
+This task will schedule all your existing projects, and the attachments associated with them, to be migrated to the
+**Hashed** storage type:
+
+**Omnibus Installation**
+
+```bash
+gitlab-rake gitlab:storage:migrate_to_hashed
+```
+
+**Source Installation**
+
+```bash
+rake gitlab:storage:migrate_to_hashed
+
+```
+
+You can monitor the progress in the _Admin > Monitoring > Background jobs_ screen.
+There is a specific queue you can watch to see how long it will take to finish: **project_migrate_hashed_storage**.
+
+After it reaches zero, you can confirm every project has been migrated by running the commands below.
+If you find it necessary, you can run this migration script again to schedule missing projects.
+
+Any error or warning will be logged in Sidekiq's log file.
+
+You only need the `gitlab:storage:migrate_to_hashed` rake task to migrate your repositories, but we provide additional
+commands below that help you inspect projects and attachments in both legacy and hashed storage.
+
## List projects on Legacy storage
To have a simple summary of projects using **Legacy** storage:
@@ -73,35 +106,73 @@ rake gitlab:storage:list_hashed_projects
```
-## Migrate existing projects to Hashed storage
+## List attachments on Legacy storage
-Before migrating your existing projects, you should
-[enable hashed storage][storage-migration] for the new projects as well.
+To have a simple summary of project attachments using **Legacy** storage:
-This task will schedule all your existing projects to be migrated to the
-**Hashed** storage type:
+**Omnibus Installation**
+
+```bash
+gitlab-rake gitlab:storage:legacy_attachments
+```
+
+**Source Installation**
+
+```bash
+rake gitlab:storage:legacy_attachments
+
+```
+
+------
+
+To list project attachments using **Legacy** storage:
**Omnibus Installation**
```bash
-gitlab-rake gitlab:storage:migrate_to_hashed
+gitlab-rake gitlab:storage:list_legacy_attachments
```
**Source Installation**
```bash
-rake gitlab:storage:migrate_to_hashed
+rake gitlab:storage:list_legacy_attachments
```
-You can monitor the progress in the _Admin > Monitoring > Background jobs_ screen.
-There is a specific Queue you can watch to see how long it will take to finish: **project_migrate_hashed_storage**
+## List attachments on Hashed storage
-After it reaches zero, you can confirm every project has been migrated by running the commands above.
-If you find it necessary, you can run this migration script again to schedule missing projects.
+To have a simple summary of project attachments using **Hashed** storage:
+
+**Omnibus Installation**
+
+```bash
+gitlab-rake gitlab:storage:hashed_attachments
+```
-Any error or warning will be logged in the sidekiq log file.
+**Source Installation**
+
+```bash
+rake gitlab:storage:hashed_attachments
+
+```
+
+------
+
+To list project attachments using **Hashed** storage:
+
+**Omnibus Installation**
+```bash
+gitlab-rake gitlab:storage:list_hashed_attachments
+```
+
+**Source Installation**
+
+```bash
+rake gitlab:storage:list_hashed_attachments
+
+```
[storage-types]: ../repository_storage_types.md
[storage-migration]: ../repository_storage_types.md#how-to-migrate-to-hashed-storage
diff --git a/doc/api/namespaces.md b/doc/api/namespaces.md
index 5c0bebbaeb0..25cae5ce1f9 100644
--- a/doc/api/namespaces.md
+++ b/doc/api/namespaces.md
@@ -89,3 +89,55 @@ Example response:
}
]
```
+
+## Get namespace by ID
+
+Get a namespace by ID.
+
+```
+GET /namespaces/:id
+```
+
+| Attribute | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `id` | integer/string | yes | ID or path of the namespace |
+
+Example request:
+
+```bash
+curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/namespaces/2
+```
+
+Example response:
+
+```json
+{
+ "id": 2,
+ "name": "group1",
+ "path": "group1",
+ "kind": "group",
+ "full_path": "group1",
+ "parent_id": null,
+ "members_count_with_descendants": 2
+}
+```
+
+Example request:
+
+```bash
+curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/namespaces/group1
+```
+
+Example response:
+
+```json
+{
+ "id": 2,
+ "name": "group1",
+ "path": "group1",
+ "kind": "group",
+ "full_path": "group1",
+ "parent_id": null,
+ "members_count_with_descendants": 2
+}
+```
diff --git a/doc/api/runners.md b/doc/api/runners.md
index 6304a496f94..015b09a745e 100644
--- a/doc/api/runners.md
+++ b/doc/api/runners.md
@@ -215,6 +215,91 @@ DELETE /runners/:id
curl --request DELETE --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v4/runners/6"
```
+## List runner's jobs
+
+List jobs that are being processed or were processed by the specified Runner.
+
+```
+GET /runners/:id/jobs
+```
+
+| Attribute | Type | Required | Description |
+|-----------|---------|----------|---------------------|
+| `id` | integer | yes | The ID of a runner |
+| `status` | string | no | Status of the job; one of: `running`, `success`, `failed`, `canceled` |
+
+```
+curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v4/runners/1/jobs?status=running"
+```
+
+Example response:
+
+```json
+[
+ {
+ "id": 2,
+ "status": "running",
+ "stage": "test",
+ "name": "test",
+ "ref": "master",
+ "tag": false,
+ "coverage": null,
+ "created_at": "2017-11-16T08:50:29.000Z",
+ "started_at": "2017-11-16T08:51:29.000Z",
+ "finished_at": "2017-11-16T08:53:29.000Z",
+ "duration": 120,
+ "user": {
+ "id": 1,
+ "name": "John Doe2",
+ "username": "user2",
+ "state": "active",
+ "avatar_url": "http://www.gravatar.com/avatar/c922747a93b40d1ea88262bf1aebee62?s=80&d=identicon",
+ "web_url": "http://localhost/user2",
+ "created_at": "2017-11-16T18:38:46.000Z",
+ "bio": null,
+ "location": null,
+ "skype": "",
+ "linkedin": "",
+ "twitter": "",
+ "website_url": "",
+ "organization": null
+ },
+ "commit": {
+ "id": "97de212e80737a608d939f648d959671fb0a0142",
+ "short_id": "97de212e",
+ "title": "Update configuration\r",
+ "created_at": "2017-11-16T08:50:28.000Z",
+ "parent_ids": [
+ "1b12f15a11fc6e62177bef08f47bc7b5ce50b141",
+ "498214de67004b1da3d820901307bed2a68a8ef6"
+ ],
+ "message": "See merge request !123",
+ "author_name": "John Doe2",
+ "author_email": "user2@example.org",
+ "authored_date": "2017-11-16T08:50:27.000Z",
+ "committer_name": "John Doe2",
+ "committer_email": "user2@example.org",
+ "committed_date": "2017-11-16T08:50:27.000Z"
+ },
+ "pipeline": {
+ "id": 2,
+ "sha": "97de212e80737a608d939f648d959671fb0a0142",
+ "ref": "master",
+ "status": "running"
+ },
+ "project": {
+ "id": 1,
+ "description": null,
+ "name": "project1",
+ "name_with_namespace": "John Doe2 / project1",
+ "path": "project1",
+ "path_with_namespace": "namespace1/project1",
+ "created_at": "2017-11-16T18:38:46.620Z"
+ }
+ }
+]
+```
+
## List project's runners
List all runners (specific and shared) available in the project. Shared runners
diff --git a/lib/api/entities.rb b/lib/api/entities.rb
index 7d5d68c8f14..ce332fe85d2 100644
--- a/lib/api/entities.rb
+++ b/lib/api/entities.rb
@@ -80,16 +80,21 @@ module API
expose :group_access, as: :group_access_level
end
- class BasicProjectDetails < Grape::Entity
- expose :id, :description, :default_branch, :tag_list
- expose :ssh_url_to_repo, :http_url_to_repo, :web_url
+ class ProjectIdentity < Grape::Entity
+ expose :id, :description
expose :name, :name_with_namespace
expose :path, :path_with_namespace
+ expose :created_at
+ end
+
+ class BasicProjectDetails < ProjectIdentity
+ expose :default_branch, :tag_list
+ expose :ssh_url_to_repo, :http_url_to_repo, :web_url
expose :avatar_url do |project, options|
project.avatar_url(only_path: false)
end
expose :star_count, :forks_count
- expose :created_at, :last_activity_at
+ expose :last_activity_at
end
class Project < BasicProjectDetails
@@ -827,17 +832,24 @@ module API
expose :id, :sha, :ref, :status
end
- class Job < Grape::Entity
+ class JobBasic < Grape::Entity
expose :id, :status, :stage, :name, :ref, :tag, :coverage
expose :created_at, :started_at, :finished_at
expose :duration
expose :user, with: User
- expose :artifacts_file, using: JobArtifactFile, if: -> (job, opts) { job.artifacts? }
expose :commit, with: Commit
- expose :runner, with: Runner
expose :pipeline, with: PipelineBasic
end
+ class Job < JobBasic
+ expose :artifacts_file, using: JobArtifactFile, if: -> (job, opts) { job.artifacts? }
+ expose :runner, with: Runner
+ end
+
+ class JobBasicWithProject < JobBasic
+ expose :project, with: ProjectIdentity
+ end
+
class Trigger < Grape::Entity
expose :id
expose :token, :description
diff --git a/lib/api/helpers.rb b/lib/api/helpers.rb
index b26c61ab8da..686bf7a3c2b 100644
--- a/lib/api/helpers.rb
+++ b/lib/api/helpers.rb
@@ -50,6 +50,10 @@ module API
initial_current_user != current_user
end
+ def user_namespace
+ @user_namespace ||= find_namespace!(params[:id])
+ end
+
def user_group
@group ||= find_group!(params[:id])
end
@@ -112,6 +116,24 @@ module API
end
end
+ def find_namespace(id)
+ if id.to_s =~ /^\d+$/
+ Namespace.find_by(id: id)
+ else
+ Namespace.find_by_full_path(id)
+ end
+ end
+
+ def find_namespace!(id)
+ namespace = find_namespace(id)
+
+ if can?(current_user, :read_namespace, namespace)
+ namespace
+ else
+ not_found!('Namespace')
+ end
+ end
+
def find_project_label(id)
label = available_labels.find_by_id(id) || available_labels.find_by_title(id)
label || not_found!('Label')
diff --git a/lib/api/namespaces.rb b/lib/api/namespaces.rb
index f1eaff6b0eb..32b77aedba8 100644
--- a/lib/api/namespaces.rb
+++ b/lib/api/namespaces.rb
@@ -19,6 +19,16 @@ module API
present paginate(namespaces), with: Entities::Namespace, current_user: current_user
end
+
+ desc 'Get a namespace by ID' do
+ success Entities::Namespace
+ end
+ params do
+ requires :id, type: String, desc: "Namespace's ID or path"
+ end
+ get ':id' do
+ present user_namespace, with: Entities::Namespace, current_user: current_user
+ end
end
end
end
diff --git a/lib/api/runners.rb b/lib/api/runners.rb
index e816fcdd928..996457c5dfe 100644
--- a/lib/api/runners.rb
+++ b/lib/api/runners.rb
@@ -84,6 +84,23 @@ module API
destroy_conditionally!(runner)
end
+
+ desc 'List jobs running on a runner' do
+ success Entities::JobBasicWithProject
+ end
+ params do
+ requires :id, type: Integer, desc: 'The ID of the runner'
+ optional :status, type: String, desc: 'Status of the job', values: Ci::Build::AVAILABLE_STATUSES
+ use :pagination
+ end
+ get ':id/jobs' do
+ runner = get_runner(params[:id])
+ authenticate_list_runners_jobs!(runner)
+
+ jobs = RunnerJobsFinder.new(runner, params).execute
+
+ present paginate(jobs), with: Entities::JobBasicWithProject
+ end
end
params do
@@ -192,6 +209,12 @@ module API
forbidden!("No access granted") unless user_can_access_runner?(runner)
end
+ def authenticate_list_runners_jobs!(runner)
+ return if current_user.admin?
+
+ forbidden!("No access granted") unless user_can_access_runner?(runner)
+ end
+
def user_can_access_runner?(runner)
current_user.ci_authorized_runners.exists?(runner.id)
end
diff --git a/lib/tasks/gitlab/storage.rake b/lib/tasks/gitlab/storage.rake
index e05be4a3405..8ac73bc8ff2 100644
--- a/lib/tasks/gitlab/storage.rake
+++ b/lib/tasks/gitlab/storage.rake
@@ -2,10 +2,10 @@ namespace :gitlab do
namespace :storage do
desc 'GitLab | Storage | Migrate existing projects to Hashed Storage'
task migrate_to_hashed: :environment do
- legacy_projects_count = Project.with_legacy_storage.count
+ legacy_projects_count = Project.with_unmigrated_storage.count
if legacy_projects_count == 0
- puts 'There are no projects using legacy storage. Nothing to do!'
+ puts 'There are no projects requiring storage migration. Nothing to do!'
next
end
@@ -23,22 +23,42 @@ namespace :gitlab do
desc 'Gitlab | Storage | Summary of existing projects using Legacy Storage'
task legacy_projects: :environment do
- projects_summary(Project.with_legacy_storage)
+ relation_summary('projects', Project.without_storage_feature(:repository))
end
desc 'Gitlab | Storage | List existing projects using Legacy Storage'
task list_legacy_projects: :environment do
- projects_list(Project.with_legacy_storage)
+ projects_list('projects using Legacy Storage', Project.without_storage_feature(:repository))
end
desc 'Gitlab | Storage | Summary of existing projects using Hashed Storage'
task hashed_projects: :environment do
- projects_summary(Project.with_hashed_storage)
+ relation_summary('projects using Hashed Storage', Project.with_storage_feature(:repository))
end
desc 'Gitlab | Storage | List existing projects using Hashed Storage'
task list_hashed_projects: :environment do
- projects_list(Project.with_hashed_storage)
+ projects_list('projects using Hashed Storage', Project.with_storage_feature(:repository))
+ end
+
+ desc 'Gitlab | Storage | Summary of project attachments using Legacy Storage'
+ task legacy_attachments: :environment do
+ relation_summary('attachments using Legacy Storage', legacy_attachments_relation)
+ end
+
+ desc 'Gitlab | Storage | List existing project attachments using Legacy Storage'
+ task list_legacy_attachments: :environment do
+ attachments_list('attachments using Legacy Storage', legacy_attachments_relation)
+ end
+
+ desc 'Gitlab | Storage | Summary of project attachments using Hashed Storage'
+ task hashed_attachments: :environment do
+ relation_summary('attachments using Hashed Storage', hashed_attachments_relation)
+ end
+
+ desc 'Gitlab | Storage | List existing project attachments using Hashed Storage'
+ task list_hashed_attachments: :environment do
+ attachments_list('attachments using Hashed Storage', hashed_attachments_relation)
end
def batch_size
@@ -46,29 +66,43 @@ namespace :gitlab do
end
def project_id_batches(&block)
- Project.with_legacy_storage.in_batches(of: batch_size, start: ENV['ID_FROM'], finish: ENV['ID_TO']) do |relation| # rubocop: disable Cop/InBatches
+ Project.with_unmigrated_storage.in_batches(of: batch_size, start: ENV['ID_FROM'], finish: ENV['ID_TO']) do |relation| # rubocop: disable Cop/InBatches
ids = relation.pluck(:id)
yield ids.min, ids.max
end
end
- def projects_summary(relation)
- projects_count = relation.count
- puts "* Found #{projects_count} projects".color(:green)
+ def legacy_attachments_relation
+ Upload.joins(<<~SQL).where('projects.storage_version < :version OR projects.storage_version IS NULL', version: Project::HASHED_STORAGE_FEATURES[:attachments])
+ JOIN projects
+ ON (uploads.model_type='Project' AND uploads.model_id=projects.id)
+ SQL
+ end
+
+ def hashed_attachments_relation
+ Upload.joins(<<~SQL).where('projects.storage_version >= :version', version: Project::HASHED_STORAGE_FEATURES[:attachments])
+ JOIN projects
+ ON (uploads.model_type='Project' AND uploads.model_id=projects.id)
+ SQL
+ end
+
+ def relation_summary(relation_name, relation)
+ relation_count = relation.count
+ puts "* Found #{relation_count} #{relation_name}".color(:green)
- projects_count
+ relation_count
end
- def projects_list(relation)
- projects_count = projects_summary(relation)
+ def projects_list(relation_name, relation)
+ relation_count = relation_summary(relation_name, relation)
projects = relation.with_route
limit = ENV.fetch('LIMIT', 500).to_i
- return unless projects_count > 0
+ return unless relation_count > 0
- puts " ! Displaying first #{limit} projects..." if projects_count > limit
+ puts " ! Displaying first #{limit} #{relation_name}..." if relation_count > limit
counter = 0
projects.find_in_batches(batch_size: batch_size) do |batch|
@@ -81,5 +115,26 @@ namespace :gitlab do
end
end
end
+
+ def attachments_list(relation_name, relation)
+ relation_count = relation_summary(relation_name, relation)
+
+ limit = ENV.fetch('LIMIT', 500).to_i
+
+ return unless relation_count > 0
+
+ puts " ! Displaying first #{limit} #{relation_name}..." if relation_count > limit
+
+ counter = 0
+ relation.find_in_batches(batch_size: batch_size) do |batch|
+ batch.each do |upload|
+ counter += 1
+
+ puts " - #{upload.path} (id: #{upload.id})".color(:red)
+
+ return if counter >= limit # rubocop:disable Lint/NonLocalExitFromIterator
+ end
+ end
+ end
end
end
diff --git a/spec/finders/runner_jobs_finder_spec.rb b/spec/finders/runner_jobs_finder_spec.rb
new file mode 100644
index 00000000000..4275b1a7ff1
--- /dev/null
+++ b/spec/finders/runner_jobs_finder_spec.rb
@@ -0,0 +1,39 @@
+require 'spec_helper'
+
+describe RunnerJobsFinder do
+ let(:project) { create(:project) }
+ let(:runner) { create(:ci_runner, :shared) }
+
+ subject { described_class.new(runner, params).execute }
+
+ describe '#execute' do
+ context 'when params is empty' do
+ let(:params) { {} }
+ let!(:job) { create(:ci_build, runner: runner, project: project) }
+ let!(:job1) { create(:ci_build, project: project) }
+
+ it 'returns all jobs assigned to Runner' do
+ is_expected.to match_array(job)
+ is_expected.not_to match_array(job1)
+ end
+ end
+
+ context 'when params contains status' do
+ HasStatus::AVAILABLE_STATUSES.each do |target_status|
+ context "when status is #{target_status}" do
+ let(:params) { { status: target_status } }
+ let!(:job) { create(:ci_build, runner: runner, project: project, status: target_status) }
+
+ before do
+ exception_status = HasStatus::AVAILABLE_STATUSES - [target_status]
+ create(:ci_build, runner: runner, project: project, status: exception_status.first)
+ end
+
+ it 'returns matched job' do
+ is_expected.to eq([job])
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/javascripts/vue_shared/components/icon_spec.js b/spec/javascripts/vue_shared/components/icon_spec.js
index 104da4473ce..a22b6bd3a67 100644
--- a/spec/javascripts/vue_shared/components/icon_spec.js
+++ b/spec/javascripts/vue_shared/components/icon_spec.js
@@ -11,7 +11,7 @@ describe('Sprite Icon Component', function () {
icon = mountComponent(IconComponent, {
name: 'test',
- size: 99,
+ size: 32,
cssClasses: 'extraclasses',
});
});
@@ -34,12 +34,18 @@ describe('Sprite Icon Component', function () {
});
it('should properly compute iconSizeClass', function () {
- expect(icon.iconSizeClass).toBe('s99');
+ expect(icon.iconSizeClass).toBe('s32');
+ });
+
+ it('forbids invalid size prop', () => {
+ expect(icon.$options.props.size.validator(NaN)).toBeFalsy();
+ expect(icon.$options.props.size.validator(0)).toBeFalsy();
+ expect(icon.$options.props.size.validator(9001)).toBeFalsy();
});
it('should properly render img css', function () {
const classList = icon.$el.classList;
- const containsSizeClass = classList.contains('s99');
+ const containsSizeClass = classList.contains('s32');
const containsCustomClass = classList.contains('extraclasses');
expect(containsSizeClass).toBe(true);
expect(containsCustomClass).toBe(true);
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
new file mode 100644
index 00000000000..3c4deba4712
--- /dev/null
+++ b/spec/lib/api/helpers_spec.rb
@@ -0,0 +1,109 @@
+require 'spec_helper'
+
+describe API::Helpers do
+ subject { Class.new.include(described_class).new }
+
+ describe '#find_namespace' do
+ let(:namespace) { create(:namespace) }
+
+ shared_examples 'namespace finder' do
+ context 'when namespace exists' do
+ it 'returns requested namespace' do
+ expect(subject.find_namespace(existing_id)).to eq(namespace)
+ end
+ end
+
+ context "when namespace doesn't exist" do
+ it 'returns nil' do
+ expect(subject.find_namespace(non_existing_id)).to be_nil
+ end
+ end
+ end
+
+ context 'when ID is used as an argument' do
+ let(:existing_id) { namespace.id }
+ let(:non_existing_id) { 9999 }
+
+ it_behaves_like 'namespace finder'
+ end
+
+ context 'when PATH is used as an argument' do
+ let(:existing_id) { namespace.path }
+ let(:non_existing_id) { 'non-existing-path' }
+
+ it_behaves_like 'namespace finder'
+ end
+ end
+
+ shared_examples 'user namespace finder' do
+ let(:user1) { create(:user) }
+
+ before do
+ allow(subject).to receive(:current_user).and_return(user1)
+ allow(subject).to receive(:header).and_return(nil)
+ allow(subject).to receive(:not_found!).and_raise('404 Namespace not found')
+ end
+
+ context 'when namespace is group' do
+ let(:namespace) { create(:group) }
+
+ context 'when user has access to group' do
+ before do
+ namespace.add_guest(user1)
+ namespace.save!
+ end
+
+ it 'returns requested namespace' do
+ expect(namespace_finder).to eq(namespace)
+ end
+ end
+
+ context "when user doesn't have access to group" do
+ it 'raises not found error' do
+ expect { namespace_finder }.to raise_error(RuntimeError, '404 Namespace not found')
+ end
+ end
+ end
+
+ context "when namespace is user's personal namespace" do
+ let(:namespace) { create(:namespace) }
+
+ context 'when user owns the namespace' do
+ before do
+ namespace.owner = user1
+ namespace.save!
+ end
+
+ it 'returns requested namespace' do
+ expect(namespace_finder).to eq(namespace)
+ end
+ end
+
+ context "when user doesn't own the namespace" do
+ it 'raises not found error' do
+ expect { namespace_finder }.to raise_error(RuntimeError, '404 Namespace not found')
+ end
+ end
+ end
+ end
+
+ describe '#find_namespace!' do
+ let(:namespace_finder) do
+ subject.find_namespace!(namespace.id)
+ end
+
+ it_behaves_like 'user namespace finder'
+ end
+
+ describe '#user_namespace' do
+ let(:namespace_finder) do
+ subject.user_namespace
+ end
+
+ before do
+ allow(subject).to receive(:params).and_return({ id: namespace.id })
+ end
+
+ it_behaves_like 'user namespace finder'
+ end
+end
diff --git a/spec/models/concerns/has_variable_spec.rb b/spec/models/concerns/has_variable_spec.rb
index f4b24e6d1d9..f87869a2fdc 100644
--- a/spec/models/concerns/has_variable_spec.rb
+++ b/spec/models/concerns/has_variable_spec.rb
@@ -9,6 +9,24 @@ describe HasVariable do
it { is_expected.not_to allow_value('foo bar').for(:key) }
it { is_expected.not_to allow_value('foo/bar').for(:key) }
+ describe '#key=' do
+ context 'when the new key is nil' do
+ it 'strips leading and trailing whitespaces' do
+ subject.key = nil
+
+ expect(subject.key).to eq('')
+ end
+ end
+
+ context 'when the new key has leading and trailing whitespaces' do
+ it 'strips leading and trailing whitespaces' do
+ subject.key = ' my key '
+
+ expect(subject.key).to eq('my key')
+ end
+ end
+ end
+
describe '#value' do
before do
subject.value = 'secret'
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 17dc3bb4f48..4f4e634829d 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -56,6 +56,7 @@ describe GroupPolicy do
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
expect_disallowed(*owner_permissions)
+ expect_disallowed(:read_namespace)
end
end
@@ -63,7 +64,7 @@ describe GroupPolicy do
let(:current_user) { guest }
it do
- expect_allowed(:read_group)
+ expect_allowed(:read_group, :read_namespace)
expect_disallowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -75,7 +76,7 @@ describe GroupPolicy do
let(:current_user) { reporter }
it do
- expect_allowed(:read_group)
+ expect_allowed(:read_group, :read_namespace)
expect_allowed(*reporter_permissions)
expect_disallowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -87,7 +88,7 @@ describe GroupPolicy do
let(:current_user) { developer }
it do
- expect_allowed(:read_group)
+ expect_allowed(:read_group, :read_namespace)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_disallowed(*master_permissions)
@@ -99,7 +100,7 @@ describe GroupPolicy do
let(:current_user) { master }
it do
- expect_allowed(:read_group)
+ expect_allowed(:read_group, :read_namespace)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
@@ -113,7 +114,7 @@ describe GroupPolicy do
it do
allow(Group).to receive(:supports_nested_groups?).and_return(true)
- expect_allowed(:read_group)
+ expect_allowed(:read_group, :read_namespace)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
@@ -127,7 +128,7 @@ describe GroupPolicy do
it do
allow(Group).to receive(:supports_nested_groups?).and_return(true)
- expect_allowed(:read_group)
+ expect_allowed(:read_group, :read_namespace)
expect_allowed(*reporter_permissions)
expect_allowed(*developer_permissions)
expect_allowed(*master_permissions)
diff --git a/spec/policies/namespace_policy_spec.rb b/spec/policies/namespace_policy_spec.rb
index e52ff02e5f0..1fdf95ad716 100644
--- a/spec/policies/namespace_policy_spec.rb
+++ b/spec/policies/namespace_policy_spec.rb
@@ -1,20 +1,42 @@
require 'spec_helper'
describe NamespacePolicy do
- let(:current_user) { create(:user) }
- let(:namespace) { current_user.namespace }
+ let(:user) { create(:user) }
+ let(:owner) { create(:user) }
+ let(:admin) { create(:admin) }
+ let(:namespace) { create(:namespace, owner: owner) }
+
+ let(:owner_permissions) { [:create_projects, :admin_namespace, :read_namespace] }
subject { described_class.new(current_user, namespace) }
- context "create projects" do
- context "user namespace" do
- it { is_expected.to be_allowed(:create_projects) }
- end
+ context 'with no user' do
+ let(:current_user) { nil }
+
+ it { is_expected.to be_banned }
+ end
+
+ context 'regular user' do
+ let(:current_user) { user }
+
+ it { is_expected.to be_disallowed(*owner_permissions) }
+ end
+
+ context 'owner' do
+ let(:current_user) { owner }
+
+ it { is_expected.to be_allowed(*owner_permissions) }
- context "user who has exceeded project limit" do
- let(:current_user) { create(:user, projects_limit: 0) }
+ context 'user who has exceeded project limit' do
+ let(:owner) { create(:user, projects_limit: 0) }
it { is_expected.not_to be_allowed(:create_projects) }
end
end
+
+ context 'admin' do
+ let(:current_user) { admin }
+
+ it { is_expected.to be_allowed(*owner_permissions) }
+ end
end
diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb
index e60716d46d7..98102fcd6a7 100644
--- a/spec/requests/api/namespaces_spec.rb
+++ b/spec/requests/api/namespaces_spec.rb
@@ -91,4 +91,127 @@ describe API::Namespaces do
end
end
end
+
+ describe 'GET /namespaces/:id' do
+ let(:owned_group) { group1 }
+ let(:user2) { create(:user) }
+
+ shared_examples 'can access namespace' do
+ it 'returns namespace details' do
+ get api("/namespaces/#{namespace_id}", request_actor)
+
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(json_response['id']).to eq(requested_namespace.id)
+ expect(json_response['path']).to eq(requested_namespace.path)
+ expect(json_response['name']).to eq(requested_namespace.name)
+ end
+ end
+
+ shared_examples 'namespace reader' do
+ let(:requested_namespace) { owned_group }
+
+ before do
+ owned_group.add_owner(request_actor)
+ end
+
+ context 'when namespace exists' do
+ context 'when requested by ID' do
+ context 'when requesting group' do
+ let(:namespace_id) { owned_group.id }
+
+ it_behaves_like 'can access namespace'
+ end
+
+ context 'when requesting personal namespace' do
+ let(:namespace_id) { request_actor.namespace.id }
+ let(:requested_namespace) { request_actor.namespace }
+
+ it_behaves_like 'can access namespace'
+ end
+ end
+
+ context 'when requested by path' do
+ context 'when requesting group' do
+ let(:namespace_id) { owned_group.path }
+
+ it_behaves_like 'can access namespace'
+ end
+
+ context 'when requesting personal namespace' do
+ let(:namespace_id) { request_actor.namespace.path }
+ let(:requested_namespace) { request_actor.namespace }
+
+ it_behaves_like 'can access namespace'
+ end
+ end
+ end
+
+ context "when namespace doesn't exist" do
+ it 'returns not-found' do
+ get api('/namespaces/9999', request_actor)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context 'when unauthenticated' do
+ it 'returns authentication error' do
+ get api("/namespaces/#{group1.id}")
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
+
+ context 'when authenticated as regular user' do
+ let(:request_actor) { user }
+
+ context 'when requested namespace is not owned by user' do
+ context 'when requesting group' do
+ it 'returns not-found' do
+ get api("/namespaces/#{group2.id}", request_actor)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context 'when requesting personal namespace' do
+ it 'returns not-found' do
+ get api("/namespaces/#{user2.namespace.id}", request_actor)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context 'when requested namespace is owned by user' do
+ it_behaves_like 'namespace reader'
+ end
+ end
+
+ context 'when authenticated as admin' do
+ let(:request_actor) { admin }
+
+ context 'when requested namespace is not owned by user' do
+ context 'when requesting group' do
+ let(:namespace_id) { group2.id }
+ let(:requested_namespace) { group2 }
+
+ it_behaves_like 'can access namespace'
+ end
+
+ context 'when requesting personal namespace' do
+ let(:namespace_id) { user2.namespace.id }
+ let(:requested_namespace) { user2.namespace }
+
+ it_behaves_like 'can access namespace'
+ end
+ end
+
+ context 'when requested namespace is owned by user' do
+ it_behaves_like 'namespace reader'
+ end
+ end
+ end
end
diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb
index fe38a7b3251..ec5cad4f4fd 100644
--- a/spec/requests/api/runners_spec.rb
+++ b/spec/requests/api/runners_spec.rb
@@ -354,6 +354,140 @@ describe API::Runners do
end
end
+ describe 'GET /runners/:id/jobs' do
+ set(:job_1) { create(:ci_build) }
+ let!(:job_2) { create(:ci_build, :running, runner: shared_runner, project: project) }
+ let!(:job_3) { create(:ci_build, :failed, runner: shared_runner, project: project) }
+ let!(:job_4) { create(:ci_build, :running, runner: specific_runner, project: project) }
+ let!(:job_5) { create(:ci_build, :failed, runner: specific_runner, project: project) }
+
+ context 'admin user' do
+ context 'when runner exists' do
+ context 'when runner is shared' do
+ it 'returns jobs' do
+ get api("/runners/#{shared_runner.id}/jobs", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to be_an(Array)
+ expect(json_response.length).to eq(2)
+ end
+ end
+
+ context 'when runner is specific' do
+ it 'returns jobs' do
+ get api("/runners/#{specific_runner.id}/jobs", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to be_an(Array)
+ expect(json_response.length).to eq(2)
+ end
+ end
+
+ context 'when valid status is provided' do
+ it 'returns filtered jobs' do
+ get api("/runners/#{specific_runner.id}/jobs?status=failed", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to be_an(Array)
+ expect(json_response.length).to eq(1)
+ expect(json_response.first).to include('id' => job_5.id)
+ end
+ end
+
+ context 'when invalid status is provided' do
+ it 'returns 400' do
+ get api("/runners/#{specific_runner.id}/jobs?status=non-existing", admin)
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+ end
+
+ context "when runner doesn't exist" do
+ it 'returns 404' do
+ get api('/runners/9999/jobs', admin)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context "runner project's administrative user" do
+ context 'when runner exists' do
+ context 'when runner is shared' do
+ it 'returns 403' do
+ get api("/runners/#{shared_runner.id}/jobs", user)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'when runner is specific' do
+ it 'returns jobs' do
+ get api("/runners/#{specific_runner.id}/jobs", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to be_an(Array)
+ expect(json_response.length).to eq(2)
+ end
+ end
+
+ context 'when valid status is provided' do
+ it 'returns filtered jobs' do
+ get api("/runners/#{specific_runner.id}/jobs?status=failed", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+
+ expect(json_response).to be_an(Array)
+ expect(json_response.length).to eq(1)
+ expect(json_response.first).to include('id' => job_5.id)
+ end
+ end
+
+ context 'when invalid status is provided' do
+ it 'returns 400' do
+ get api("/runners/#{specific_runner.id}/jobs?status=non-existing", user)
+
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+ end
+
+ context "when runner doesn't exist" do
+ it 'returns 404' do
+ get api('/runners/9999/jobs', user)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+
+ context 'other authorized user' do
+ it 'does not return jobs' do
+ get api("/runners/#{specific_runner.id}/jobs", user2)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+
+ context 'unauthorized user' do
+ it 'does not return jobs' do
+ get api("/runners/#{specific_runner.id}/jobs")
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
+ end
+
describe 'GET /projects/:id/runners' do
context 'authorized user with master privileges' do
it "returns project's runners" do
diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
new file mode 100644
index 00000000000..50e59954f73
--- /dev/null
+++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -0,0 +1,63 @@
+require 'spec_helper'
+
+describe Projects::HashedStorage::MigrateAttachmentsService do
+ subject(:service) { described_class.new(project) }
+ let(:project) { create(:project) }
+ let(:legacy_storage) { Storage::LegacyProject.new(project) }
+ let(:hashed_storage) { Storage::HashedProject.new(project) }
+
+ let!(:upload) { Upload.find_by(path: file_uploader.relative_path) }
+ let(:file_uploader) { build(:file_uploader, project: project) }
+ let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
+ let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
+
+ context '#execute' do
+ context 'when it succeeds' do
+ it 'moves attachments to hashed storage layout' do
+ expect(File.file?(old_path)).to be_truthy
+ expect(File.file?(new_path)).to be_falsey
+ expect(File.exist?(base_path(legacy_storage))).to be_truthy
+ expect(File.exist?(base_path(hashed_storage))).to be_falsey
+ expect(FileUtils).to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage)).and_call_original
+
+ service.execute
+
+ expect(File.exist?(base_path(hashed_storage))).to be_truthy
+ expect(File.exist?(base_path(legacy_storage))).to be_falsey
+ expect(File.file?(old_path)).to be_falsey
+ expect(File.file?(new_path)).to be_truthy
+ end
+ end
+
+ context 'when original folder does not exist anymore' do
+ before do
+ FileUtils.rm_rf(base_path(legacy_storage))
+ end
+
+ it 'skips moving folders and goes to the next step' do
+ expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
+
+ service.execute
+
+ expect(File.exist?(base_path(hashed_storage))).to be_falsey
+ expect(File.file?(new_path)).to be_falsey
+ end
+ end
+
+ context 'when target folder already exists' do
+ before do
+ FileUtils.mkdir_p(base_path(hashed_storage))
+ end
+
+ it 'raises AttachmentMigrationError' do
+ expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
+
+ expect { service.execute }.to raise_error(Projects::HashedStorage::AttachmentMigrationError)
+ end
+ end
+ end
+
+ def base_path(storage)
+ FileUploader.dynamic_path_builder(storage.disk_path)
+ end
+end
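The expectations above pin down three behaviours for the attachments migration: the legacy uploads folder is moved with FileUtils.mv, a missing source folder is skipped, and an already-existing target raises AttachmentMigrationError. A minimal sketch consistent with that contract, using only helpers the spec itself exercises (assumed shape, not the shipped app/services/projects/hashed_storage/migrate_attachments_service.rb):

module Projects
  module HashedStorage
    AttachmentMigrationError = Class.new(StandardError)

    # Illustrative only: moves the project's legacy uploads folder to the
    # hashed-storage location, mirroring what the spec asserts.
    class MigrateAttachmentsService
      attr_reader :project, :logger

      def initialize(project, logger = nil)
        @project = project
        @logger = logger
      end

      def execute
        old_path = FileUploader.dynamic_path_builder(Storage::LegacyProject.new(project).disk_path)
        new_path = FileUploader.dynamic_path_builder(Storage::HashedProject.new(project).disk_path)

        # Nothing to move when the legacy folder was never created.
        return true unless File.directory?(old_path)

        # Never overwrite an existing target folder.
        raise AttachmentMigrationError, "Target path '#{new_path}' already exists" if File.exist?(new_path)

        FileUtils.mv(old_path, new_path)
        true
      end
    end
  end
end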
diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
new file mode 100644
index 00000000000..3a3e47fd9c0
--- /dev/null
+++ b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
@@ -0,0 +1,76 @@
+require 'spec_helper'
+
+describe Projects::HashedStorage::MigrateRepositoryService do
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:project) { create(:project, :empty_repo, :wiki_repo) }
+ let(:service) { described_class.new(project) }
+ let(:legacy_storage) { Storage::LegacyProject.new(project) }
+ let(:hashed_storage) { Storage::HashedProject.new(project) }
+
+ describe '#execute' do
+ before do
+ allow(service).to receive(:gitlab_shell) { gitlab_shell }
+ end
+
+ context 'when it succeeds' do
+ it 'renames project and wiki repositories' do
+ service.execute
+
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_truthy
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_truthy
+ end
+
+ it 'updates project to be hashed and not read-only' do
+ service.execute
+
+ expect(project.hashed_storage?(:repository)).to be_truthy
+ expect(project.repository_read_only).to be_falsey
+ end
+
+ it 'calls the move operation for both repositories' do
+ expect_move_repository(project.disk_path, hashed_storage.disk_path)
+ expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki")
+
+ service.execute
+ end
+ end
+
+ context 'when one move fails' do
+ it 'rolls back repositories to the original name' do
+ from_name = project.disk_path
+ to_name = hashed_storage.disk_path
+ allow(service).to receive(:move_repository).and_call_original
+ allow(service).to receive(:move_repository).with(from_name, to_name).once { false } # fail the first move only
+
+ expect(service).to receive(:rollback_folder_move).and_call_original
+
+ service.execute
+
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_falsey
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_falsey
+ expect(project.repository_read_only?).to be_falsey
+ end
+
+ context 'when rollback fails' do
+ let(:from_name) { legacy_storage.disk_path }
+ let(:to_name) { hashed_storage.disk_path }
+
+ before do
+ hashed_storage.ensure_storage_path_exists
+ gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
+ end
+
+ it 'does not try to move nil repository over hashed' do
+ expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name)
+ expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki")
+
+ service.execute
+ end
+ end
+ end
+
+ def expect_move_repository(from_name, to_name)
+ expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name).and_call_original
+ end
+ end
+end
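These examples describe a two-step rename (project repository, then wiki) with a rollback when either move fails. Under those assumptions, and using only calls the spec itself stubs or invokes (Gitlab::Shell#mv_repository, rollback_folder_move), the service might be sketched as follows; the constant names and exact storage_version bookkeeping are assumptions rather than the shipped code:

module Projects
  module HashedStorage
    # Illustrative sketch of the repository migration service.
    class MigrateRepositoryService
      attr_reader :project, :logger, :old_disk_path, :new_disk_path

      def initialize(project, logger = nil)
        @project = project
        @logger = logger
        @old_disk_path = project.disk_path
        @new_disk_path = Storage::HashedProject.new(project).disk_path
      end

      def execute
        result = move_repository(old_disk_path, new_disk_path)
        result &&= move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")

        if result
          project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:repository] # assumed constant
        else
          rollback_folder_move
        end

        project.repository_read_only = false
        project.save!
      end

      private

      def move_repository(from_name, to_name)
        gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
      end

      # Undo any rename that did succeed so the project stays usable.
      def rollback_folder_move
        move_repository(new_disk_path, old_disk_path)
        move_repository("#{new_disk_path}.wiki", "#{old_disk_path}.wiki")
      end

      def gitlab_shell
        @gitlab_shell ||= Gitlab::Shell.new
      end
    end
  end
end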
diff --git a/spec/services/projects/hashed_storage_migration_service_spec.rb b/spec/services/projects/hashed_storage_migration_service_spec.rb
index b71b47c59b6..466f0b5d7c2 100644
--- a/spec/services/projects/hashed_storage_migration_service_spec.rb
+++ b/spec/services/projects/hashed_storage_migration_service_spec.rb
@@ -1,74 +1,44 @@
require 'spec_helper'
describe Projects::HashedStorageMigrationService do
- let(:gitlab_shell) { Gitlab::Shell.new }
let(:project) { create(:project, :empty_repo, :wiki_repo) }
- let(:service) { described_class.new(project) }
- let(:legacy_storage) { Storage::LegacyProject.new(project) }
- let(:hashed_storage) { Storage::HashedProject.new(project) }
+ subject(:service) { described_class.new(project) }
describe '#execute' do
- before do
- allow(service).to receive(:gitlab_shell) { gitlab_shell }
- end
-
- context 'when succeeds' do
- it 'renames project and wiki repositories' do
- service.execute
+ context 'repository migration' do
+ let(:repository_service) { Projects::HashedStorage::MigrateRepositoryService.new(project, subject.logger) }
- expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_truthy
- expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_truthy
- end
+ it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do
+ expect(Projects::HashedStorage::MigrateRepositoryService).to receive(:new).with(project, subject.logger).and_return(repository_service)
+ expect(repository_service).to receive(:execute)
- it 'updates project to be hashed and not read-only' do
service.execute
-
- expect(project.hashed_storage?(:repository)).to be_truthy
- expect(project.repository_read_only).to be_falsey
end
- it 'move operation is called for both repositories' do
- expect_move_repository(project.disk_path, hashed_storage.disk_path)
- expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki")
+ it 'does not delegate migration if repository is already migrated' do
+ project.storage_version = ::Project::LATEST_STORAGE_VERSION
+ expect(Projects::HashedStorage::MigrateRepositoryService).not_to receive(:new)
service.execute
end
end
- context 'when one move fails' do
- it 'rollsback repositories to original name' do
- from_name = project.disk_path
- to_name = hashed_storage.disk_path
- allow(service).to receive(:move_repository).and_call_original
- allow(service).to receive(:move_repository).with(from_name, to_name).once { false } # will disable first move only
+ context 'attachments migration' do
+ let(:attachments_service) { Projects::HashedStorage::MigrateAttachmentsService.new(project, subject.logger) }
- expect(service).to receive(:rollback_folder_move).and_call_original
+ it 'delegates migration to Projects::HashedStorage::MigrateAttachmentsService' do
+ expect(Projects::HashedStorage::MigrateAttachmentsService).to receive(:new).with(project, subject.logger).and_return(attachments_service)
+ expect(attachments_service).to receive(:execute)
service.execute
-
- expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_falsey
- expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_falsey
end
- context 'when rollback fails' do
- before do
- from_name = legacy_storage.disk_path
- to_name = hashed_storage.disk_path
+ it 'does not delegate migration if attachments are already migrated' do
+ project.storage_version = ::Project::LATEST_STORAGE_VERSION
+ expect(Projects::HashedStorage::MigrateAttachmentsService).not_to receive(:new)
- hashed_storage.ensure_storage_path_exists
- gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
- end
-
- it 'does not try to move nil repository over hashed' do
- expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki")
-
- service.execute
- end
+ service.execute
end
end
-
- def expect_move_repository(from_name, to_name)
- expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name).and_call_original
- end
end
end
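After this refactor, HashedStorageMigrationService is a thin coordinator: it delegates to the repository and attachments services and skips whichever feature the project has already migrated. A sketch of that delegation, assuming a hashed_storage?(:attachments) check analogous to the hashed_storage?(:repository) check used above:

module Projects
  # Illustrative coordinator; the storage_version checks are assumptions
  # based on the behaviour the spec describes.
  class HashedStorageMigrationService
    attr_reader :project, :logger

    def initialize(project, logger = nil)
      @project = project
      @logger = logger || Rails.logger
    end

    def execute
      # Skip each step when that feature was already migrated.
      unless project.hashed_storage?(:repository)
        HashedStorage::MigrateRepositoryService.new(project, logger).execute
      end

      unless project.hashed_storage?(:attachments)
        HashedStorage::MigrateAttachmentsService.new(project, logger).execute
      end
    end
  end
end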
diff --git a/spec/workers/project_migrate_hashed_storage_worker_spec.rb b/spec/workers/project_migrate_hashed_storage_worker_spec.rb
index f5226dee0ad..2e3951e7afc 100644
--- a/spec/workers/project_migrate_hashed_storage_worker_spec.rb
+++ b/spec/workers/project_migrate_hashed_storage_worker_spec.rb
@@ -1,29 +1,53 @@
require 'spec_helper'
-describe ProjectMigrateHashedStorageWorker do
+describe ProjectMigrateHashedStorageWorker, :clean_gitlab_redis_shared_state do
describe '#perform' do
let(:project) { create(:project, :empty_repo) }
let(:pending_delete_project) { create(:project, :empty_repo, pending_delete: true) }
- it 'skips when project no longer exists' do
- nonexistent_id = 999999999999
+ context 'when it has an exclusive lease' do
+ before do
+ lease = subject.lease_for(project.id)
- expect(::Projects::HashedStorageMigrationService).not_to receive(:new)
- subject.perform(nonexistent_id)
- end
+ allow(Gitlab::ExclusiveLease).to receive(:new).and_return(lease)
+ allow(lease).to receive(:try_obtain).and_return(true)
+ end
+
+ it 'skips when project no longer exists' do
+ nonexistent_id = 999999999999
+
+ expect(::Projects::HashedStorageMigrationService).not_to receive(:new)
+ subject.perform(nonexistent_id)
+ end
+
+ it 'skips when project is pending delete' do
+ expect(::Projects::HashedStorageMigrationService).not_to receive(:new)
- it 'skips when project is pending delete' do
- expect(::Projects::HashedStorageMigrationService).not_to receive(:new)
+ subject.perform(pending_delete_project.id)
+ end
- subject.perform(pending_delete_project.id)
+ it 'delegates migration to the service class' do
+ service = double('service')
+ expect(::Projects::HashedStorageMigrationService).to receive(:new).with(project, subject.logger).and_return(service)
+ expect(service).to receive(:execute)
+
+ subject.perform(project.id)
+ end
end
- it 'delegates removal to service class' do
- service = double('service')
- expect(::Projects::HashedStorageMigrationService).to receive(:new).with(project, subject.logger).and_return(service)
- expect(service).to receive(:execute)
+ context 'when it does not have an exclusive lease' do
+ before do
+ lease = subject.lease_for(project.id)
+
+ allow(Gitlab::ExclusiveLease).to receive(:new).and_return(lease)
+ allow(lease).to receive(:try_obtain).and_return(false)
+ end
+
+ it 'skips when it does not have the lease' do
+ expect(::Projects::HashedStorageMigrationService).not_to receive(:new)
- subject.perform(project.id)
+ subject.perform(project.id)
+ end
end
end
end
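The worker spec now asserts lease-guarded execution: the migration runs only while an exclusive lease for the project is held, and is skipped when the project is gone, pending delete, or the lease cannot be obtained. A minimal sketch consistent with those expectations (the lease key and timeout are assumptions):

class ProjectMigrateHashedStorageWorker
  include ApplicationWorker

  LEASE_TIMEOUT = 30.seconds.to_i

  def perform(project_id)
    project = Project.find_by(id: project_id)
    return if project.nil? || project.pending_delete?

    # Only one migration per project at a time.
    return unless lease_for(project_id).try_obtain

    ::Projects::HashedStorageMigrationService.new(project, logger).execute
  end

  def lease_for(project_id)
    Gitlab::ExclusiveLease.new("project_migrate_hashed_storage_worker:#{project_id}", timeout: LEASE_TIMEOUT)
  end
end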