author    Grzegorz Bizon <grzesiek.bizon@gmail.com> 2018-03-07 09:59:51 +0100
committer Grzegorz Bizon <grzesiek.bizon@gmail.com> 2018-03-07 09:59:51 +0100
commit    a2a8e36178853b5f8fa2eda306b33f8f97970745 (patch)
tree      0f8cf46e5c2968bcaa5a42c61f46338f1b921b0b /lib
parent    e85e1dbb57af835945b37dc03d1f850cbdaf4d82 (diff)
parent    95016507d49c3099afde0ef3909377bf70061dc3 (diff)
Merge branch 'master' into backstage/gb/refactor-ci-cd-variables-collections
* master: (6164 commits)
Diffstat (limited to 'lib')
-rw-r--r--lib/additional_email_headers_interceptor.rb6
-rw-r--r--lib/after_commit_queue.rb34
-rw-r--r--lib/api/access_requests.rb14
-rw-r--r--lib/api/api.rb27
-rw-r--r--lib/api/api_guard.rb120
-rw-r--r--lib/api/applications.rb27
-rw-r--r--lib/api/badges.rb134
-rw-r--r--lib/api/boards.rb84
-rw-r--r--lib/api/boards_responses.rb50
-rw-r--r--lib/api/branches.rb67
-rw-r--r--lib/api/circuit_breakers.rb6
-rw-r--r--lib/api/commits.rb74
-rw-r--r--lib/api/custom_attributes_endpoints.rb77
-rw-r--r--lib/api/deploy_keys.rb65
-rw-r--r--lib/api/deployments.rb4
-rw-r--r--lib/api/entities.rb418
-rw-r--r--lib/api/group_boards.rb117
-rw-r--r--lib/api/groups.rb108
-rw-r--r--lib/api/helpers.rb150
-rw-r--r--lib/api/helpers/badges_helpers.rb28
-rw-r--r--lib/api/helpers/common_helpers.rb6
-rw-r--r--lib/api/helpers/custom_attributes.rb28
-rw-r--r--lib/api/helpers/custom_validators.rb1
-rw-r--r--lib/api/helpers/internal_helpers.rb53
-rw-r--r--lib/api/helpers/pagination.rb27
-rw-r--r--lib/api/helpers/runner.rb38
-rw-r--r--lib/api/internal.rb54
-rw-r--r--lib/api/issues.rb45
-rw-r--r--lib/api/job_artifacts.rb17
-rw-r--r--lib/api/jobs.rb3
-rw-r--r--lib/api/labels.rb4
-rw-r--r--lib/api/members.rb29
-rw-r--r--lib/api/merge_requests.rb114
-rw-r--r--lib/api/namespaces.rb10
-rw-r--r--lib/api/notes.rb11
-rw-r--r--lib/api/notification_settings.rb2
-rw-r--r--lib/api/pages_domains.rb139
-rw-r--r--lib/api/pipelines.rb3
-rw-r--r--lib/api/project_export.rb41
-rw-r--r--lib/api/project_hooks.rb1
-rw-r--r--lib/api/project_import.rb69
-rw-r--r--lib/api/project_milestones.rb9
-rw-r--r--lib/api/project_snippets.rb3
-rw-r--r--lib/api/projects.rb58
-rw-r--r--lib/api/projects_relation_builder.rb34
-rw-r--r--lib/api/protected_branches.rb24
-rw-r--r--lib/api/repositories.rb13
-rw-r--r--lib/api/runner.rb20
-rw-r--r--lib/api/runners.rb27
-rw-r--r--lib/api/search.rb111
-rw-r--r--lib/api/services.rb240
-rw-r--r--lib/api/session.rb20
-rw-r--r--lib/api/settings.rb18
-rw-r--r--lib/api/snippets.rb1
-rw-r--r--lib/api/system_hooks.rb1
-rw-r--r--lib/api/tags.rb19
-rw-r--r--lib/api/templates.rb16
-rw-r--r--lib/api/time_tracking_endpoints.rb4
-rw-r--r--lib/api/todos.rb2
-rw-r--r--lib/api/triggers.rb2
-rw-r--r--lib/api/users.rb61
-rw-r--r--lib/api/v3/branches.rb14
-rw-r--r--lib/api/v3/builds.rb5
-rw-r--r--lib/api/v3/commits.rb35
-rw-r--r--lib/api/v3/deploy_keys.rb46
-rw-r--r--lib/api/v3/entities.rb25
-rw-r--r--lib/api/v3/issues.rb6
-rw-r--r--lib/api/v3/labels.rb2
-rw-r--r--lib/api/v3/members.rb18
-rw-r--r--lib/api/v3/merge_requests.rb9
-rw-r--r--lib/api/v3/pipelines.rb2
-rw-r--r--lib/api/v3/project_hooks.rb1
-rw-r--r--lib/api/v3/project_snippets.rb1
-rw-r--r--lib/api/v3/projects.rb8
-rw-r--r--lib/api/v3/repositories.rb11
-rw-r--r--lib/api/v3/runners.rb1
-rw-r--r--lib/api/v3/services.rb22
-rw-r--r--lib/api/v3/settings.rb8
-rw-r--r--lib/api/v3/snippets.rb3
-rw-r--r--lib/api/v3/tags.rb4
-rw-r--r--lib/api/v3/templates.rb16
-rw-r--r--lib/api/v3/time_tracking_endpoints.rb4
-rw-r--r--lib/api/v3/todos.rb2
-rw-r--r--lib/api/v3/triggers.rb2
-rw-r--r--lib/backup/artifacts.rb2
-rw-r--r--lib/backup/database.rb1
-rw-r--r--lib/backup/files.rb6
-rw-r--r--lib/backup/manager.rb87
-rw-r--r--lib/backup/repository.rb34
-rw-r--r--lib/banzai.rb4
-rw-r--r--lib/banzai/color_parser.rb44
-rw-r--r--lib/banzai/cross_project_reference.rb2
-rw-r--r--lib/banzai/filter/absolute_link_filter.rb34
-rw-r--r--lib/banzai/filter/abstract_reference_filter.rb128
-rw-r--r--lib/banzai/filter/autolink_filter.rb86
-rw-r--r--lib/banzai/filter/color_filter.rb31
-rw-r--r--lib/banzai/filter/commit_reference_filter.rb37
-rw-r--r--lib/banzai/filter/emoji_filter.rb4
-rw-r--r--lib/banzai/filter/epic_reference_filter.rb12
-rw-r--r--lib/banzai/filter/gollum_tags_filter.rb4
-rw-r--r--lib/banzai/filter/html_entity_filter.rb2
-rw-r--r--lib/banzai/filter/issuable_reference_filter.rb31
-rw-r--r--lib/banzai/filter/issuable_state_filter.rb6
-rw-r--r--lib/banzai/filter/issue_reference_filter.rb32
-rw-r--r--lib/banzai/filter/label_reference_filter.rb4
-rw-r--r--lib/banzai/filter/markdown_filter.rb32
-rw-r--r--lib/banzai/filter/merge_request_reference_filter.rb37
-rw-r--r--lib/banzai/filter/mermaid_filter.rb11
-rw-r--r--lib/banzai/filter/milestone_reference_filter.rb6
-rw-r--r--lib/banzai/filter/reference_filter.rb6
-rw-r--r--lib/banzai/filter/relative_link_filter.rb55
-rw-r--r--lib/banzai/filter/sanitization_filter.rb18
-rw-r--r--lib/banzai/filter/syntax_highlight_filter.rb41
-rw-r--r--lib/banzai/filter/table_of_contents_filter.rb1
-rw-r--r--lib/banzai/filter/upload_link_filter.rb46
-rw-r--r--lib/banzai/filter/user_reference_filter.rb57
-rw-r--r--lib/banzai/filter/wiki_link_filter/rewriter.rb4
-rw-r--r--lib/banzai/issuable_extractor.rb4
-rw-r--r--lib/banzai/note_renderer.rb21
-rw-r--r--lib/banzai/object_renderer.rb18
-rw-r--r--lib/banzai/pipeline/broadcast_message_pipeline.rb1
-rw-r--r--lib/banzai/pipeline/gfm_pipeline.rb3
-rw-r--r--lib/banzai/pipeline/post_process_pipeline.rb3
-rw-r--r--lib/banzai/querying.rb2
-rw-r--r--lib/banzai/redactor.rb46
-rw-r--r--lib/banzai/reference_parser/epic_parser.rb12
-rw-r--r--lib/banzai/reference_parser/issuable_parser.rb25
-rw-r--r--lib/banzai/reference_parser/issue_parser.rb37
-rw-r--r--lib/banzai/reference_parser/merge_request_parser.rb24
-rw-r--r--lib/banzai/reference_parser/user_parser.rb1
-rw-r--r--lib/banzai/renderer.rb20
-rw-r--r--lib/banzai/request_store_reference_cache.rb27
-rw-r--r--lib/bitbucket/connection.rb2
-rw-r--r--lib/carrier_wave_string_file.rb5
-rw-r--r--lib/constraints/group_url_constrainer.rb2
-rw-r--r--lib/constraints/project_url_constrainer.rb2
-rw-r--r--lib/constraints/user_url_constrainer.rb2
-rw-r--r--lib/container_registry/registry.rb2
-rw-r--r--lib/declarative_policy.rb2
-rw-r--r--lib/declarative_policy/base.rb2
-rw-r--r--lib/declarative_policy/cache.rb2
-rw-r--r--lib/declarative_policy/rule.rb25
-rw-r--r--lib/declarative_policy/runner.rb32
-rw-r--r--lib/email_template_interceptor.rb4
-rw-r--r--lib/extracts_path.rb20
-rw-r--r--lib/feature.rb23
-rw-r--r--lib/file_size_validator.rb1
-rw-r--r--lib/gitaly/server.rb43
-rw-r--r--lib/github/client.rb53
-rw-r--r--lib/github/collection.rb29
-rw-r--r--lib/github/error.rb3
-rw-r--r--lib/github/import.rb392
-rw-r--r--lib/github/rate_limit.rb27
-rw-r--r--lib/github/repositories.rb19
-rw-r--r--lib/github/representation/base.rb30
-rw-r--r--lib/github/representation/branch.rb63
-rw-r--r--lib/github/representation/comment.rb42
-rw-r--r--lib/github/representation/issuable.rb37
-rw-r--r--lib/github/representation/issue.rb25
-rw-r--r--lib/github/representation/label.rb13
-rw-r--r--lib/github/representation/milestone.rb25
-rw-r--r--lib/github/representation/pull_request.rb120
-rw-r--r--lib/github/representation/release.rb17
-rw-r--r--lib/github/representation/repo.rb6
-rw-r--r--lib/github/representation/user.rb15
-rw-r--r--lib/github/response.rb25
-rw-r--r--lib/github/user.rb24
-rw-r--r--lib/gitlab/access.rb6
-rw-r--r--lib/gitlab/action_rate_limiter.rb47
-rw-r--r--lib/gitlab/asciidoc.rb11
-rw-r--r--lib/gitlab/auth.rb92
-rw-r--r--lib/gitlab/auth/blocked_user_tracker.rb36
-rw-r--r--lib/gitlab/auth/database/authentication.rb16
-rw-r--r--lib/gitlab/auth/ldap/access.rb89
-rw-r--r--lib/gitlab/auth/ldap/adapter.rb110
-rw-r--r--lib/gitlab/auth/ldap/auth_hash.rb48
-rw-r--r--lib/gitlab/auth/ldap/authentication.rb68
-rw-r--r--lib/gitlab/auth/ldap/config.rb237
-rw-r--r--lib/gitlab/auth/ldap/dn.rb303
-rw-r--r--lib/gitlab/auth/ldap/person.rb122
-rw-r--r--lib/gitlab/auth/ldap/user.rb54
-rw-r--r--lib/gitlab/auth/o_auth/auth_hash.rb92
-rw-r--r--lib/gitlab/auth/o_auth/authentication.rb21
-rw-r--r--lib/gitlab/auth/o_auth/provider.rb73
-rw-r--r--lib/gitlab/auth/o_auth/session.rb21
-rw-r--r--lib/gitlab/auth/o_auth/user.rb246
-rw-r--r--lib/gitlab/auth/request_authenticator.rb33
-rw-r--r--lib/gitlab/auth/saml/auth_hash.rb19
-rw-r--r--lib/gitlab/auth/saml/config.rb21
-rw-r--r--lib/gitlab/auth/saml/user.rb52
-rw-r--r--lib/gitlab/auth/user_auth_finders.rb107
-rw-r--r--lib/gitlab/background_migration/.rubocop.yml52
-rw-r--r--lib/gitlab/background_migration/add_merge_request_diff_commits_count.rb26
-rw-r--r--lib/gitlab/background_migration/cleanup_concurrent_type_change.rb54
-rw-r--r--lib/gitlab/background_migration/copy_column.rb41
-rw-r--r--lib/gitlab/background_migration/create_fork_network_memberships_range.rb86
-rw-r--r--lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys.rb57
-rw-r--r--lib/gitlab/background_migration/delete_conflicting_redirect_routes_range.rb38
-rw-r--r--lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits.rb39
-rw-r--r--lib/gitlab/background_migration/migrate_build_stage.rb48
-rw-r--r--lib/gitlab/background_migration/migrate_build_stage_id_reference.rb3
-rw-r--r--lib/gitlab/background_migration/migrate_events_to_push_event_payloads.rb20
-rw-r--r--lib/gitlab/background_migration/migrate_stage_status.rb4
-rw-r--r--lib/gitlab/background_migration/migrate_system_uploads_to_new_folder.rb4
-rw-r--r--lib/gitlab/background_migration/move_personal_snippet_files.rb4
-rw-r--r--lib/gitlab/background_migration/normalize_ldap_extern_uids_range.rb322
-rw-r--r--lib/gitlab/background_migration/populate_fork_networks_range.rb128
-rw-r--r--lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data.rb135
-rw-r--r--lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id.rb33
-rw-r--r--lib/gitlab/background_migration/populate_untracked_uploads.rb113
-rw-r--r--lib/gitlab/background_migration/populate_untracked_uploads_dependencies.rb201
-rw-r--r--lib/gitlab/background_migration/prepare_untracked_uploads.rb181
-rw-r--r--lib/gitlab/badge/coverage/report.rb2
-rw-r--r--lib/gitlab/badge/coverage/template.rb4
-rw-r--r--lib/gitlab/bare_repository_import/importer.rb106
-rw-r--r--lib/gitlab/bare_repository_import/repository.rb69
-rw-r--r--lib/gitlab/bare_repository_importer.rb96
-rw-r--r--lib/gitlab/bitbucket_import/importer.rb15
-rw-r--r--lib/gitlab/cache/request_cache.rb6
-rw-r--r--lib/gitlab/changes_list.rb1
-rw-r--r--lib/gitlab/checks/change_access.rb85
-rw-r--r--lib/gitlab/checks/commit_check.rb61
-rw-r--r--lib/gitlab/checks/force_push.rb4
-rw-r--r--lib/gitlab/checks/lfs_integrity.rb27
-rw-r--r--lib/gitlab/checks/post_push_message.rb46
-rw-r--r--lib/gitlab/checks/project_created.rb31
-rw-r--r--lib/gitlab/checks/project_moved.rb47
-rw-r--r--lib/gitlab/ci/ansi2html.rb20
-rw-r--r--lib/gitlab/ci/build/artifacts/metadata.rb1
-rw-r--r--lib/gitlab/ci/build/artifacts/metadata/entry.rb5
-rw-r--r--lib/gitlab/ci/build/image.rb1
-rw-r--r--lib/gitlab/ci/charts.rb17
-rw-r--r--lib/gitlab/ci/config/entry/configurable.rb12
-rw-r--r--lib/gitlab/ci/config/entry/image.rb1
-rw-r--r--lib/gitlab/ci/config/entry/node.rb4
-rw-r--r--lib/gitlab/ci/config/entry/validatable.rb2
-rw-r--r--lib/gitlab/ci/config/entry/validators.rb17
-rw-r--r--lib/gitlab/ci/config/loader.rb2
-rw-r--r--lib/gitlab/ci/pipeline/chain/base.rb26
-rw-r--r--lib/gitlab/ci/pipeline/chain/build.rb30
-rw-r--r--lib/gitlab/ci/pipeline/chain/command.rb61
-rw-r--r--lib/gitlab/ci/pipeline/chain/create.rb29
-rw-r--r--lib/gitlab/ci/pipeline/chain/helpers.rb13
-rw-r--r--lib/gitlab/ci/pipeline/chain/sequence.rb35
-rw-r--r--lib/gitlab/ci/pipeline/chain/skip.rb37
-rw-r--r--lib/gitlab/ci/pipeline/chain/validate/abilities.rb54
-rw-r--r--lib/gitlab/ci/pipeline/chain/validate/config.rb35
-rw-r--r--lib/gitlab/ci/pipeline/chain/validate/repository.rb27
-rw-r--r--lib/gitlab/ci/pipeline/duration.rb143
-rw-r--r--lib/gitlab/ci/pipeline/expression/lexeme/base.rb25
-rw-r--r--lib/gitlab/ci/pipeline/expression/lexeme/equals.rb26
-rw-r--r--lib/gitlab/ci/pipeline/expression/lexeme/null.rb25
-rw-r--r--lib/gitlab/ci/pipeline/expression/lexeme/operator.rb15
-rw-r--r--lib/gitlab/ci/pipeline/expression/lexeme/string.rb25
-rw-r--r--lib/gitlab/ci/pipeline/expression/lexeme/value.rb15
-rw-r--r--lib/gitlab/ci/pipeline/expression/lexeme/variable.rb25
-rw-r--r--lib/gitlab/ci/pipeline/expression/lexer.rb59
-rw-r--r--lib/gitlab/ci/pipeline/expression/parser.rb40
-rw-r--r--lib/gitlab/ci/pipeline/expression/statement.rb42
-rw-r--r--lib/gitlab/ci/pipeline/expression/token.rb28
-rw-r--r--lib/gitlab/ci/pipeline_duration.rb141
-rw-r--r--lib/gitlab/ci/stage/seed.rb8
-rw-r--r--lib/gitlab/ci/status/build/action.rb5
-rw-r--r--lib/gitlab/ci/status/build/cancelable.rb2
-rw-r--r--lib/gitlab/ci/status/build/failed_allowed.rb2
-rw-r--r--lib/gitlab/ci/status/build/play.rb2
-rw-r--r--lib/gitlab/ci/status/build/retryable.rb2
-rw-r--r--lib/gitlab/ci/status/build/stop.rb2
-rw-r--r--lib/gitlab/ci/status/canceled.rb2
-rw-r--r--lib/gitlab/ci/status/created.rb2
-rw-r--r--lib/gitlab/ci/status/failed.rb2
-rw-r--r--lib/gitlab/ci/status/manual.rb2
-rw-r--r--lib/gitlab/ci/status/pending.rb2
-rw-r--r--lib/gitlab/ci/status/running.rb2
-rw-r--r--lib/gitlab/ci/status/skipped.rb2
-rw-r--r--lib/gitlab/ci/status/success.rb2
-rw-r--r--lib/gitlab/ci/status/success_warning.rb2
-rw-r--r--lib/gitlab/ci/trace.rb65
-rw-r--r--lib/gitlab/ci/trace/section_parser.rb97
-rw-r--r--lib/gitlab/ci/trace/stream.rb17
-rw-r--r--lib/gitlab/ci/yaml_processor.rb2
-rw-r--r--lib/gitlab/closing_issue_extractor.rb3
-rw-r--r--lib/gitlab/conflict/file.rb88
-rw-r--r--lib/gitlab/conflict/file_collection.rb71
-rw-r--r--lib/gitlab/conflict/parser.rb74
-rw-r--r--lib/gitlab/conflict/resolution_error.rb5
-rw-r--r--lib/gitlab/contributions_calendar.rb8
-rw-r--r--lib/gitlab/cross_project_access.rb67
-rw-r--r--lib/gitlab/cross_project_access/check_collection.rb47
-rw-r--r--lib/gitlab/cross_project_access/check_info.rb66
-rw-r--r--lib/gitlab/cross_project_access/class_methods.rb48
-rw-r--r--lib/gitlab/current_settings.rb108
-rw-r--r--lib/gitlab/cycle_analytics/base_event_fetcher.rb4
-rw-r--r--lib/gitlab/cycle_analytics/base_query.rb10
-rw-r--r--lib/gitlab/cycle_analytics/base_stage.rb29
-rw-r--r--lib/gitlab/cycle_analytics/code_event_fetcher.rb6
-rw-r--r--lib/gitlab/cycle_analytics/issue_allowed.rb9
-rw-r--r--lib/gitlab/cycle_analytics/issue_event_fetcher.rb6
-rw-r--r--lib/gitlab/cycle_analytics/merge_request_allowed.rb9
-rw-r--r--lib/gitlab/cycle_analytics/plan_event_fetcher.rb12
-rw-r--r--lib/gitlab/cycle_analytics/production_helper.rb6
-rw-r--r--lib/gitlab/cycle_analytics/review_event_fetcher.rb8
-rw-r--r--lib/gitlab/cycle_analytics/staging_event_fetcher.rb4
-rw-r--r--lib/gitlab/cycle_analytics/test_stage.rb6
-rw-r--r--lib/gitlab/cycle_analytics/usage_data.rb72
-rw-r--r--lib/gitlab/daemon.rb3
-rw-r--r--lib/gitlab/data_builder/push.rb9
-rw-r--r--lib/gitlab/database.rb54
-rw-r--r--lib/gitlab/database/grant.rb48
-rw-r--r--lib/gitlab/database/median.rb130
-rw-r--r--lib/gitlab/database/migration_helpers.rb142
-rw-r--r--lib/gitlab/database/rename_reserved_paths_migration/v1/migration_classes.rb16
-rw-r--r--lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects.rb9
-rw-r--r--lib/gitlab/dependency_linker/composer_json_linker.rb2
-rw-r--r--lib/gitlab/dependency_linker/gemfile_linker.rb2
-rw-r--r--lib/gitlab/dependency_linker/podspec_linker.rb2
-rw-r--r--lib/gitlab/diff/diff_refs.rb6
-rw-r--r--lib/gitlab/diff/file.rb86
-rw-r--r--lib/gitlab/diff/file_collection/base.rb5
-rw-r--r--lib/gitlab/diff/formatters/base_formatter.rb61
-rw-r--r--lib/gitlab/diff/formatters/image_formatter.rb43
-rw-r--r--lib/gitlab/diff/formatters/text_formatter.rb49
-rw-r--r--lib/gitlab/diff/highlight.rb13
-rw-r--r--lib/gitlab/diff/image_point.rb23
-rw-r--r--lib/gitlab/diff/inline_diff.rb3
-rw-r--r--lib/gitlab/diff/line_code.rb9
-rw-r--r--lib/gitlab/diff/parser.rb5
-rw-r--r--lib/gitlab/diff/position.rb98
-rw-r--r--lib/gitlab/ee_compat_check.rb97
-rw-r--r--lib/gitlab/email/handler.rb2
-rw-r--r--lib/gitlab/email/handler/create_merge_request_handler.rb70
-rw-r--r--lib/gitlab/email/handler/unsubscribe_handler.rb1
-rw-r--r--lib/gitlab/email/receiver.rb6
-rw-r--r--lib/gitlab/email/reply_parser.rb2
-rw-r--r--lib/gitlab/emoji.rb10
-rw-r--r--lib/gitlab/encoding_helper.rb33
-rw-r--r--lib/gitlab/exclusive_lease.rb11
-rw-r--r--lib/gitlab/file_detector.rb28
-rw-r--r--lib/gitlab/file_finder.rb5
-rw-r--r--lib/gitlab/fogbugz_import/client.rb1
-rw-r--r--lib/gitlab/fogbugz_import/importer.rb5
-rw-r--r--lib/gitlab/gfm/uploads_rewriter.rb4
-rw-r--r--lib/gitlab/git.rb19
-rw-r--r--lib/gitlab/git/attributes_at_ref_parser.rb14
-rw-r--r--lib/gitlab/git/attributes_parser.rb (renamed from lib/gitlab/git/attributes.rb)53
-rw-r--r--lib/gitlab/git/blame.rb4
-rw-r--r--lib/gitlab/git/blob.rb122
-rw-r--r--lib/gitlab/git/branch.rb24
-rw-r--r--lib/gitlab/git/commit.rb168
-rw-r--r--lib/gitlab/git/commit_stats.rb9
-rw-r--r--lib/gitlab/git/conflict/file.rb88
-rw-r--r--lib/gitlab/git/conflict/parser.rb91
-rw-r--r--lib/gitlab/git/conflict/resolution.rb15
-rw-r--r--lib/gitlab/git/conflict/resolver.rb118
-rw-r--r--lib/gitlab/git/diff.rb52
-rw-r--r--lib/gitlab/git/env.rb17
-rw-r--r--lib/gitlab/git/gitlab_projects.rb285
-rw-r--r--lib/gitlab/git/hook.rb29
-rw-r--r--lib/gitlab/git/hooks_service.rb15
-rw-r--r--lib/gitlab/git/index.rb12
-rw-r--r--lib/gitlab/git/info_attributes.rb49
-rw-r--r--lib/gitlab/git/lfs_changes.rb52
-rw-r--r--lib/gitlab/git/lfs_pointer_file.rb25
-rw-r--r--lib/gitlab/git/operation_service.rb38
-rw-r--r--lib/gitlab/git/path_helper.rb2
-rw-r--r--lib/gitlab/git/popen.rb84
-rw-r--r--lib/gitlab/git/ref.rb6
-rw-r--r--lib/gitlab/git/remote_mirror.rb89
-rw-r--r--lib/gitlab/git/remote_repository.rb84
-rw-r--r--lib/gitlab/git/repository.rb1587
-rw-r--r--lib/gitlab/git/repository_mirroring.rb91
-rw-r--r--lib/gitlab/git/rev_list.rb63
-rw-r--r--lib/gitlab/git/storage.rb3
-rw-r--r--lib/gitlab/git/storage/checker.rb120
-rw-r--r--lib/gitlab/git/storage/circuit_breaker.rb131
-rw-r--r--lib/gitlab/git/storage/circuit_breaker_settings.rb37
-rw-r--r--lib/gitlab/git/storage/failure_info.rb39
-rw-r--r--lib/gitlab/git/storage/forked_storage_check.rb14
-rw-r--r--lib/gitlab/git/storage/health.rb21
-rw-r--r--lib/gitlab/git/storage/null_circuit_breaker.rb50
-rw-r--r--lib/gitlab/git/tag.rb2
-rw-r--r--lib/gitlab/git/tree.rb25
-rw-r--r--lib/gitlab/git/user.rb20
-rw-r--r--lib/gitlab/git/wiki.rb296
-rw-r--r--lib/gitlab/git/wiki_file.rb20
-rw-r--r--lib/gitlab/git/wiki_page.rb40
-rw-r--r--lib/gitlab/git/wiki_page_version.rb19
-rw-r--r--lib/gitlab/git_access.rb147
-rw-r--r--lib/gitlab/git_access_wiki.rb17
-rw-r--r--lib/gitlab/git_ref_validator.rb2
-rw-r--r--lib/gitlab/gitaly_client.rb171
-rw-r--r--lib/gitlab/gitaly_client/attributes_bag.rb31
-rw-r--r--lib/gitlab/gitaly_client/blob_service.rb83
-rw-r--r--lib/gitlab/gitaly_client/blobs_stitcher.rb47
-rw-r--r--lib/gitlab/gitaly_client/commit_service.rb254
-rw-r--r--lib/gitlab/gitaly_client/conflict_files_stitcher.rb47
-rw-r--r--lib/gitlab/gitaly_client/conflicts_service.rb72
-rw-r--r--lib/gitlab/gitaly_client/diff.rb16
-rw-r--r--lib/gitlab/gitaly_client/diff_stitcher.rb2
-rw-r--r--lib/gitlab/gitaly_client/health_check_service.rb19
-rw-r--r--lib/gitlab/gitaly_client/namespace_service.rb39
-rw-r--r--lib/gitlab/gitaly_client/operation_service.rb321
-rw-r--r--lib/gitlab/gitaly_client/queue_enumerator.rb28
-rw-r--r--lib/gitlab/gitaly_client/ref_service.rb104
-rw-r--r--lib/gitlab/gitaly_client/remote_service.rb70
-rw-r--r--lib/gitlab/gitaly_client/repository_service.rb214
-rw-r--r--lib/gitlab/gitaly_client/server_service.rb16
-rw-r--r--lib/gitlab/gitaly_client/util.rb33
-rw-r--r--lib/gitlab/gitaly_client/wiki_file.rb9
-rw-r--r--lib/gitlab/gitaly_client/wiki_page.rb30
-rw-r--r--lib/gitlab/gitaly_client/wiki_service.rb210
-rw-r--r--lib/gitlab/github_import.rb38
-rw-r--r--lib/gitlab/github_import/bulk_importing.rb25
-rw-r--r--lib/gitlab/github_import/caching.rb151
-rw-r--r--lib/gitlab/github_import/client.rb267
-rw-r--r--lib/gitlab/github_import/importer/diff_note_importer.rb63
-rw-r--r--lib/gitlab/github_import/importer/diff_notes_importer.rb31
-rw-r--r--lib/gitlab/github_import/importer/issue_and_label_links_importer.rb25
-rw-r--r--lib/gitlab/github_import/importer/issue_importer.rb81
-rw-r--r--lib/gitlab/github_import/importer/issues_importer.rb35
-rw-r--r--lib/gitlab/github_import/importer/label_links_importer.rb52
-rw-r--r--lib/gitlab/github_import/importer/labels_importer.rb55
-rw-r--r--lib/gitlab/github_import/importer/milestones_importer.rb58
-rw-r--r--lib/gitlab/github_import/importer/note_importer.rb54
-rw-r--r--lib/gitlab/github_import/importer/notes_importer.rb31
-rw-r--r--lib/gitlab/github_import/importer/pull_request_importer.rb91
-rw-r--r--lib/gitlab/github_import/importer/pull_requests_importer.rb80
-rw-r--r--lib/gitlab/github_import/importer/releases_importer.rb55
-rw-r--r--lib/gitlab/github_import/importer/repository_importer.rb84
-rw-r--r--lib/gitlab/github_import/issuable_finder.rb81
-rw-r--r--lib/gitlab/github_import/label_finder.rb37
-rw-r--r--lib/gitlab/github_import/markdown_text.rb30
-rw-r--r--lib/gitlab/github_import/milestone_finder.rb40
-rw-r--r--lib/gitlab/github_import/page_counter.rb31
-rw-r--r--lib/gitlab/github_import/parallel_importer.rb48
-rw-r--r--lib/gitlab/github_import/parallel_scheduling.rb162
-rw-r--r--lib/gitlab/github_import/rate_limit_error.rb9
-rw-r--r--lib/gitlab/github_import/representation.rb25
-rw-r--r--lib/gitlab/github_import/representation/diff_note.rb87
-rw-r--r--lib/gitlab/github_import/representation/expose_attribute.rb26
-rw-r--r--lib/gitlab/github_import/representation/issue.rb80
-rw-r--r--lib/gitlab/github_import/representation/note.rb70
-rw-r--r--lib/gitlab/github_import/representation/pull_request.rb114
-rw-r--r--lib/gitlab/github_import/representation/to_hash.rb31
-rw-r--r--lib/gitlab/github_import/representation/user.rb34
-rw-r--r--lib/gitlab/github_import/sequential_importer.rb50
-rw-r--r--lib/gitlab/github_import/user_finder.rb164
-rw-r--r--lib/gitlab/gitlab_import/client.rb1
-rw-r--r--lib/gitlab/gon_helper.rb12
-rw-r--r--lib/gitlab/google_code_import/importer.rb3
-rw-r--r--lib/gitlab/gpg.rb15
-rw-r--r--lib/gitlab/gpg/commit.rb40
-rw-r--r--lib/gitlab/gpg/invalid_gpg_signature_updater.rb4
-rw-r--r--lib/gitlab/grape_logging/loggers/user_logger.rb18
-rw-r--r--lib/gitlab/group_hierarchy.rb30
-rw-r--r--lib/gitlab/health_checks/fs_shards_check.rb2
-rw-r--r--lib/gitlab/health_checks/gitaly_check.rb53
-rw-r--r--lib/gitlab/hook_data/issuable_builder.rb56
-rw-r--r--lib/gitlab/hook_data/issue_builder.rb54
-rw-r--r--lib/gitlab/hook_data/merge_request_builder.rb61
-rw-r--r--lib/gitlab/i18n.rb5
-rw-r--r--lib/gitlab/identifier.rb5
-rw-r--r--lib/gitlab/import_export.rb2
-rw-r--r--lib/gitlab/import_export/command_line_util.rb6
-rw-r--r--lib/gitlab/import_export/file_importer.rb8
-rw-r--r--lib/gitlab/import_export/import_export.yml16
-rw-r--r--lib/gitlab/import_export/importer.rb11
-rw-r--r--lib/gitlab/import_export/merge_request_parser.rb4
-rw-r--r--lib/gitlab/import_export/project_creator.rb23
-rw-r--r--lib/gitlab/import_export/project_tree_restorer.rb5
-rw-r--r--lib/gitlab/import_export/relation_factory.rb79
-rw-r--r--lib/gitlab/import_export/repo_restorer.rb2
-rw-r--r--lib/gitlab/import_export/repo_saver.rb2
-rw-r--r--lib/gitlab/import_export/saver.rb2
-rw-r--r--lib/gitlab/import_export/shared.rb31
-rw-r--r--lib/gitlab/import_export/uploads_saver.rb9
-rw-r--r--lib/gitlab/import_export/wiki_repo_saver.rb2
-rw-r--r--lib/gitlab/import_export/wiki_restorer.rb23
-rw-r--r--lib/gitlab/import_sources.rb5
-rw-r--r--lib/gitlab/insecure_key_fingerprint.rb23
-rw-r--r--lib/gitlab/issuable_metadata.rb8
-rw-r--r--lib/gitlab/job_waiter.rb14
-rw-r--r--lib/gitlab/kubernetes.rb2
-rw-r--r--lib/gitlab/kubernetes/config_map.rb37
-rw-r--r--lib/gitlab/kubernetes/helm.rb8
-rw-r--r--lib/gitlab/kubernetes/helm/api.rb45
-rw-r--r--lib/gitlab/kubernetes/helm/base_command.rb40
-rw-r--r--lib/gitlab/kubernetes/helm/init_command.rb19
-rw-r--r--lib/gitlab/kubernetes/helm/install_command.rb48
-rw-r--r--lib/gitlab/kubernetes/helm/pod.rb81
-rw-r--r--lib/gitlab/kubernetes/namespace.rb30
-rw-r--r--lib/gitlab/kubernetes/pod.rb12
-rw-r--r--lib/gitlab/ldap/access.rb87
-rw-r--r--lib/gitlab/ldap/adapter.rb104
-rw-r--r--lib/gitlab/ldap/auth_hash.rb36
-rw-r--r--lib/gitlab/ldap/authentication.rb69
-rw-r--r--lib/gitlab/ldap/config.rb230
-rw-r--r--lib/gitlab/ldap/person.rb81
-rw-r--r--lib/gitlab/ldap/user.rb80
-rw-r--r--lib/gitlab/legacy_github_import/base_formatter.rb (renamed from lib/gitlab/github_import/base_formatter.rb)2
-rw-r--r--lib/gitlab/legacy_github_import/branch_formatter.rb (renamed from lib/gitlab/github_import/branch_formatter.rb)2
-rw-r--r--lib/gitlab/legacy_github_import/client.rb148
-rw-r--r--lib/gitlab/legacy_github_import/comment_formatter.rb (renamed from lib/gitlab/github_import/comment_formatter.rb)4
-rw-r--r--lib/gitlab/legacy_github_import/importer.rb (renamed from lib/gitlab/github_import/importer.rb)7
-rw-r--r--lib/gitlab/legacy_github_import/issuable_formatter.rb (renamed from lib/gitlab/github_import/issuable_formatter.rb)2
-rw-r--r--lib/gitlab/legacy_github_import/issue_formatter.rb (renamed from lib/gitlab/github_import/issue_formatter.rb)2
-rw-r--r--lib/gitlab/legacy_github_import/label_formatter.rb (renamed from lib/gitlab/github_import/label_formatter.rb)2
-rw-r--r--lib/gitlab/legacy_github_import/milestone_formatter.rb (renamed from lib/gitlab/github_import/milestone_formatter.rb)2
-rw-r--r--lib/gitlab/legacy_github_import/project_creator.rb (renamed from lib/gitlab/github_import/project_creator.rb)6
-rw-r--r--lib/gitlab/legacy_github_import/pull_request_formatter.rb (renamed from lib/gitlab/github_import/pull_request_formatter.rb)2
-rw-r--r--lib/gitlab/legacy_github_import/release_formatter.rb (renamed from lib/gitlab/github_import/release_formatter.rb)2
-rw-r--r--lib/gitlab/legacy_github_import/user_formatter.rb (renamed from lib/gitlab/github_import/user_formatter.rb)2
-rw-r--r--lib/gitlab/legacy_github_import/wiki_formatter.rb (renamed from lib/gitlab/github_import/wiki_formatter.rb)4
-rw-r--r--lib/gitlab/lfs_token.rb4
-rw-r--r--lib/gitlab/logger.rb12
-rw-r--r--lib/gitlab/markdown/pipeline.rb32
-rw-r--r--lib/gitlab/metrics.rb4
-rw-r--r--lib/gitlab/metrics/background_transaction.rb14
-rw-r--r--lib/gitlab/metrics/base_sampler.rb63
-rw-r--r--lib/gitlab/metrics/influx_db.rb275
-rw-r--r--lib/gitlab/metrics/influx_sampler.rb101
-rw-r--r--lib/gitlab/metrics/instrumentation.rb11
-rw-r--r--lib/gitlab/metrics/method_call.rb46
-rw-r--r--lib/gitlab/metrics/methods.rb129
-rw-r--r--lib/gitlab/metrics/methods/metric_options.rb61
-rw-r--r--lib/gitlab/metrics/null_metric.rb2
-rw-r--r--lib/gitlab/metrics/prometheus.rb92
-rw-r--r--lib/gitlab/metrics/rack_middleware.rb67
-rw-r--r--lib/gitlab/metrics/samplers/base_sampler.rb64
-rw-r--r--lib/gitlab/metrics/samplers/influx_sampler.rb79
-rw-r--r--lib/gitlab/metrics/samplers/ruby_sampler.rb84
-rw-r--r--lib/gitlab/metrics/samplers/unicorn_sampler.rb50
-rw-r--r--lib/gitlab/metrics/sidekiq_metrics_exporter.rb2
-rw-r--r--lib/gitlab/metrics/sidekiq_middleware.rb2
-rw-r--r--lib/gitlab/metrics/subscribers/action_view.rb12
-rw-r--r--lib/gitlab/metrics/subscribers/active_record.rb13
-rw-r--r--lib/gitlab/metrics/subscribers/rails_cache.rb42
-rw-r--r--lib/gitlab/metrics/system.rb14
-rw-r--r--lib/gitlab/metrics/transaction.rb89
-rw-r--r--lib/gitlab/metrics/unicorn_sampler.rb48
-rw-r--r--lib/gitlab/metrics/web_transaction.rb82
-rw-r--r--lib/gitlab/middleware/go.rb28
-rw-r--r--lib/gitlab/middleware/multipart.rb10
-rw-r--r--lib/gitlab/middleware/rails_queue_duration.rb13
-rw-r--r--lib/gitlab/middleware/read_only.rb20
-rw-r--r--lib/gitlab/middleware/read_only/controller.rb86
-rw-r--r--lib/gitlab/middleware/release_env.rb14
-rw-r--r--lib/gitlab/middleware/static.rb2
-rw-r--r--lib/gitlab/multi_collection_paginator.rb64
-rw-r--r--lib/gitlab/o_auth/auth_hash.rb90
-rw-r--r--lib/gitlab/o_auth/provider.rb42
-rw-r--r--lib/gitlab/o_auth/session.rb19
-rw-r--r--lib/gitlab/o_auth/user.rb222
-rw-r--r--lib/gitlab/optimistic_locking.rb1
-rw-r--r--lib/gitlab/path_regex.rb31
-rw-r--r--lib/gitlab/performance_bar.rb5
-rw-r--r--lib/gitlab/performance_bar/peek_query_tracker.rb4
-rw-r--r--lib/gitlab/plugin.rb26
-rw-r--r--lib/gitlab/plugin_logger.rb7
-rw-r--r--lib/gitlab/polling_interval.rb6
-rw-r--r--lib/gitlab/popen.rb27
-rw-r--r--lib/gitlab/popen/runner.rb46
-rw-r--r--lib/gitlab/profiler.rb143
-rw-r--r--lib/gitlab/project_search_results.rb55
-rw-r--r--lib/gitlab/project_template.rb12
-rw-r--r--lib/gitlab/prometheus/additional_metrics_parser.rb21
-rw-r--r--lib/gitlab/prometheus/metric_group.rb7
-rw-r--r--lib/gitlab/prometheus/queries/additional_metrics_deployment_query.rb1
-rw-r--r--lib/gitlab/prometheus/queries/additional_metrics_environment_query.rb1
-rw-r--r--lib/gitlab/prometheus/queries/base_query.rb4
-rw-r--r--lib/gitlab/prometheus/queries/deployment_query.rb5
-rw-r--r--lib/gitlab/prometheus/queries/environment_query.rb5
-rw-r--r--lib/gitlab/prometheus/queries/matched_metric_query.rb (renamed from lib/gitlab/prometheus/queries/matched_metrics_query.rb)4
-rw-r--r--lib/gitlab/prometheus/queries/query_additional_metrics.rb30
-rw-r--r--lib/gitlab/prometheus_client.rb70
-rw-r--r--lib/gitlab/protocol_access.rb6
-rw-r--r--lib/gitlab/query_limiting.rb36
-rw-r--r--lib/gitlab/query_limiting/active_support_subscriber.rb13
-rw-r--r--lib/gitlab/query_limiting/middleware.rb55
-rw-r--r--lib/gitlab/query_limiting/transaction.rb77
-rw-r--r--lib/gitlab/quick_actions/command_definition.rb15
-rw-r--r--lib/gitlab/quick_actions/dsl.rb5
-rw-r--r--lib/gitlab/quick_actions/extractor.rb11
-rw-r--r--lib/gitlab/quick_actions/spend_time_and_date_separator.rb54
-rw-r--r--lib/gitlab/recaptcha.rb10
-rw-r--r--lib/gitlab/redis/cache.rb2
-rw-r--r--lib/gitlab/redis/wrapper.rb2
-rw-r--r--lib/gitlab/reference_extractor.rb2
-rw-r--r--lib/gitlab/regex.rb16
-rw-r--r--lib/gitlab/repo_path.rb2
-rw-r--r--lib/gitlab/routing.rb19
-rw-r--r--lib/gitlab/saml/auth_hash.rb17
-rw-r--r--lib/gitlab/saml/config.rb19
-rw-r--r--lib/gitlab/saml/user.rb70
-rw-r--r--lib/gitlab/search_results.rb102
-rw-r--r--lib/gitlab/seeder.rb11
-rw-r--r--lib/gitlab/sentry.rb4
-rw-r--r--lib/gitlab/setup_helper.rb61
-rw-r--r--lib/gitlab/shell.rb260
-rw-r--r--lib/gitlab/shell_adapter.rb2
-rw-r--r--lib/gitlab/sherlock/file_sample.rb2
-rw-r--r--lib/gitlab/sherlock/middleware.rb2
-rw-r--r--lib/gitlab/sherlock/query.rb4
-rw-r--r--lib/gitlab/sherlock/transaction.rb4
-rw-r--r--lib/gitlab/sidekiq_config.rb61
-rw-r--r--lib/gitlab/sidekiq_middleware/memory_killer.rb54
-rw-r--r--lib/gitlab/sidekiq_middleware/shutdown.rb133
-rw-r--r--lib/gitlab/sidekiq_status.rb7
-rw-r--r--lib/gitlab/sidekiq_versioning.rb25
-rw-r--r--lib/gitlab/sidekiq_versioning/manager.rb12
-rw-r--r--lib/gitlab/slash_commands/base_command.rb7
-rw-r--r--lib/gitlab/slash_commands/command.rb5
-rw-r--r--lib/gitlab/slash_commands/presenters/issue_base.rb14
-rw-r--r--lib/gitlab/snippet_search_results.rb2
-rw-r--r--lib/gitlab/sql/pattern.rb35
-rw-r--r--lib/gitlab/sql/union.rb13
-rw-r--r--lib/gitlab/ssh_public_key.rb28
-rw-r--r--lib/gitlab/storage_check.rb11
-rw-r--r--lib/gitlab/storage_check/cli.rb71
-rw-r--r--lib/gitlab/storage_check/gitlab_caller.rb39
-rw-r--r--lib/gitlab/storage_check/option_parser.rb39
-rw-r--r--lib/gitlab/storage_check/response.rb77
-rw-r--r--lib/gitlab/string_placeholder_replacer.rb27
-rw-r--r--lib/gitlab/string_range_marker.rb3
-rw-r--r--lib/gitlab/string_regex_marker.rb12
-rw-r--r--lib/gitlab/task_helpers.rb (renamed from lib/tasks/gitlab/task_helpers.rb)28
-rw-r--r--lib/gitlab/tcp_checker.rb45
-rw-r--r--lib/gitlab/template/finders/repo_template_finder.rb1
-rw-r--r--lib/gitlab/testing/request_blocker_middleware.rb14
-rw-r--r--lib/gitlab/testing/request_inspector_middleware.rb71
-rw-r--r--lib/gitlab/timeless.rb1
-rw-r--r--lib/gitlab/upgrader.rb7
-rw-r--r--lib/gitlab/uploads_transfer.rb2
-rw-r--r--lib/gitlab/url_blocker.rb4
-rw-r--r--lib/gitlab/url_sanitizer.rb9
-rw-r--r--lib/gitlab/usage_data.rb56
-rw-r--r--lib/gitlab/user_access.rb17
-rw-r--r--lib/gitlab/utils.rb29
-rw-r--r--lib/gitlab/utils/merge_hash.rb117
-rw-r--r--lib/gitlab/utils/override.rb111
-rw-r--r--lib/gitlab/utils/strong_memoize.rb41
-rw-r--r--lib/gitlab/verify/batch_verifier.rb64
-rw-r--r--lib/gitlab/verify/lfs_objects.rb27
-rw-r--r--lib/gitlab/verify/rake_task.rb53
-rw-r--r--lib/gitlab/verify/uploads.rb27
-rw-r--r--lib/gitlab/view/presenter/factory.rb2
-rw-r--r--lib/gitlab/visibility_level.rb18
-rw-r--r--lib/gitlab/workhorse.rb95
-rw-r--r--lib/google_api/auth.rb54
-rw-r--r--lib/google_api/cloud_platform/client.rb115
-rw-r--r--lib/haml_lint/inline_javascript.rb7
-rw-r--r--lib/milestone_array.rb40
-rw-r--r--lib/omni_auth/strategies/bitbucket.rb4
-rw-r--r--lib/peek/views/gitaly.rb34
-rw-r--r--lib/rouge/lexers/math.rb9
-rw-r--r--lib/rouge/lexers/plantuml.rb9
-rw-r--r--lib/rspec_flaky/config.rb21
-rw-r--r--lib/rspec_flaky/flaky_example.rb21
-rw-r--r--lib/rspec_flaky/flaky_examples_collection.rb37
-rw-r--r--lib/rspec_flaky/listener.rb63
-rw-r--r--lib/support/nginx/gitlab4
-rw-r--r--lib/support/nginx/gitlab-ssl4
-rw-r--r--lib/system_check/app/git_user_default_ssh_config_check.rb3
-rw-r--r--lib/system_check/app/git_version_check.rb4
-rw-r--r--lib/system_check/app/ruby_version_check.rb4
-rw-r--r--lib/system_check/helpers.rb2
-rw-r--r--lib/system_check/incoming_email/imap_authentication_check.rb45
-rw-r--r--lib/system_check/simple_executor.rb2
-rw-r--r--lib/tasks/brakeman.rake2
-rw-r--r--lib/tasks/dev.rake5
-rw-r--r--lib/tasks/flay.rake4
-rw-r--r--lib/tasks/gemojione.rake33
-rw-r--r--lib/tasks/gettext.rake1
-rw-r--r--lib/tasks/gitlab/assets.rake2
-rw-r--r--lib/tasks/gitlab/backup.rake77
-rw-r--r--lib/tasks/gitlab/check.rake76
-rw-r--r--lib/tasks/gitlab/cleanup.rake25
-rw-r--r--lib/tasks/gitlab/dev.rake7
-rw-r--r--lib/tasks/gitlab/git.rake41
-rw-r--r--lib/tasks/gitlab/gitaly.rake63
-rw-r--r--lib/tasks/gitlab/helpers.rake4
-rw-r--r--lib/tasks/gitlab/import.rake14
-rw-r--r--lib/tasks/gitlab/info.rake2
-rw-r--r--lib/tasks/gitlab/lfs/check.rake8
-rw-r--r--lib/tasks/gitlab/list_repos.rake1
-rw-r--r--lib/tasks/gitlab/setup.rake2
-rw-r--r--lib/tasks/gitlab/shell.rake20
-rw-r--r--lib/tasks/gitlab/sidekiq.rake47
-rw-r--r--lib/tasks/gitlab/storage.rake140
-rw-r--r--lib/tasks/gitlab/tcp_check.rake20
-rw-r--r--lib/tasks/gitlab/traces.rake24
-rw-r--r--lib/tasks/gitlab/update_templates.rake1
-rw-r--r--lib/tasks/gitlab/uploads/check.rake8
-rw-r--r--lib/tasks/gitlab/users.rake11
-rw-r--r--lib/tasks/gitlab/workhorse.rake4
-rw-r--r--lib/tasks/haml-lint.rake9
-rw-r--r--lib/tasks/import.rake59
-rw-r--r--lib/tasks/lint.rake61
-rw-r--r--lib/tasks/migrate/migrate_iids.rake3
-rw-r--r--lib/tasks/migrate/setup_postgresql.rake22
-rw-r--r--lib/tasks/plugins.rake16
-rw-r--r--lib/tasks/tokens.rake12
702 files changed, 22806 insertions, 6609 deletions
diff --git a/lib/additional_email_headers_interceptor.rb b/lib/additional_email_headers_interceptor.rb
index 2358fa6bbfd..3cb1694b9f1 100644
--- a/lib/additional_email_headers_interceptor.rb
+++ b/lib/additional_email_headers_interceptor.rb
@@ -1,8 +1,6 @@
class AdditionalEmailHeadersInterceptor
def self.delivering_email(message)
- message.headers(
- 'Auto-Submitted' => 'auto-generated',
- 'X-Auto-Response-Suppress' => 'All'
- )
+ message.header['Auto-Submitted'] ||= 'auto-generated'
+ message.header['X-Auto-Response-Suppress'] ||= 'All'
end
end
diff --git a/lib/after_commit_queue.rb b/lib/after_commit_queue.rb
index 4750a2c373a..a4d8507960e 100644
--- a/lib/after_commit_queue.rb
+++ b/lib/after_commit_queue.rb
@@ -6,12 +6,42 @@ module AfterCommitQueue
after_rollback :_clear_after_commit_queue
end
- def run_after_commit(method = nil, &block)
- _after_commit_queue << proc { self.send(method) } if method # rubocop:disable GitlabSecurity/PublicSend
+ def run_after_commit(&block)
_after_commit_queue << block if block
+
+ true
+ end
+
+ def run_after_commit_or_now(&block)
+ if AfterCommitQueue.inside_transaction?
+ if ActiveRecord::Base.connection.current_transaction.records.include?(self)
+ run_after_commit(&block)
+ else
+ # If the current transaction does not include this record, we can run
+ # the block now, even if it queues a Sidekiq job.
+ Sidekiq::Worker.skipping_transaction_check do
+ instance_eval(&block)
+ end
+ end
+ else
+ instance_eval(&block)
+ end
+
true
end
+ def self.open_transactions_baseline
+ if ::Rails.env.test?
+ return DatabaseCleaner.connections.count { |conn| conn.strategy.is_a?(DatabaseCleaner::ActiveRecord::Transaction) }
+ end
+
+ 0
+ end
+
+ def self.inside_transaction?
+ ActiveRecord::Base.connection.open_transactions > open_transactions_baseline
+ end
+
protected
def _run_after_commit_queue
diff --git a/lib/api/access_requests.rb b/lib/api/access_requests.rb
index 374b611f55e..ae13c248171 100644
--- a/lib/api/access_requests.rb
+++ b/lib/api/access_requests.rb
@@ -24,7 +24,7 @@ module API
access_requesters = AccessRequestsFinder.new(source).execute!(current_user)
access_requesters = paginate(access_requesters.includes(:user))
- present access_requesters.map(&:user), with: Entities::AccessRequester, source: source
+ present access_requesters, with: Entities::AccessRequester
end
desc "Requests access for the authenticated user to a #{source_type}." do
@@ -36,7 +36,7 @@ module API
access_requester = source.request_access(current_user)
if access_requester.persisted?
- present access_requester.user, with: Entities::AccessRequester, access_requester: access_requester
+ present access_requester, with: Entities::AccessRequester
else
render_validation_error!(access_requester)
end
@@ -53,10 +53,13 @@ module API
put ':id/access_requests/:user_id/approve' do
source = find_source(source_type, params[:id])
- member = ::Members::ApproveAccessRequestService.new(source, current_user, declared_params).execute
+ access_requester = source.requesters.find_by!(user_id: params[:user_id])
+ member = ::Members::ApproveAccessRequestService
+ .new(current_user, declared_params)
+ .execute(access_requester)
status :created
- present member.user, with: Entities::Member, member: member
+ present member, with: Entities::Member
end
desc 'Denies an access request for the given user.' do
@@ -70,8 +73,7 @@ module API
member = source.requesters.find_by!(user_id: params[:user_id])
destroy_conditionally!(member) do
- ::Members::DestroyService.new(source, current_user, params)
- .execute(:requesters)
+ ::Members::DestroyService.new(current_user).execute(member)
end
end
end
diff --git a/lib/api/api.rb b/lib/api/api.rb
index 79e55a2f4f7..5e93c129bc8 100644
--- a/lib/api/api.rb
+++ b/lib/api/api.rb
@@ -4,12 +4,17 @@ module API
LOG_FILENAME = Rails.root.join("log", "api_json.log")
+ NO_SLASH_URL_PART_REGEX = %r{[^/]+}
+ PROJECT_ENDPOINT_REQUIREMENTS = { id: NO_SLASH_URL_PART_REGEX }.freeze
+ COMMIT_ENDPOINT_REQUIREMENTS = PROJECT_ENDPOINT_REQUIREMENTS.merge(sha: NO_SLASH_URL_PART_REGEX).freeze
+
use GrapeLogging::Middleware::RequestLogger,
logger: Logger.new(LOG_FILENAME),
formatter: Gitlab::GrapeLogging::Formatters::LogrageWithTimestamp.new,
include: [
GrapeLogging::Loggers::FilterParameters.new,
- GrapeLogging::Loggers::ClientEnv.new
+ GrapeLogging::Loggers::ClientEnv.new,
+ Gitlab::GrapeLogging::Loggers::UserLogger.new
]
allow_access_with_scope :api
@@ -57,7 +62,10 @@ module API
mount ::API::V3::Variables
end
- before { header['X-Frame-Options'] = 'SAMEORIGIN' }
+ before do
+ header['X-Frame-Options'] = 'SAMEORIGIN'
+ header['X-Content-Type-Options'] = 'nosniff'
+ end
# The locale is set to the current user's locale when `current_user` is loaded
after { Gitlab::I18n.use_default_locale }
@@ -96,12 +104,11 @@ module API
helpers ::API::Helpers
helpers ::API::Helpers::CommonHelpers
- NO_SLASH_URL_PART_REGEX = %r{[^/]+}
- PROJECT_ENDPOINT_REQUIREMENTS = { id: NO_SLASH_URL_PART_REGEX }.freeze
-
# Keep in alphabetical order
mount ::API::AccessRequests
+ mount ::API::Applications
mount ::API::AwardEmoji
+ mount ::API::Badges
mount ::API::Boards
mount ::API::Branches
mount ::API::BroadcastMessages
@@ -114,7 +121,9 @@ module API
mount ::API::Events
mount ::API::Features
mount ::API::Files
+ mount ::API::GroupBoards
mount ::API::Groups
+ mount ::API::GroupMilestones
mount ::API::Internal
mount ::API::Issues
mount ::API::Jobs
@@ -125,22 +134,24 @@ module API
mount ::API::Members
mount ::API::MergeRequestDiffs
mount ::API::MergeRequests
- mount ::API::ProjectMilestones
- mount ::API::GroupMilestones
mount ::API::Namespaces
mount ::API::Notes
mount ::API::NotificationSettings
+ mount ::API::PagesDomains
mount ::API::Pipelines
mount ::API::PipelineSchedules
+ mount ::API::ProjectExport
+ mount ::API::ProjectImport
mount ::API::ProjectHooks
mount ::API::Projects
+ mount ::API::ProjectMilestones
mount ::API::ProjectSnippets
mount ::API::ProtectedBranches
mount ::API::Repositories
mount ::API::Runner
mount ::API::Runners
+ mount ::API::Search
mount ::API::Services
- mount ::API::Session
mount ::API::Settings
mount ::API::SidekiqMetrics
mount ::API::Snippets
diff --git a/lib/api/api_guard.rb b/lib/api/api_guard.rb
index c4c0fdda665..c2113551207 100644
--- a/lib/api/api_guard.rb
+++ b/lib/api/api_guard.rb
@@ -6,9 +6,6 @@ module API
module APIGuard
extend ActiveSupport::Concern
- PRIVATE_TOKEN_HEADER = "HTTP_PRIVATE_TOKEN".freeze
- PRIVATE_TOKEN_PARAM = :private_token
-
included do |base|
# OAuth2 Resource Server Authentication
use Rack::OAuth2::Server::Resource::Bearer, 'The API' do |request|
@@ -42,76 +39,38 @@ module API
# Helper Methods for Grape Endpoint
module HelperMethods
- # Invokes the doorkeeper guard.
- #
- # If token is presented and valid, then it sets @current_user.
- #
- # If the token does not have sufficient scopes to cover the requred scopes,
- # then it raises InsufficientScopeError.
- #
- # If the token is expired, then it raises ExpiredError.
- #
- # If the token is revoked, then it raises RevokedError.
- #
- # If the token is not found (nil), then it returns nil
- #
- # Arguments:
- #
- # scopes: (optional) scopes required for this guard.
- # Defaults to empty array.
- #
- def doorkeeper_guard(scopes: [])
- access_token = find_access_token
- return nil unless access_token
-
- case AccessTokenValidationService.new(access_token, request: request).validate(scopes: scopes)
- when AccessTokenValidationService::INSUFFICIENT_SCOPE
- raise InsufficientScopeError.new(scopes)
-
- when AccessTokenValidationService::EXPIRED
- raise ExpiredError
-
- when AccessTokenValidationService::REVOKED
- raise RevokedError
-
- when AccessTokenValidationService::VALID
- @current_user = User.find(access_token.resource_owner_id)
- end
- end
+ include Gitlab::Auth::UserAuthFinders
- def find_user_by_private_token(scopes: [])
- token_string = (params[PRIVATE_TOKEN_PARAM] || env[PRIVATE_TOKEN_HEADER]).to_s
+ def find_current_user!
+ user = find_user_from_sources
+ return unless user
- return nil unless token_string.present?
+ forbidden!('User is blocked') unless Gitlab::UserAccess.new(user).allowed? && user.can?(:access_api)
- find_user_by_authentication_token(token_string) || find_user_by_personal_access_token(token_string, scopes)
+ user
end
- def current_user
- @current_user
+ def find_user_from_sources
+ find_user_from_access_token || find_user_from_warden
end
private
- def find_user_by_authentication_token(token_string)
- User.find_by_authentication_token(token_string)
- end
-
- def find_user_by_personal_access_token(token_string, scopes)
- access_token = PersonalAccessToken.active.find_by_token(token_string)
- return unless access_token
-
- if AccessTokenValidationService.new(access_token, request: request).include_any_scope?(scopes)
- User.find(access_token.user_id)
- end
- end
-
- def find_access_token
- @access_token ||= Doorkeeper.authenticate(doorkeeper_request, Doorkeeper.configuration.access_token_methods)
- end
-
- def doorkeeper_request
- @doorkeeper_request ||= ActionDispatch::Request.new(env)
+ # An array of scopes that were registered (using `allow_access_with_scope`)
+ # for the current endpoint class. It also returns scopes registered on
+ # `API::API`, since these are meant to apply to all API routes.
+ def scopes_registered_for_endpoint
+ @scopes_registered_for_endpoint ||=
+ begin
+ endpoint_classes = [options[:for].presence, ::API::API].compact
+ endpoint_classes.reduce([]) do |memo, endpoint|
+ if endpoint.respond_to?(:allowed_scopes)
+ memo.concat(endpoint.allowed_scopes)
+ else
+ memo
+ end
+ end
+ end
end
end
@@ -119,8 +78,11 @@ module API
private
def install_error_responders(base)
- error_classes = [MissingTokenError, TokenNotFoundError,
- ExpiredError, RevokedError, InsufficientScopeError]
+ error_classes = [Gitlab::Auth::MissingTokenError,
+ Gitlab::Auth::TokenNotFoundError,
+ Gitlab::Auth::ExpiredError,
+ Gitlab::Auth::RevokedError,
+ Gitlab::Auth::InsufficientScopeError]
base.__send__(:rescue_from, *error_classes, oauth2_bearer_token_error_handler) # rubocop:disable GitlabSecurity/PublicSend
end
@@ -129,25 +91,25 @@ module API
proc do |e|
response =
case e
- when MissingTokenError
+ when Gitlab::Auth::MissingTokenError
Rack::OAuth2::Server::Resource::Bearer::Unauthorized.new
- when TokenNotFoundError
+ when Gitlab::Auth::TokenNotFoundError
Rack::OAuth2::Server::Resource::Bearer::Unauthorized.new(
:invalid_token,
"Bad Access Token.")
- when ExpiredError
+ when Gitlab::Auth::ExpiredError
Rack::OAuth2::Server::Resource::Bearer::Unauthorized.new(
:invalid_token,
"Token is expired. You can either do re-authorization or token refresh.")
- when RevokedError
+ when Gitlab::Auth::RevokedError
Rack::OAuth2::Server::Resource::Bearer::Unauthorized.new(
:invalid_token,
"Token was revoked. You have to re-authorize from the user.")
- when InsufficientScopeError
+ when Gitlab::Auth::InsufficientScopeError
# FIXME: ForbiddenError (inherited from Bearer::Forbidden of Rack::Oauth2)
# does not include WWW-Authenticate header, which breaks the standard.
Rack::OAuth2::Server::Resource::Bearer::Forbidden.new(
@@ -160,21 +122,5 @@ module API
end
end
end
-
- #
- # Exceptions
- #
-
- MissingTokenError = Class.new(StandardError)
- TokenNotFoundError = Class.new(StandardError)
- ExpiredError = Class.new(StandardError)
- RevokedError = Class.new(StandardError)
-
- class InsufficientScopeError < StandardError
- attr_reader :scopes
- def initialize(scopes)
- @scopes = scopes
- end
- end
end
end
diff --git a/lib/api/applications.rb b/lib/api/applications.rb
new file mode 100644
index 00000000000..b122cdefe4e
--- /dev/null
+++ b/lib/api/applications.rb
@@ -0,0 +1,27 @@
+module API
+ # External applications API
+ class Applications < Grape::API
+ before { authenticated_as_admin! }
+
+ resource :applications do
+ desc 'Create a new application' do
+ detail 'This feature was introduced in GitLab 10.5'
+ success Entities::ApplicationWithSecret
+ end
+ params do
+ requires :name, type: String, desc: 'Application name'
+ requires :redirect_uri, type: String, desc: 'Application redirect URI'
+ requires :scopes, type: String, desc: 'Application scopes'
+ end
+ post do
+ application = Doorkeeper::Application.new(declared_params)
+
+ if application.save
+ present application, with: Entities::ApplicationWithSecret
+ else
+ render_validation_error! application
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/badges.rb b/lib/api/badges.rb
new file mode 100644
index 00000000000..334948b2995
--- /dev/null
+++ b/lib/api/badges.rb
@@ -0,0 +1,134 @@
+module API
+ class Badges < Grape::API
+ include PaginationParams
+
+ before { authenticate_non_get! }
+
+ helpers ::API::Helpers::BadgesHelpers
+
+ helpers do
+ def find_source_if_admin(source_type)
+ source = find_source(source_type, params[:id])
+
+ authorize_admin_source!(source_type, source)
+
+ source
+ end
+ end
+
+ %w[group project].each do |source_type|
+ params do
+ requires :id, type: String, desc: "The ID of a #{source_type}"
+ end
+ resource source_type.pluralize, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
+ desc "Gets a list of #{source_type} badges viewable by the authenticated user." do
+ detail 'This feature was introduced in GitLab 10.6.'
+ success Entities::Badge
+ end
+ params do
+ use :pagination
+ end
+ get ":id/badges" do
+ source = find_source(source_type, params[:id])
+
+ present_badges(source, paginate(source.badges))
+ end
+
+ desc "Preview a badge from a #{source_type}." do
+ detail 'This feature was introduced in GitLab 10.6.'
+ success Entities::BasicBadgeDetails
+ end
+ params do
+ requires :link_url, type: String, desc: 'URL of the badge link'
+ requires :image_url, type: String, desc: 'URL of the badge image'
+ end
+ get ":id/badges/render" do
+ authenticate!
+
+ source = find_source_if_admin(source_type)
+
+ badge = ::Badges::BuildService.new(declared_params(include_missing: false))
+ .execute(source)
+
+ if badge.valid?
+ present_badges(source, badge, with: Entities::BasicBadgeDetails)
+ else
+ render_validation_error!(badge)
+ end
+ end
+
+ desc "Gets a badge of a #{source_type}." do
+ detail 'This feature was introduced in GitLab 10.6.'
+ success Entities::Badge
+ end
+ params do
+ requires :badge_id, type: Integer, desc: 'The badge ID'
+ end
+ get ":id/badges/:badge_id" do
+ source = find_source(source_type, params[:id])
+ badge = find_badge(source)
+
+ present_badges(source, badge)
+ end
+
+ desc "Adds a badge to a #{source_type}." do
+ detail 'This feature was introduced in GitLab 10.6.'
+ success Entities::Badge
+ end
+ params do
+ requires :link_url, type: String, desc: 'URL of the badge link'
+ requires :image_url, type: String, desc: 'URL of the badge image'
+ end
+ post ":id/badges" do
+ source = find_source_if_admin(source_type)
+
+ badge = ::Badges::CreateService.new(declared_params(include_missing: false)).execute(source)
+
+ if badge.persisted?
+ present_badges(source, badge)
+ else
+ render_validation_error!(badge)
+ end
+ end
+
+ desc "Updates a badge of a #{source_type}." do
+ detail 'This feature was introduced in GitLab 10.6.'
+ success Entities::Badge
+ end
+ params do
+ optional :link_url, type: String, desc: 'URL of the badge link'
+ optional :image_url, type: String, desc: 'URL of the badge image'
+ end
+ put ":id/badges/:badge_id" do
+ source = find_source_if_admin(source_type)
+
+ badge = ::Badges::UpdateService.new(declared_params(include_missing: false))
+ .execute(find_badge(source))
+
+ if badge.valid?
+ present_badges(source, badge)
+ else
+ render_validation_error!(badge)
+ end
+ end
+
+ desc 'Removes a badge from a project or group.' do
+ detail 'This feature was introduced in GitLab 10.6.'
+ end
+ params do
+ requires :badge_id, type: Integer, desc: 'The badge ID'
+ end
+ delete ":id/badges/:badge_id" do
+ source = find_source_if_admin(source_type)
+ badge = find_badge(source)
+
+ if badge.is_a?(GroupBadge) && source.is_a?(Project)
+ error!('To delete a Group badge please use the Group endpoint', 403)
+ end
+
+ destroy_conditionally!(badge)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/boards.rb b/lib/api/boards.rb
index 366b0dc9a6f..6c706b2b4e1 100644
--- a/lib/api/boards.rb
+++ b/lib/api/boards.rb
@@ -1,45 +1,46 @@
module API
class Boards < Grape::API
+ include BoardsResponses
include PaginationParams
before { authenticate! }
+ helpers do
+ def board_parent
+ user_project
+ end
+ end
+
params do
requires :id, type: String, desc: 'The ID of a project'
end
resource :projects, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
- desc 'Get all project boards' do
- detail 'This feature was introduced in 8.13'
- success Entities::Board
- end
- params do
- use :pagination
- end
- get ':id/boards' do
- authorize!(:read_board, user_project)
- present paginate(user_project.boards), with: Entities::Board
+ segment ':id/boards' do
+ desc 'Get all project boards' do
+ detail 'This feature was introduced in 8.13'
+ success Entities::Board
+ end
+ params do
+ use :pagination
+ end
+ get '/' do
+ authorize!(:read_board, user_project)
+ present paginate(board_parent.boards), with: Entities::Board
+ end
+
+ desc 'Find a project board' do
+ detail 'This feature was introduced in 10.4'
+ success Entities::Board
+ end
+ get '/:board_id' do
+ present board, with: Entities::Board
+ end
end
params do
requires :board_id, type: Integer, desc: 'The ID of a board'
end
segment ':id/boards/:board_id' do
- helpers do
- def project_board
- board = user_project.boards.first
-
- if params[:board_id] == board.id
- board
- else
- not_found!('Board')
- end
- end
-
- def board_lists
- project_board.lists.destroyable
- end
- end
-
desc 'Get the lists of a project board' do
detail 'Does not include `done` list. This feature was introduced in 8.13'
success Entities::List
@@ -72,22 +73,13 @@ module API
requires :label_id, type: Integer, desc: 'The ID of an existing label'
end
post '/lists' do
- unless available_labels.exists?(params[:label_id])
+ unless available_labels_for(user_project).exists?(params[:label_id])
render_api_error!({ error: 'Label not found!' }, 400)
end
authorize!(:admin_list, user_project)
- service = ::Boards::Lists::CreateService.new(user_project, current_user,
- { label_id: params[:label_id] })
-
- list = service.execute(project_board)
-
- if list.valid?
- present list, with: Entities::List
- else
- render_validation_error!(list)
- end
+ create_list
end
desc 'Moves a board list to a new position' do
@@ -99,18 +91,11 @@ module API
requires :position, type: Integer, desc: 'The position of the list'
end
put '/lists/:list_id' do
- list = project_board.lists.movable.find(params[:list_id])
+ list = board_lists.find(params[:list_id])
authorize!(:admin_list, user_project)
- service = ::Boards::Lists::MoveService.new(user_project, current_user,
- { position: params[:position] })
-
- if service.execute(list)
- present list, with: Entities::List
- else
- render_api_error!({ error: "List could not be moved!" }, 400)
- end
+ move_list(list)
end
desc 'Delete a board list' do
@@ -124,12 +109,7 @@ module API
authorize!(:admin_list, user_project)
list = board_lists.find(params[:list_id])
- destroy_conditionally!(list) do |list|
- service = ::Boards::Lists::DestroyService.new(user_project, current_user)
- unless service.execute(list)
- render_api_error!({ error: 'List could not be deleted!' }, 400)
- end
- end
+ destroy_list(list)
end
end
end
diff --git a/lib/api/boards_responses.rb b/lib/api/boards_responses.rb
new file mode 100644
index 00000000000..ead0943a74d
--- /dev/null
+++ b/lib/api/boards_responses.rb
@@ -0,0 +1,50 @@
+module API
+ module BoardsResponses
+ extend ActiveSupport::Concern
+
+ included do
+ helpers do
+ def board
+ board_parent.boards.find(params[:board_id])
+ end
+
+ def board_lists
+ board.lists.destroyable
+ end
+
+ def create_list
+ create_list_service =
+ ::Boards::Lists::CreateService.new(board_parent, current_user, { label_id: params[:label_id] })
+
+ list = create_list_service.execute(board)
+
+ if list.valid?
+ present list, with: Entities::List
+ else
+ render_validation_error!(list)
+ end
+ end
+
+ def move_list(list)
+ move_list_service =
+ ::Boards::Lists::MoveService.new(board_parent, current_user, { position: params[:position].to_i })
+
+ if move_list_service.execute(list)
+ present list, with: Entities::List
+ else
+ render_api_error!({ error: "List could not be moved!" }, 400)
+ end
+ end
+
+ def destroy_list(list)
+ destroy_conditionally!(list) do |list|
+ service = ::Boards::Lists::DestroyService.new(board_parent, current_user)
+ unless service.execute(list)
+ render_api_error!({ error: 'List could not be deleted!' }, 400)
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/branches.rb b/lib/api/branches.rb
index 643c8e6fb8e..13cfba728fa 100644
--- a/lib/api/branches.rb
+++ b/lib/api/branches.rb
@@ -8,28 +8,51 @@ module API
before { authorize! :download_code, user_project }
+ helpers do
+ def find_branch!(branch_name)
+ begin
+ user_project.repository.find_branch(branch_name) || not_found!('Branch')
+ rescue Gitlab::Git::CommandError
+ render_api_error!('The branch refname is invalid', 400)
+ end
+ end
+
+ params :filter_params do
+ optional :search, type: String, desc: 'Return list of branches matching the search criteria'
+ end
+ end
+
params do
requires :id, type: String, desc: 'The ID of a project'
end
resource :projects, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
desc 'Get a project repository branches' do
- success Entities::RepoBranch
+ success Entities::Branch
end
params do
use :pagination
+ use :filter_params
end
get ':id/repository/branches' do
- branches = ::Kaminari.paginate_array(user_project.repository.branches.sort_by(&:name))
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42329')
- # n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/37442
- Gitlab::GitalyClient.allow_n_plus_1_calls do
- present paginate(branches), with: Entities::RepoBranch, project: user_project
- end
+ repository = user_project.repository
+
+ branches = BranchesFinder.new(repository, declared_params(include_missing: false)).execute
+
+ merged_branch_names = repository.merged_branch_names(branches.map(&:name))
+
+ present(
+ paginate(::Kaminari.paginate_array(branches)),
+ with: Entities::Branch,
+ project: user_project,
+ merged_branch_names: merged_branch_names
+ )
end
resource ':id/repository/branches/:branch', requirements: BRANCH_ENDPOINT_REQUIREMENTS do
desc 'Get a single branch' do
- success Entities::RepoBranch
+ success Entities::Branch
end
params do
requires :branch, type: String, desc: 'The name of the branch'
@@ -38,10 +61,9 @@ module API
user_project.repository.branch_exists?(params[:branch]) ? status(204) : status(404)
end
get do
- branch = user_project.repository.find_branch(params[:branch])
- not_found!('Branch') unless branch
+ branch = find_branch!(params[:branch])
- present branch, with: Entities::RepoBranch, project: user_project
+ present branch, with: Entities::Branch, project: user_project
end
end
@@ -50,7 +72,7 @@ module API
# in `gitlab-org/gitlab-ce!5081`. The API interface has not been changed (to maintain compatibility),
# but it works with the changed data model to infer `developers_can_merge` and `developers_can_push`.
desc 'Protect a single branch' do
- success Entities::RepoBranch
+ success Entities::Branch
end
params do
requires :branch, type: String, desc: 'The name of the branch'
@@ -60,8 +82,7 @@ module API
put ':id/repository/branches/:branch/protect', requirements: BRANCH_ENDPOINT_REQUIREMENTS do
authorize_admin_project
- branch = user_project.repository.find_branch(params[:branch])
- not_found!('Branch') unless branch
+ branch = find_branch!(params[:branch])
protected_branch = user_project.protected_branches.find_by(name: branch.name)
@@ -74,13 +95,13 @@ module API
service_args = [user_project, current_user, protected_branch_params]
protected_branch = if protected_branch
- ::ProtectedBranches::ApiUpdateService.new(*service_args).execute(protected_branch)
+ ::ProtectedBranches::LegacyApiUpdateService.new(*service_args).execute(protected_branch)
else
- ::ProtectedBranches::ApiCreateService.new(*service_args).execute
+ ::ProtectedBranches::LegacyApiCreateService.new(*service_args).execute
end
if protected_branch.valid?
- present branch, with: Entities::RepoBranch, project: user_project
+ present branch, with: Entities::Branch, project: user_project
else
render_api_error!(protected_branch.errors.full_messages, 422)
end
@@ -88,7 +109,7 @@ module API
# Note: This API will be deprecated in favor of the protected branches API.
desc 'Unprotect a single branch' do
- success Entities::RepoBranch
+ success Entities::Branch
end
params do
requires :branch, type: String, desc: 'The name of the branch'
@@ -96,16 +117,15 @@ module API
put ':id/repository/branches/:branch/unprotect', requirements: BRANCH_ENDPOINT_REQUIREMENTS do
authorize_admin_project
- branch = user_project.repository.find_branch(params[:branch])
- not_found!("Branch") unless branch
+ branch = find_branch!(params[:branch])
protected_branch = user_project.protected_branches.find_by(name: branch.name)
protected_branch&.destroy
- present branch, with: Entities::RepoBranch, project: user_project
+ present branch, with: Entities::Branch, project: user_project
end
desc 'Create branch' do
- success Entities::RepoBranch
+ success Entities::Branch
end
params do
requires :branch, type: String, desc: 'The name of the branch'
@@ -119,7 +139,7 @@ module API
if result[:status] == :success
present result[:branch],
- with: Entities::RepoBranch,
+ with: Entities::Branch,
project: user_project
else
render_api_error!(result[:message], 400)
@@ -133,8 +153,7 @@ module API
delete ':id/repository/branches/:branch', requirements: BRANCH_ENDPOINT_REQUIREMENTS do
authorize_push_project
- branch = user_project.repository.find_branch(params[:branch])
- not_found!('Branch') unless branch
+ branch = find_branch!(params[:branch])
commit = user_project.repository.commit(branch.dereferenced_target)
diff --git a/lib/api/circuit_breakers.rb b/lib/api/circuit_breakers.rb
index 118883f5ea5..c13154dc0ec 100644
--- a/lib/api/circuit_breakers.rb
+++ b/lib/api/circuit_breakers.rb
@@ -17,11 +17,11 @@ module API
end
def storage_health
- @failing_storage_health ||= Gitlab::Git::Storage::Health.for_all_storages
+ @storage_health ||= Gitlab::Git::Storage::Health.for_all_storages
end
end
- desc 'Get all failing git storages' do
+ desc 'Get all git storages' do
detail 'This feature was introduced in GitLab 9.5'
success Entities::RepositoryStorageHealth
end
@@ -41,7 +41,7 @@ module API
detail 'This feature was introduced in GitLab 9.5'
end
delete do
- Gitlab::Git::Storage::CircuitBreaker.reset_all!
+ Gitlab::Git::Storage::FailureInfo.reset_all!
end
end
end
diff --git a/lib/api/commits.rb b/lib/api/commits.rb
index 4b8d248f5f7..982f45425a3 100644
--- a/lib/api/commits.rb
+++ b/lib/api/commits.rb
@@ -4,8 +4,6 @@ module API
class Commits < Grape::API
include PaginationParams
- COMMIT_ENDPOINT_REQUIREMENTS = API::PROJECT_ENDPOINT_REQUIREMENTS.merge(sha: API::NO_SLASH_URL_PART_REGEX)
-
before { authorize! :download_code, user_project }
params do
@@ -13,32 +11,35 @@ module API
end
resource :projects, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
desc "Get a project's repository commits" do
- success Entities::RepoCommit
+ success Entities::Commit
end
params do
optional :ref_name, type: String, desc: 'The name of a repository branch or tag, if not given the default branch is used'
optional :since, type: DateTime, desc: 'Only commits after or on this date will be returned'
optional :until, type: DateTime, desc: 'Only commits before or on this date will be returned'
optional :path, type: String, desc: 'The file path'
+ optional :all, type: Boolean, desc: 'Every commit will be returned'
use :pagination
end
get ':id/repository/commits' do
path = params[:path]
before = params[:until]
after = params[:since]
- ref = params[:ref_name] || user_project.try(:default_branch) || 'master'
+ ref = params[:ref_name] || user_project.try(:default_branch) || 'master' unless params[:all]
offset = (params[:page] - 1) * params[:per_page]
+ all = params[:all]
commits = user_project.repository.commits(ref,
path: path,
limit: params[:per_page],
offset: offset,
before: before,
- after: after)
+ after: after,
+ all: all)
commit_count =
- if path || before || after
- user_project.repository.count_commits(ref: ref, path: path, before: before, after: after)
+ if all || path || before || after
+ user_project.repository.count_commits(ref: ref, path: path, before: before, after: after, all: all)
else
# Cacheable commit count.
user_project.repository.commit_count_for_ref(ref)
@@ -46,11 +47,11 @@ module API
paginated_commits = Kaminari.paginate_array(commits, total_count: commit_count)
- present paginate(paginated_commits), with: Entities::RepoCommit
+ present paginate(paginated_commits), with: Entities::Commit
end
desc 'Commit multiple file changes as one commit' do
- success Entities::RepoCommitDetail
+ success Entities::CommitDetail
detail 'This feature was introduced in GitLab 8.13'
end
params do
@@ -72,25 +73,26 @@ module API
if result[:status] == :success
commit_detail = user_project.repository.commit(result[:result])
- present commit_detail, with: Entities::RepoCommitDetail
+ present commit_detail, with: Entities::CommitDetail
else
render_api_error!(result[:message], 400)
end
end
desc 'Get a specific commit of a project' do
- success Entities::RepoCommitDetail
+ success Entities::CommitDetail
failure [[404, 'Commit Not Found']]
end
params do
requires :sha, type: String, desc: 'A commit sha, or the name of a branch or tag'
+ optional :stats, type: Boolean, default: true, desc: 'Include commit stats'
end
- get ':id/repository/commits/:sha', requirements: COMMIT_ENDPOINT_REQUIREMENTS do
+ get ':id/repository/commits/:sha', requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
commit = user_project.commit(params[:sha])
not_found! 'Commit' unless commit
- present commit, with: Entities::RepoCommitDetail
+ present commit, with: Entities::CommitDetail, stats: params[:stats]
end
desc 'Get the diff for a specific commit of a project' do
@@ -98,13 +100,16 @@ module API
end
params do
requires :sha, type: String, desc: 'A commit sha, or the name of a branch or tag'
+ use :pagination
end
- get ':id/repository/commits/:sha/diff', requirements: COMMIT_ENDPOINT_REQUIREMENTS do
+ get ':id/repository/commits/:sha/diff', requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
commit = user_project.commit(params[:sha])
not_found! 'Commit' unless commit
- present commit.raw_diffs.to_a, with: Entities::RepoDiff
+ raw_diffs = ::Kaminari.paginate_array(commit.raw_diffs.to_a)
+
+ present paginate(raw_diffs), with: Entities::Diff
end
desc "Get a commit's comments" do
@@ -115,24 +120,24 @@ module API
use :pagination
requires :sha, type: String, desc: 'A commit sha, or the name of a branch or tag'
end
- get ':id/repository/commits/:sha/comments', requirements: COMMIT_ENDPOINT_REQUIREMENTS do
+ get ':id/repository/commits/:sha/comments', requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
commit = user_project.commit(params[:sha])
not_found! 'Commit' unless commit
- notes = user_project.notes.where(commit_id: commit.id).order(:created_at)
+ notes = commit.notes.order(:created_at)
present paginate(notes), with: Entities::CommitNote
end
desc 'Cherry pick commit into a branch' do
detail 'This feature was introduced in GitLab 8.15'
- success Entities::RepoCommit
+ success Entities::Commit
end
params do
requires :sha, type: String, desc: 'A commit sha, or the name of a branch or tag to be cherry picked'
requires :branch, type: String, desc: 'The name of the branch'
end
- post ':id/repository/commits/:sha/cherry_pick', requirements: COMMIT_ENDPOINT_REQUIREMENTS do
+ post ':id/repository/commits/:sha/cherry_pick', requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
authorize! :push_code, user_project
commit = user_project.commit(params[:sha])
@@ -151,12 +156,33 @@ module API
if result[:status] == :success
branch = user_project.repository.find_branch(params[:branch])
- present user_project.repository.commit(branch.dereferenced_target), with: Entities::RepoCommit
+ present user_project.repository.commit(branch.dereferenced_target), with: Entities::Commit
else
render_api_error!(result[:message], 400)
end
end
+ desc 'Get all references a commit is pushed to' do
+ detail 'This feature was introduced in GitLab 10.6'
+ success Entities::BasicRef
+ end
+ params do
+ requires :sha, type: String, desc: 'A commit sha'
+ optional :type, type: String, values: %w[branch tag all], default: 'all', desc: 'Scope'
+ use :pagination
+ end
+ get ':id/repository/commits/:sha/refs', requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
+ commit = user_project.commit(params[:sha])
+ not_found!('Commit') unless commit
+
+ refs = []
+ refs.concat(user_project.repository.branch_names_contains(commit.id).map {|name| { type: 'branch', name: name }}) unless params[:type] == 'tag'
+ refs.concat(user_project.repository.tag_names_contains(commit.id).map {|name| { type: 'tag', name: name }}) unless params[:type] == 'branch'
+ refs = Kaminari.paginate_array(refs)
+
+ present paginate(refs), with: Entities::BasicRef
+ end
+
desc 'Post comment to commit' do
success Entities::CommitNote
end
@@ -166,10 +192,10 @@ module API
optional :path, type: String, desc: 'The file path'
given :path do
requires :line, type: Integer, desc: 'The line number'
- requires :line_type, type: String, values: %w(new old), default: 'new', desc: 'The type of the line'
+ requires :line_type, type: String, values: %w[new old], default: 'new', desc: 'The type of the line'
end
end
- post ':id/repository/commits/:sha/comments', requirements: COMMIT_ENDPOINT_REQUIREMENTS do
+ post ':id/repository/commits/:sha/comments', requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
commit = user_project.commit(params[:sha])
not_found! 'Commit' unless commit
@@ -182,11 +208,13 @@ module API
if params[:path]
commit.raw_diffs(limits: false).each do |diff|
next unless diff.new_path == params[:path]
+
lines = Gitlab::Diff::Parser.new.parse(diff.diff.each_line)
lines.each do |line|
next unless line.new_pos == params[:line] && line.type == params[:line_type]
- break opts[:line_code] = Gitlab::Diff::LineCode.generate(diff.new_path, line.new_pos, line.old_pos)
+
+ break opts[:line_code] = Gitlab::Git.diff_line_code(diff.new_path, line.new_pos, line.old_pos)
end
break if opts[:line_code]
diff --git a/lib/api/custom_attributes_endpoints.rb b/lib/api/custom_attributes_endpoints.rb
new file mode 100644
index 00000000000..5000aa0d9ac
--- /dev/null
+++ b/lib/api/custom_attributes_endpoints.rb
@@ -0,0 +1,77 @@
+module API
+ module CustomAttributesEndpoints
+ extend ActiveSupport::Concern
+
+ included do
+ attributable_class = name.demodulize.singularize
+ attributable_key = attributable_class.underscore
+ attributable_name = attributable_class.humanize(capitalize: false)
+ attributable_finder = "find_#{attributable_key}"
+
+ helpers do
+ params :custom_attributes_key do
+ requires :key, type: String, desc: 'The key of the custom attribute'
+ end
+ end
+
+ desc "Get all custom attributes on a #{attributable_name}" do
+ success Entities::CustomAttribute
+ end
+ get ':id/custom_attributes' do
+ resource = public_send(attributable_finder, params[:id]) # rubocop:disable GitlabSecurity/PublicSend
+ authorize! :read_custom_attribute
+
+ present resource.custom_attributes, with: Entities::CustomAttribute
+ end
+
+ desc "Get a custom attribute on a #{attributable_name}" do
+ success Entities::CustomAttribute
+ end
+ params do
+ use :custom_attributes_key
+ end
+ get ':id/custom_attributes/:key' do
+ resource = public_send(attributable_finder, params[:id]) # rubocop:disable GitlabSecurity/PublicSend
+ authorize! :read_custom_attribute
+
+ custom_attribute = resource.custom_attributes.find_by!(key: params[:key])
+
+ present custom_attribute, with: Entities::CustomAttribute
+ end
+
+ desc "Set a custom attribute on a #{attributable_name}"
+ params do
+ use :custom_attributes_key
+ requires :value, type: String, desc: 'The value of the custom attribute'
+ end
+ put ':id/custom_attributes/:key' do
+ resource = public_send(attributable_finder, params[:id]) # rubocop:disable GitlabSecurity/PublicSend
+ authorize! :update_custom_attribute
+
+ custom_attribute = resource.custom_attributes
+ .find_or_initialize_by(key: params[:key])
+
+ custom_attribute.update(value: params[:value])
+
+ if custom_attribute.valid?
+ present custom_attribute, with: Entities::CustomAttribute
+ else
+ render_validation_error!(custom_attribute)
+ end
+ end
+
+ desc "Delete a custom attribute on a #{attributable_name}"
+ params do
+ use :custom_attributes_key
+ end
+ delete ':id/custom_attributes/:key' do
+ resource = public_send(attributable_finder, params[:id]) # rubocop:disable GitlabSecurity/PublicSend
+ authorize! :update_custom_attribute
+
+ resource.custom_attributes.find_by!(key: params[:key]).destroy
+
+ status 204
+ end
+ end
+ end
+end
diff --git a/lib/api/deploy_keys.rb b/lib/api/deploy_keys.rb
index 281269b1190..b0b7b50998f 100644
--- a/lib/api/deploy_keys.rb
+++ b/lib/api/deploy_keys.rb
@@ -4,6 +4,16 @@ module API
before { authenticate! }
+ helpers do
+ def add_deploy_keys_project(project, attrs = {})
+ project.deploy_keys_projects.create(attrs)
+ end
+
+ def find_by_deploy_key(project, key_id)
+ project.deploy_keys_projects.find_by!(deploy_key: key_id)
+ end
+ end
+
desc 'Return all deploy keys'
params do
use :pagination
@@ -21,28 +31,31 @@ module API
before { authorize_admin_project }
desc "Get a specific project's deploy keys" do
- success Entities::SSHKey
+ success Entities::DeployKeysProject
end
params do
use :pagination
end
get ":id/deploy_keys" do
- present paginate(user_project.deploy_keys), with: Entities::SSHKey
+ keys = user_project.deploy_keys_projects.preload(:deploy_key)
+
+ present paginate(keys), with: Entities::DeployKeysProject
end
desc 'Get single deploy key' do
- success Entities::SSHKey
+ success Entities::DeployKeysProject
end
params do
requires :key_id, type: Integer, desc: 'The ID of the deploy key'
end
get ":id/deploy_keys/:key_id" do
- key = user_project.deploy_keys.find params[:key_id]
- present key, with: Entities::SSHKey
+ key = find_by_deploy_key(user_project, params[:key_id])
+
+ present key, with: Entities::DeployKeysProject
end
desc 'Add new deploy key to currently authenticated user' do
- success Entities::SSHKey
+ success Entities::DeployKeysProject
end
params do
requires :key, type: String, desc: 'The new deploy key'
@@ -53,24 +66,31 @@ module API
params[:key].strip!
# Check for an existing key joined to this project
- key = user_project.deploy_keys.find_by(key: params[:key])
+ key = user_project.deploy_keys_projects
+ .joins(:deploy_key)
+ .find_by(keys: { key: params[:key] })
+
if key
- present key, with: Entities::SSHKey
+ present key, with: Entities::DeployKeysProject
break
end
# Check for available deploy keys in other projects
key = current_user.accessible_deploy_keys.find_by(key: params[:key])
if key
- user_project.deploy_keys << key
- present key, with: Entities::SSHKey
+ added_key = add_deploy_keys_project(user_project, deploy_key: key, can_push: !!params[:can_push])
+
+ present added_key, with: Entities::DeployKeysProject
break
end
# Create a new deploy key
- key = DeployKey.new(declared_params(include_missing: false))
- if key.valid? && user_project.deploy_keys << key
- present key, with: Entities::SSHKey
+ key_attributes = { can_push: !!params[:can_push],
+ deploy_key_attributes: declared_params.except(:can_push) }
+ key = add_deploy_keys_project(user_project, key_attributes)
+
+ if key.valid?
+ present key, with: Entities::DeployKeysProject
else
render_validation_error!(key)
end
@@ -86,14 +106,21 @@ module API
at_least_one_of :title, :can_push
end
put ":id/deploy_keys/:key_id" do
- key = DeployKey.find(params.delete(:key_id))
+ deploy_keys_project = find_by_deploy_key(user_project, params[:key_id])
- authorize!(:update_deploy_key, key)
+ authorize!(:update_deploy_key, deploy_keys_project.deploy_key)
- if key.update_attributes(declared_params(include_missing: false))
- present key, with: Entities::SSHKey
+ can_push = params[:can_push].nil? ? deploy_keys_project.can_push : params[:can_push]
+ title = params[:title] || deploy_keys_project.deploy_key.title
+
+ result = deploy_keys_project.update_attributes(can_push: can_push,
+ deploy_key_attributes: { id: params[:key_id],
+ title: title })
+
+ if result
+ present deploy_keys_project, with: Entities::DeployKeysProject
else
- render_validation_error!(key)
+ render_validation_error!(deploy_keys_project)
end
end
@@ -122,7 +149,7 @@ module API
requires :key_id, type: Integer, desc: 'The ID of the deploy key'
end
delete ":id/deploy_keys/:key_id" do
- key = user_project.deploy_keys_projects.find_by(deploy_key_id: params[:key_id])
+ key = user_project.deploy_keys.find(params[:key_id])
not_found!('Deploy Key') unless key
destroy_conditionally!(key)
diff --git a/lib/api/deployments.rb b/lib/api/deployments.rb
index 1efee9a1324..184fae0eb76 100644
--- a/lib/api/deployments.rb
+++ b/lib/api/deployments.rb
@@ -15,11 +15,13 @@ module API
end
params do
use :pagination
+ optional :order_by, type: String, values: %w[id iid created_at ref], default: 'id', desc: 'Return deployments ordered by `id` or `iid` or `created_at` or `ref`'
+ optional :sort, type: String, values: %w[asc desc], default: 'asc', desc: 'Sort by asc (ascending) or desc (descending)'
end
get ':id/deployments' do
authorize! :read_deployment, user_project
- present paginate(user_project.deployments), with: Entities::Deployment
+ present paginate(user_project.deployments.order(params[:order_by] => params[:sort])), with: Entities::Deployment
end
desc 'Gets a specific deployment' do
diff --git a/lib/api/entities.rb b/lib/api/entities.rb
index 71253f72533..f39906270d8 100644
--- a/lib/api/entities.rb
+++ b/lib/api/entities.rb
@@ -16,10 +16,14 @@ module API
class UserBasic < UserSafe
expose :state
+
expose :avatar_url do |user, options|
user.avatar_url(only_path: false)
end
+ expose :avatar_path, if: ->(user, options) { options.fetch(:only_path, false) && user.avatar_path }
+ expose :custom_attributes, using: 'API::Entities::CustomAttribute', if: :with_custom_attributes
+
expose :web_url do |user, options|
Gitlab::Routing.url_helpers.user_url(user)
end
@@ -57,21 +61,17 @@ module API
expose :admin?, as: :is_admin
end
- class UserWithPrivateDetails < UserWithAdmin
- expose :private_token
- end
-
class Email < Grape::Entity
expose :id, :email
end
class Hook < Grape::Entity
- expose :id, :url, :created_at, :push_events, :tag_push_events, :repository_update_events
+ expose :id, :url, :created_at, :push_events, :tag_push_events, :merge_requests_events, :repository_update_events
expose :enable_ssl_verification
end
class ProjectHook < Hook
- expose :project_id, :issues_events, :merge_requests_events
+ expose :project_id, :issues_events, :confidential_issues_events
expose :note_events, :pipeline_events, :wiki_page_events
expose :job_events
end
@@ -84,13 +84,61 @@ module API
expose :group_access, as: :group_access_level
end
- class BasicProjectDetails < Grape::Entity
- expose :id, :description, :default_branch, :tag_list
- expose :ssh_url_to_repo, :http_url_to_repo, :web_url
+ class ProjectIdentity < Grape::Entity
+ expose :id, :description
expose :name, :name_with_namespace
expose :path, :path_with_namespace
+ expose :created_at
+ end
+
+ class ProjectExportStatus < ProjectIdentity
+ include ::API::Helpers::RelatedResourcesHelpers
+
+ expose :export_status
+ expose :_links, if: lambda { |project, _options| project.export_status == :finished } do
+ expose :api_url do |project|
+ expose_url(api_v4_projects_export_download_path(id: project.id))
+ end
+
+ expose :web_url do |project|
+ Gitlab::Routing.url_helpers.download_export_project_url(project)
+ end
+ end
+ end
+
+ class ProjectImportStatus < ProjectIdentity
+ expose :import_status
+
+ # TODO: Use `expose_nil` once we upgrade the grape-entity gem
+ expose :import_error, if: lambda { |status, _ops| status.import_error }
+ end
+
+ class BasicProjectDetails < ProjectIdentity
+ include ::API::ProjectsRelationBuilder
+
+ expose :default_branch
+ # Avoids an N+1 query: https://github.com/mbleigh/acts-as-taggable-on/issues/91#issuecomment-168273770
+ expose :tag_list do |project|
+ # project.tags.order(:name).pluck(:name) is the most suitable option
+        # to avoid loading all the ActiveRecord objects but, if we use it here,
+        # it overrides the preloaded associations and makes a query
+ # (fixed in https://github.com/rails/rails/pull/25976).
+ project.tags.map(&:name).sort
+ end
+ expose :ssh_url_to_repo, :http_url_to_repo, :web_url
+ expose :avatar_url do |project, options|
+ project.avatar_url(only_path: false)
+ end
expose :star_count, :forks_count
- expose :created_at, :last_activity_at
+ expose :last_activity_at
+
+ expose :custom_attributes, using: 'API::Entities::CustomAttribute', if: :with_custom_attributes
+
+ def self.preload_relation(projects_relation, options = {})
+ projects_relation.preload(:project_feature, :route)
+ .preload(namespace: [:route, :owner],
+ tags: :taggings)
+ end
end
class Project < BasicProjectDetails
@@ -142,19 +190,17 @@ module API
expose :shared_runners_enabled
expose :lfs_enabled?, as: :lfs_enabled
expose :creator_id
- expose :namespace, using: 'API::Entities::Namespace'
+ expose :namespace, using: 'API::Entities::NamespaceBasic'
expose :forked_from_project, using: Entities::BasicProjectDetails, if: lambda { |project, options| project.forked? }
expose :import_status
expose :import_error, if: lambda { |_project, options| options[:user_can_admin_project] }
- expose :avatar_url do |user, options|
- user.avatar_url(only_path: false)
- end
+
expose :open_issues_count, if: lambda { |project, options| project.feature_available?(:issues, options[:current_user]) }
expose :runners_token, if: lambda { |_project, options| options[:user_can_admin_project] }
expose :public_builds, as: :public_jobs
expose :ci_config_path
expose :shared_with_groups do |project, options|
- SharedGroup.represent(project.project_group_links.all, options)
+ SharedGroup.represent(project.project_group_links, options)
end
expose :only_allow_merge_if_pipeline_succeeds
expose :request_access_enabled
@@ -162,6 +208,18 @@ module API
expose :printing_merge_request_link_enabled
expose :statistics, using: 'API::Entities::ProjectStatistics', if: :statistics
+
+ def self.preload_relation(projects_relation, options = {})
+ super(projects_relation).preload(:group)
+ .preload(project_group_links: :group,
+ fork_network: :root_project,
+ forked_project_link: :forked_from_project,
+ forked_from_project: [:route, :forks, namespace: :route, tags: :taggings])
+ end
+
+ def self.forks_counting_projects(projects_relation)
+ projects_relation + projects_relation.map(&:forked_from_project).compact
+ end
end
class ProjectStatistics < Grape::Entity
@@ -172,29 +230,22 @@ module API
expose :build_artifacts_size, as: :job_artifacts_size
end
- class Member < UserBasic
- expose :access_level do |user, options|
- member = options[:member] || options[:source].members.find_by(user_id: user.id)
- member.access_level
- end
- expose :expires_at do |user, options|
- member = options[:member] || options[:source].members.find_by(user_id: user.id)
- member.expires_at
- end
+ class Member < Grape::Entity
+ expose :user, merge: true, using: UserBasic
+ expose :access_level
+ expose :expires_at
end
- class AccessRequester < UserBasic
- expose :requested_at do |user, options|
- access_requester = options[:access_requester] || options[:source].requesters.find_by(user_id: user.id)
- access_requester.requested_at
- end
+ class AccessRequester < Grape::Entity
+ expose :user, merge: true, using: UserBasic
+ expose :requested_at
end
class Group < Grape::Entity
expose :id, :name, :path, :description, :visibility
expose :lfs_enabled?, as: :lfs_enabled
- expose :avatar_url do |user, options|
- user.avatar_url(only_path: false)
+ expose :avatar_url do |group, options|
+ group.avatar_url(only_path: false)
end
expose :web_url
expose :request_access_enabled
@@ -204,6 +255,8 @@ module API
expose :parent_id
end
+ expose :custom_attributes, using: 'API::Entities::CustomAttribute', if: :with_custom_attributes
+
expose :statistics, if: :statistics do
with_options format_with: -> (value) { value.to_i } do
expose :storage_size
@@ -215,11 +268,24 @@ module API
end
class GroupDetail < Group
- expose :projects, using: Entities::Project
- expose :shared_projects, using: Entities::Project
+ expose :projects, using: Entities::Project do |group, options|
+ GroupProjectsFinder.new(
+ group: group,
+ current_user: options[:current_user],
+ options: { only_owned: true }
+ ).execute
+ end
+
+ expose :shared_projects, using: Entities::Project do |group, options|
+ GroupProjectsFinder.new(
+ group: group,
+ current_user: options[:current_user],
+ options: { only_shared: true }
+ ).execute
+ end
end
- class RepoCommit < Grape::Entity
+ class Commit < Grape::Entity
expose :id, :short_id, :title, :created_at
expose :parent_ids
expose :safe_message, as: :message
@@ -227,26 +293,33 @@ module API
expose :committer_name, :committer_email, :committed_date
end
- class RepoCommitStats < Grape::Entity
+ class CommitStats < Grape::Entity
expose :additions, :deletions, :total
end
- class RepoCommitDetail < RepoCommit
- expose :stats, using: Entities::RepoCommitStats
+ class CommitDetail < Commit
+ expose :stats, using: Entities::CommitStats, if: :stats
expose :status
+ expose :last_pipeline, using: 'API::Entities::PipelineBasic'
+ expose :project_id
end
- class RepoBranch < Grape::Entity
+ class BasicRef < Grape::Entity
+ expose :type, :name
+ end
+
+ class Branch < Grape::Entity
expose :name
- expose :commit, using: Entities::RepoCommit do |repo_branch, options|
+ expose :commit, using: Entities::Commit do |repo_branch, options|
options[:project].repository.commit(repo_branch.dereferenced_target)
end
expose :merged do |repo_branch, options|
- # n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/37442
- Gitlab::GitalyClient.allow_n_plus_1_calls do
- options[:project].repository.merged_to_root_ref?(repo_branch.name)
+ if options[:merged_branch_names]
+ options[:merged_branch_names].include?(repo_branch.name)
+ else
+ options[:project].repository.merged_to_root_ref?(repo_branch)
end
end
@@ -263,7 +336,7 @@ module API
end
end
- class RepoTreeObject < Grape::Entity
+ class TreeObject < Grape::Entity
expose :id, :name, :type, :path
expose :mode do |obj, options|
@@ -273,24 +346,20 @@ module API
end
end
- class ProjectSnippet < Grape::Entity
+ class Snippet < Grape::Entity
expose :id, :title, :file_name, :description
expose :author, using: Entities::UserBasic
expose :updated_at, :created_at
-
- expose :web_url do |snippet, options|
+ expose :project_id
+ expose :web_url do |snippet|
Gitlab::UrlBuilder.build(snippet)
end
end
- class PersonalSnippet < Grape::Entity
- expose :id, :title, :file_name, :description
- expose :author, using: Entities::UserBasic
- expose :updated_at, :created_at
+ class ProjectSnippet < Snippet
+ end
- expose :web_url do |snippet|
- Gitlab::UrlBuilder.build(snippet)
- end
+ class PersonalSnippet < Snippet
expose :raw_url do |snippet|
Gitlab::UrlBuilder.build(snippet) + "/raw"
end
@@ -303,7 +372,7 @@ module API
expose :state, :created_at, :updated_at
end
- class RepoDiff < Grape::Entity
+ class Diff < Grape::Entity
expose :old_path, :new_path, :a_mode, :b_mode
expose :new_file?, as: :new_file
expose :renamed_file?, as: :renamed_file
@@ -366,6 +435,7 @@ module API
end
expose :due_date
expose :confidential
+ expose :discussion_locked
expose :web_url do |issue, options|
Gitlab::UrlBuilder.build(issue)
@@ -426,6 +496,10 @@ module API
expose :id
end
+ class PipelineBasic < Grape::Entity
+ expose :id, :sha, :ref, :status
+ end
+
class MergeRequestSimple < ProjectEntity
expose :title
expose :web_url do |merge_request, options|
@@ -458,10 +532,19 @@ module API
expose :work_in_progress?, as: :work_in_progress
expose :milestone, using: Entities::Milestone
expose :merge_when_pipeline_succeeds
- expose :merge_status
+
+ # Ideally we should deprecate `MergeRequest#merge_status` exposure and
+ # use `MergeRequest#mergeable?` instead (boolean).
+ # See https://gitlab.com/gitlab-org/gitlab-ce/issues/42344 for more
+ # information.
+ expose :merge_status do |merge_request|
+ merge_request.check_if_can_be_merged
+ merge_request.merge_status
+ end
expose :diff_head_sha, as: :sha
expose :merge_commit_sha
expose :user_notes_count
+ expose :discussion_locked
expose :should_remove_source_branch?, as: :should_remove_source_branch
expose :force_remove_source_branch?, as: :force_remove_source_branch
@@ -478,10 +561,50 @@ module API
expose :subscribed do |merge_request, options|
merge_request.subscribed?(options[:current_user], options[:project])
end
+
+ expose :changes_count do |merge_request, _options|
+ merge_request.merge_request_diff.real_size
+ end
+
+ expose :merged_by, using: Entities::UserBasic do |merge_request, _options|
+ merge_request.metrics&.merged_by
+ end
+
+ expose :merged_at do |merge_request, _options|
+ merge_request.metrics&.merged_at
+ end
+
+ expose :closed_by, using: Entities::UserBasic do |merge_request, _options|
+ merge_request.metrics&.latest_closed_by
+ end
+
+ expose :closed_at do |merge_request, _options|
+ merge_request.metrics&.latest_closed_at
+ end
+
+ expose :latest_build_started_at, if: -> (_, options) { build_available?(options) } do |merge_request, _options|
+ merge_request.metrics&.latest_build_started_at
+ end
+
+ expose :latest_build_finished_at, if: -> (_, options) { build_available?(options) } do |merge_request, _options|
+ merge_request.metrics&.latest_build_finished_at
+ end
+
+ expose :first_deployed_to_production_at, if: -> (_, options) { build_available?(options) } do |merge_request, _options|
+ merge_request.metrics&.first_deployed_to_production_at
+ end
+
+ expose :pipeline, using: Entities::PipelineBasic, if: -> (_, options) { build_available?(options) } do |merge_request, _options|
+ merge_request.metrics&.pipeline
+ end
+
+ def build_available?(options)
+ options[:project]&.feature_available?(:builds, options[:current_user])
+ end
end
class MergeRequestChanges < MergeRequest
- expose :diffs, as: :changes, using: Entities::RepoDiff do |compare, _|
+ expose :diffs, as: :changes, using: Entities::Diff do |compare, _|
compare.raw_diffs(limits: false).to_a
end
end
@@ -492,21 +615,26 @@ module API
end
class MergeRequestDiffFull < MergeRequestDiff
- expose :commits, using: Entities::RepoCommit
+ expose :commits, using: Entities::Commit
- expose :diffs, using: Entities::RepoDiff do |compare, _|
+ expose :diffs, using: Entities::Diff do |compare, _|
compare.raw_diffs(limits: false).to_a
end
end
class SSHKey < Grape::Entity
- expose :id, :title, :key, :created_at, :can_push
+ expose :id, :title, :key, :created_at
end
class SSHKeyWithUser < SSHKey
expose :user, using: Entities::UserPublic
end
+ class DeployKeysProject < Grape::Entity
+ expose :deploy_key, merge: true, using: Entities::SSHKey
+ expose :can_push
+ end
+
class GPGKey < Grape::Entity
expose :id, :key, :created_at
end
@@ -590,8 +718,7 @@ module API
expose :target_type
expose :target do |todo, options|
- target = todo.target_type == 'Commit' ? 'RepoCommit' : todo.target_type
- Entities.const_get(target).represent(todo.target, options)
+ Entities.const_get(todo.target_type).represent(todo.target, options)
end
expose :target_url do |todo, options|
@@ -609,9 +736,11 @@ module API
expose :created_at
end
- class Namespace < Grape::Entity
+ class NamespaceBasic < Grape::Entity
expose :id, :name, :path, :kind, :full_path, :parent_id
+ end
+ class Namespace < NamespaceBasic
expose :members_count_with_descendants, if: -> (namespace, opts) { expose_members_count_with_descendants?(namespace, opts) } do |namespace, _|
namespace.users_with_descendants.count
end
@@ -653,15 +782,13 @@ module API
class ProjectService < Grape::Entity
expose :id, :title, :created_at, :updated_at, :active
- expose :push_events, :issues_events, :merge_requests_events
- expose :tag_push_events, :note_events, :pipeline_events
+ expose :push_events, :issues_events, :confidential_issues_events
+ expose :merge_requests_events, :tag_push_events, :note_events
+ expose :pipeline_events, :wiki_page_events
expose :job_events
# Expose serialized properties
expose :properties do |service, options|
- field_names = service.fields
- .select { |field| options[:include_passwords] || field[:type] != 'password' }
- .map { |field| field[:name] }
- service.properties.slice(*field_names)
+ service.properties.slice(*service.api_field_names)
end
end
@@ -671,7 +798,7 @@ module API
if options.key?(:project_members)
(options[:project_members] || []).find { |member| member.source_id == project.id }
else
- project.project_members.find_by(user_id: options[:current_user].id)
+ project.project_member(options[:current_user])
end
end
@@ -680,11 +807,25 @@ module API
if options.key?(:group_members)
(options[:group_members] || []).find { |member| member.source_id == project.namespace_id }
else
- project.group.group_members.find_by(user_id: options[:current_user].id)
+ project.group.group_member(options[:current_user])
end
end
end
end
+
+ def self.preload_relation(projects_relation, options = {})
+ relation = super(projects_relation, options)
+
+ unless options.key?(:group_members)
+ relation = relation.preload(group: [group_members: [:source, user: [notification_settings: :source]]])
+ end
+
+ unless options.key?(:project_members)
+ relation = relation.preload(project_members: [:source, user: [notification_settings: :source]])
+ end
+
+ relation
+ end
end
class LabelBasic < Grape::Entity
@@ -721,21 +862,23 @@ module API
class Board < Grape::Entity
expose :id
+ expose :project, using: Entities::BasicProjectDetails
+
expose :lists, using: Entities::List do |board|
board.lists.destroyable
end
end
class Compare < Grape::Entity
- expose :commit, using: Entities::RepoCommit do |compare, options|
- Commit.decorate(compare.commits, nil).last
+ expose :commit, using: Entities::Commit do |compare, options|
+ ::Commit.decorate(compare.commits, nil).last
end
- expose :commits, using: Entities::RepoCommit do |compare, options|
- Commit.decorate(compare.commits, nil)
+ expose :commits, using: Entities::Commit do |compare, options|
+ ::Commit.decorate(compare.commits, nil)
end
- expose :diffs, using: Entities::RepoDiff do |compare, options|
+ expose :diffs, using: Entities::Diff do |compare, options|
compare.diffs(limits: false).to_a
end
@@ -763,7 +906,10 @@ module API
expose(:default_project_visibility) { |setting, _options| Gitlab::VisibilityLevel.string_level(setting.default_project_visibility) }
expose(:default_snippet_visibility) { |setting, _options| Gitlab::VisibilityLevel.string_level(setting.default_snippet_visibility) }
expose(:default_group_visibility) { |setting, _options| Gitlab::VisibilityLevel.string_level(setting.default_group_visibility) }
- expose :password_authentication_enabled, as: :signin_enabled
+
+ # support legacy names, can be removed in v5
+ expose :password_authentication_enabled_for_web, as: :password_authentication_enabled
+ expose :password_authentication_enabled_for_web, as: :signin_enabled
end
class Release < Grape::Entity
@@ -771,10 +917,10 @@ module API
expose :description
end
- class RepoTag < Grape::Entity
+ class Tag < Grape::Entity
expose :name, :message
- expose :commit, using: Entities::RepoCommit do |repo_tag, options|
+ expose :commit, using: Entities::Commit do |repo_tag, options|
options[:project].repository.commit(repo_tag.dereferenced_target)
end
@@ -789,6 +935,8 @@ module API
expose :active
expose :is_shared
expose :name
+ expose :online?, as: :online
+ expose :status
end
class RunnerDetails < Runner
@@ -816,24 +964,28 @@ module API
expose :filename, :size
end
- class PipelineBasic < Grape::Entity
- expose :id, :sha, :ref, :status
- end
-
- class Job < Grape::Entity
+ class JobBasic < Grape::Entity
expose :id, :status, :stage, :name, :ref, :tag, :coverage
expose :created_at, :started_at, :finished_at
+ expose :duration
expose :user, with: User
+ expose :commit, with: Commit
+ expose :pipeline, with: PipelineBasic
+ end
+
+ class Job < JobBasic
expose :artifacts_file, using: JobArtifactFile, if: -> (job, opts) { job.artifacts? }
- expose :commit, with: RepoCommit
expose :runner, with: Runner
- expose :pipeline, with: PipelineBasic
+ end
+
+ class JobBasicWithProject < JobBasic
+ expose :project, with: ProjectIdentity
end
class Trigger < Grape::Entity
expose :id
expose :token, :description
- expose :created_at, :updated_at, :deleted_at, :last_used
+ expose :created_at, :updated_at, :last_used
expose :owner, using: Entities::UserBasic
end
@@ -878,7 +1030,7 @@ module API
expose :deployable, using: Entities::Job
end
- class RepoLicense < Grape::Entity
+ class License < Grape::Entity
expose :key, :name, :nickname
expose :featured, as: :popular
expose :url, as: :html_url
@@ -986,13 +1138,9 @@ module API
expose :type, :url, :username, :password
end
- class ArtifactFile < Grape::Entity
- expose :filename, :size
- end
-
class Dependency < Grape::Entity
expose :id, :name, :token
- expose :artifacts_file, using: ArtifactFile, if: ->(job, _) { job.artifacts? }
+ expose :artifacts_file, using: JobArtifactFile, if: ->(job, _) { job.artifacts? }
end
class Response < Grape::Entity
@@ -1020,6 +1168,7 @@ module API
expose :cache, using: Cache
expose :credentials, using: Credentials
expose :dependencies, using: Dependency
+ expose :features
end
end
@@ -1034,5 +1183,90 @@ module API
expose :failing_on_hosts
expose :total_failures
end
+
+ class CustomAttribute < Grape::Entity
+ expose :key
+ expose :value
+ end
+
+ class PagesDomainCertificateExpiration < Grape::Entity
+ expose :expired?, as: :expired
+ expose :expiration
+ end
+
+ class PagesDomainCertificate < Grape::Entity
+ expose :subject
+ expose :expired?, as: :expired
+ expose :certificate
+ expose :certificate_text
+ end
+
+ class PagesDomainBasic < Grape::Entity
+ expose :domain
+ expose :url
+ expose :project_id
+ expose :verified?, as: :verified
+ expose :verification_code, as: :verification_code
+ expose :enabled_until
+
+ expose :certificate,
+ as: :certificate_expiration,
+ if: ->(pages_domain, _) { pages_domain.certificate? },
+ using: PagesDomainCertificateExpiration do |pages_domain|
+ pages_domain
+ end
+ end
+
+ class PagesDomain < Grape::Entity
+ expose :domain
+ expose :url
+ expose :verified?, as: :verified
+ expose :verification_code, as: :verification_code
+ expose :enabled_until
+
+ expose :certificate,
+ if: ->(pages_domain, _) { pages_domain.certificate? },
+ using: PagesDomainCertificate do |pages_domain|
+ pages_domain
+ end
+ end
+
+ class Application < Grape::Entity
+ expose :uid, as: :application_id
+ expose :redirect_uri, as: :callback_url
+ end
+
+ # Use with care, this exposes the secret
+ class ApplicationWithSecret < Application
+ expose :secret
+ end
+
+ class Blob < Grape::Entity
+ expose :basename
+ expose :data
+ expose :filename
+ expose :id
+ expose :ref
+ expose :startline
+ expose :project_id
+ end
+
+ class BasicBadgeDetails < Grape::Entity
+ expose :link_url
+ expose :image_url
+ expose :rendered_link_url do |badge, options|
+ badge.rendered_link_url(options.fetch(:project, nil))
+ end
+ expose :rendered_image_url do |badge, options|
+ badge.rendered_image_url(options.fetch(:project, nil))
+ end
+ end
+
+ class Badge < BasicBadgeDetails
+ expose :id
+ expose :kind do |badge|
+ badge.type == 'ProjectBadge' ? 'project' : 'group'
+ end
+ end
end
end
diff --git a/lib/api/group_boards.rb b/lib/api/group_boards.rb
new file mode 100644
index 00000000000..aa9fff25fc8
--- /dev/null
+++ b/lib/api/group_boards.rb
@@ -0,0 +1,117 @@
+module API
+ class GroupBoards < Grape::API
+ include BoardsResponses
+ include PaginationParams
+
+ before do
+ authenticate!
+ end
+
+ helpers do
+ def board_parent
+ user_group
+ end
+ end
+
+ params do
+ requires :id, type: String, desc: 'The ID of a group'
+ end
+
+ resource :groups, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
+ segment ':id/boards' do
+ desc 'Find a group board' do
+ detail 'This feature was introduced in 10.6'
+ success ::API::Entities::Board
+ end
+ get '/:board_id' do
+ present board, with: ::API::Entities::Board
+ end
+
+ desc 'Get all group boards' do
+ detail 'This feature was introduced in 10.6'
+ success Entities::Board
+ end
+ params do
+ use :pagination
+ end
+ get '/' do
+ present paginate(board_parent.boards), with: Entities::Board
+ end
+ end
+
+ params do
+ requires :board_id, type: Integer, desc: 'The ID of a board'
+ end
+ segment ':id/boards/:board_id' do
+ desc 'Get the lists of a group board' do
+ detail 'Does not include backlog and closed lists. This feature was introduced in 10.6'
+ success Entities::List
+ end
+ params do
+ use :pagination
+ end
+ get '/lists' do
+ present paginate(board_lists), with: Entities::List
+ end
+
+ desc 'Get a list of a group board' do
+ detail 'This feature was introduced in 10.6'
+ success Entities::List
+ end
+ params do
+ requires :list_id, type: Integer, desc: 'The ID of a list'
+ end
+ get '/lists/:list_id' do
+ present board_lists.find(params[:list_id]), with: Entities::List
+ end
+
+ desc 'Create a new board list' do
+ detail 'This feature was introduced in 10.6'
+ success Entities::List
+ end
+ params do
+ requires :label_id, type: Integer, desc: 'The ID of an existing label'
+ end
+ post '/lists' do
+ unless available_labels_for(board_parent).exists?(params[:label_id])
+ render_api_error!({ error: 'Label not found!' }, 400)
+ end
+
+ authorize!(:admin_list, user_group)
+
+ create_list
+ end
+
+ desc 'Moves a board list to a new position' do
+ detail 'This feature was introduced in 10.6'
+ success Entities::List
+ end
+ params do
+ requires :list_id, type: Integer, desc: 'The ID of a list'
+ requires :position, type: Integer, desc: 'The position of the list'
+ end
+ put '/lists/:list_id' do
+ list = board_lists.find(params[:list_id])
+
+ authorize!(:admin_list, user_group)
+
+ move_list(list)
+ end
+
+ desc 'Delete a board list' do
+ detail 'This feature was introduced in 10.6'
+ success Entities::List
+ end
+ params do
+ requires :list_id, type: Integer, desc: 'The ID of a board list'
+ end
+ delete "/lists/:list_id" do
+ authorize!(:admin_list, user_group)
+ list = board_lists.find(params[:list_id])
+
+ destroy_list(list)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/groups.rb b/lib/api/groups.rb
index e817dcbbc4b..4a4df1b8b9e 100644
--- a/lib/api/groups.rb
+++ b/lib/api/groups.rb
@@ -1,6 +1,7 @@
module API
class Groups < Grape::API
include PaginationParams
+ include Helpers::CustomAttributes
before { authenticate_non_get! }
@@ -25,22 +26,7 @@ module API
optional :statistics, type: Boolean, default: false, desc: 'Include project statistics'
end
- def present_groups(groups, options = {})
- options = options.reverse_merge(
- with: Entities::Group,
- current_user: current_user
- )
-
- groups = groups.with_statistics if options[:statistics]
- present paginate(groups), options
- end
- end
-
- resource :groups do
- desc 'Get a groups list' do
- success Entities::Group
- end
- params do
+ params :group_list_params do
use :statistics_params
optional :skip_groups, type: Array[Integer], desc: 'Array of group ids to exclude from list'
optional :all_available, type: Boolean, desc: 'Show all groups that you have access to'
@@ -50,14 +36,57 @@ module API
optional :sort, type: String, values: %w[asc desc], default: 'asc', desc: 'Sort by asc (ascending) or desc (descending)'
use :pagination
end
- get do
- find_params = { all_available: params[:all_available], owned: params[:owned] }
+
+ def find_groups(params)
+ find_params = {
+ all_available: params[:all_available],
+ custom_attributes: params[:custom_attributes],
+ owned: params[:owned]
+ }
+ find_params[:parent] = find_group!(params[:id]) if params[:id]
+
groups = GroupsFinder.new(current_user, find_params).execute
groups = groups.search(params[:search]) if params[:search].present?
groups = groups.where.not(id: params[:skip_groups]) if params[:skip_groups].present?
groups = groups.reorder(params[:order_by] => params[:sort])
- present_groups groups, statistics: params[:statistics] && current_user.admin?
+ groups
+ end
+
+ def find_group_projects(params)
+ group = find_group!(params[:id])
+ projects = GroupProjectsFinder.new(group: group, current_user: current_user, params: project_finder_params).execute
+ projects = reorder_projects(projects)
+ paginate(projects)
+ end
+
+ def present_groups(params, groups)
+ options = {
+ with: Entities::Group,
+ current_user: current_user,
+ statistics: params[:statistics] && current_user.admin?
+ }
+
+ groups = groups.with_statistics if options[:statistics]
+ groups, options = with_custom_attributes(groups, options)
+
+ present paginate(groups), options
+ end
+ end
+
+ resource :groups do
+ include CustomAttributesEndpoints
+
+ desc 'Get a groups list' do
+ success Entities::Group
+ end
+ params do
+ use :group_list_params
+ use :with_custom_attributes
+ end
+ get do
+ groups = find_groups(params)
+ present_groups params, groups
end
desc 'Create a group. Available only for users who can create groups.' do
@@ -117,9 +146,20 @@ module API
desc 'Get a single group, with containing projects.' do
success Entities::GroupDetail
end
+ params do
+ use :with_custom_attributes
+ end
get ":id" do
group = find_group!(params[:id])
- present group, with: Entities::GroupDetail, current_user: current_user
+
+ options = {
+ with: Entities::GroupDetail,
+ current_user: current_user
+ }
+
+ group, options = with_custom_attributes(group, options)
+
+ present group, options
end
desc 'Remove a group.'
@@ -150,13 +190,31 @@ module API
optional :starred, type: Boolean, default: false, desc: 'Limit by starred status'
use :pagination
+ use :with_custom_attributes
end
get ":id/projects" do
- group = find_group!(params[:id])
- projects = GroupProjectsFinder.new(group: group, current_user: current_user, params: project_finder_params).execute
- projects = reorder_projects(projects)
- entity = params[:simple] ? Entities::BasicProjectDetails : Entities::Project
- present paginate(projects), with: entity, current_user: current_user
+ projects = find_group_projects(params)
+
+ options = {
+ with: params[:simple] ? Entities::BasicProjectDetails : Entities::Project,
+ current_user: current_user
+ }
+
+ projects, options = with_custom_attributes(projects, options)
+
+ present options[:with].prepare_relation(projects), options
+ end
+
+ desc 'Get a list of subgroups in this group.' do
+ success Entities::Group
+ end
+ params do
+ use :group_list_params
+ use :with_custom_attributes
+ end
+ get ":id/subgroups" do
+ groups = find_groups(params)
+ present_groups params, groups
end
desc 'Transfer a project to the group namespace. Available only for admin.' do
diff --git a/lib/api/helpers.rb b/lib/api/helpers.rb
index 00dbc2aee7a..e4fca77ab5d 100644
--- a/lib/api/helpers.rb
+++ b/lib/api/helpers.rb
@@ -5,6 +5,7 @@ module API
SUDO_HEADER = "HTTP_SUDO".freeze
SUDO_PARAM = :sudo
+ API_USER_ENV = 'gitlab.api.user'.freeze
def declared_params(options = {})
options = { include_parent_namespaces: false }.merge(options)
@@ -25,6 +26,7 @@ module API
check_unmodified_since!(last_updated)
status 204
+
if block_given?
yield resource
else
@@ -32,6 +34,11 @@ module API
end
end
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ # We can't rewrite this with StrongMemoize because `sudo!` would
+ # actually write to `@current_user`, and `sudo?` would immediately
+ # call `current_user` again which reads from `@current_user`.
+      # We should rewrite this in a way that makes using StrongMemoize possible
def current_user
return @current_user if defined?(@current_user)
@@ -41,13 +48,26 @@ module API
sudo!
+ validate_access_token!(scopes: scopes_registered_for_endpoint) unless sudo?
+
+ save_current_user_in_env(@current_user) if @current_user
+
@current_user
end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
+
+ def save_current_user_in_env(user)
+ env[API_USER_ENV] = { user_id: user.id, username: user.username }
+ end
def sudo?
initial_current_user != current_user
end
+ def user_namespace
+ @user_namespace ||= find_namespace!(params[:id])
+ end
+
def user_group
@group ||= find_group!(params[:id])
end
@@ -57,13 +77,20 @@ module API
end
def wiki_page
- page = user_project.wiki.find_page(params[:slug])
+ page = ProjectWiki.new(user_project, current_user).find_page(params[:slug])
page || not_found!('Wiki Page')
end
- def available_labels
- @available_labels ||= LabelsFinder.new(current_user, project_id: user_project.id).execute
+ def available_labels_for(label_parent)
+ search_params =
+ if label_parent.is_a?(Project)
+ { project_id: label_parent.id }
+ else
+ { group_id: label_parent.id, only_group_labels: true }
+ end
+
+ LabelsFinder.new(current_user, search_params).execute
end
def find_user(id)
@@ -110,8 +137,28 @@ module API
end
end
+ def find_namespace(id)
+ if id.to_s =~ /^\d+$/
+ Namespace.find_by(id: id)
+ else
+ Namespace.find_by_full_path(id)
+ end
+ end
+
+ def find_namespace!(id)
+ namespace = find_namespace(id)
+
+ if can?(current_user, :read_namespace, namespace)
+ namespace
+ else
+ not_found!('Namespace')
+ end
+ end
+
def find_project_label(id)
- label = available_labels.find_by_id(id) || available_labels.find_by_title(id)
+ labels = available_labels_for(user_project)
+ label = labels.find_by_id(id) || labels.find_by_title(id)
+
label || not_found!('Label')
end
@@ -125,7 +172,7 @@ module API
def find_project_snippet(id)
finder_params = { project: user_project }
- SnippetsFinder.new(current_user, finder_params).execute.find(id)
+ SnippetsFinder.new(current_user, finder_params).find(id)
end
def find_merge_request_with_access(iid, access_level = :read_merge_request)
@@ -139,7 +186,7 @@ module API
end
def authenticate!
- unauthorized! unless current_user && can?(initial_current_user, :access_api)
+ unauthorized! unless current_user
end
def authenticate_non_get!
@@ -153,6 +200,11 @@ module API
end
end
+ def authenticated_with_full_private_access!
+ authenticate!
+ forbidden! unless current_user.full_private_access?
+ end
+
def authenticated_as_admin!
authenticate!
forbidden! unless current_user.admin?
@@ -184,6 +236,14 @@ module API
end
end
+ def require_pages_enabled!
+ not_found! unless user_project.pages_available?
+ end
+
+ def require_pages_config_enabled!
+ not_found! unless Gitlab.config.pages.enabled
+ end
+
def can?(object, action, subject = :global)
Ability.allowed?(object, action, subject)
end
@@ -285,7 +345,7 @@ module API
if sentry_enabled? && report_exception?(exception)
define_params_for_grape_middleware
sentry_context
- Raven.capture_exception(exception)
+ Raven.capture_exception(exception, extra: params)
end
# lifted from https://github.com/rails/rails/blob/master/actionpack/lib/action_dispatch/middleware/debug_exceptions.rb#L60
@@ -322,6 +382,7 @@ module API
finder_params[:archived] = params[:archived]
finder_params[:search] = params[:search] if params[:search]
finder_params[:user] = params.delete(:user) if params[:user]
+ finder_params[:custom_attributes] = params[:custom_attributes] if params[:custom_attributes]
finder_params
end
@@ -377,59 +438,37 @@ module API
private
- def private_token
- params[APIGuard::PRIVATE_TOKEN_PARAM] || env[APIGuard::PRIVATE_TOKEN_HEADER]
- end
-
- def warden
- env['warden']
- end
-
- # Check if the request is GET/HEAD, or if CSRF token is valid.
- def verified_request?
- Gitlab::RequestForgeryProtection.verified?(env)
- end
-
- # Check the Rails session for valid authentication details
- def find_user_from_warden
- warden.try(:authenticate) if verified_request?
- end
-
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
def initial_current_user
return @initial_current_user if defined?(@initial_current_user)
- Gitlab::Auth::UniqueIpsLimiter.limit_user! do
- @initial_current_user ||= find_user_by_private_token(scopes: scopes_registered_for_endpoint)
- @initial_current_user ||= doorkeeper_guard(scopes: scopes_registered_for_endpoint)
- @initial_current_user ||= find_user_from_warden
-
- unless @initial_current_user && Gitlab::UserAccess.new(@initial_current_user).allowed?
- @initial_current_user = nil
- end
- @initial_current_user
+ begin
+ @initial_current_user = Gitlab::Auth::UniqueIpsLimiter.limit_user! { find_current_user! }
+ rescue Gitlab::Auth::UnauthorizedError
+ unauthorized!
end
end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
def sudo!
return unless sudo_identifier
- return unless initial_current_user
+
+ unauthorized! unless initial_current_user
unless initial_current_user.admin?
forbidden!('Must be admin to use sudo')
end
- # Only private tokens should be used for the SUDO feature
- unless private_token == initial_current_user.private_token
- forbidden!('Private token must be specified in order to use sudo')
+ unless access_token
+ forbidden!('Must be authenticated using an OAuth or Personal Access Token to use sudo')
end
+ validate_access_token!(scopes: [:sudo])
+
sudoed_user = find_user(sudo_identifier)
+ not_found!("User with ID or username '#{sudo_identifier}'") unless sudoed_user
- if sudoed_user
- @current_user = sudoed_user
- else
- not_found!("No user id or username for: #{sudo_identifier}")
- end
+ @current_user = sudoed_user # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
def sudo_identifier
@@ -454,10 +493,12 @@ module API
header(*Gitlab::Workhorse.send_artifacts_entry(build, entry))
end
- # The Grape Error Middleware only has access to env but no params. We workaround this by
- # defining a method that returns the right value.
+      # The Grape Error Middleware only has access to `env`, but not `params` or
+      # `request`. We work around this by defining methods that return the right
+      # values.
def define_params_for_grape_middleware
- self.define_singleton_method(:params) { Rack::Request.new(env).params.symbolize_keys }
+ self.define_singleton_method(:request) { Rack::Request.new(env) }
+ self.define_singleton_method(:params) { request.params.symbolize_keys }
end
# We could get a Grape or a standard Ruby exception. We should only report anything that
@@ -467,22 +508,5 @@ module API
exception.status == 500
end
-
- # An array of scopes that were registered (using `allow_access_with_scope`)
- # for the current endpoint class. It also returns scopes registered on
- # `API::API`, since these are meant to apply to all API routes.
- def scopes_registered_for_endpoint
- @scopes_registered_for_endpoint ||=
- begin
- endpoint_classes = [options[:for].presence, ::API::API].compact
- endpoint_classes.reduce([]) do |memo, endpoint|
- if endpoint.respond_to?(:allowed_scopes)
- memo.concat(endpoint.allowed_scopes)
- else
- memo
- end
- end
- end
- end
end
end
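With the sudo! rewrite above, impersonation now requires an OAuth or personal access token carrying the `sudo` scope instead of the old private token check, and an unknown target user yields a 404. For illustration only, a minimal client-side sketch; the hostname, token variable and username are invented, and the `Sudo` header name follows the general GitLab API convention rather than anything shown in this hunk:

require 'net/http'
require 'json'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects')
request = Net::HTTP::Get.new(uri)
request['Private-Token'] = ENV.fetch('GITLAB_TOKEN') # token must include the sudo scope
request['Sudo'] = 'some_username'                    # user to impersonate (ID or username)

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
  http.request(request)
end

puts JSON.parse(response.body).size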
diff --git a/lib/api/helpers/badges_helpers.rb b/lib/api/helpers/badges_helpers.rb
new file mode 100644
index 00000000000..1f8afbf3c90
--- /dev/null
+++ b/lib/api/helpers/badges_helpers.rb
@@ -0,0 +1,28 @@
+module API
+ module Helpers
+ module BadgesHelpers
+ include ::API::Helpers::MembersHelpers
+
+ def find_badge(source)
+ source.badges.find(params[:badge_id])
+ end
+
+ def present_badges(source, records, options = {})
+ entity_type = options[:with] || Entities::Badge
+ badge_params = badge_source_params(source).merge(with: entity_type)
+
+ present records, badge_params
+ end
+
+ def badge_source_params(source)
+ project = if source.is_a?(Project)
+ source
+ else
+ GroupProjectsFinder.new(group: source, current_user: current_user).execute.first
+ end
+
+ { project: project }
+ end
+ end
+ end
+end
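The helper above is meant to be mixed into badge endpoints shared by groups and projects. A condensed, hypothetical sketch of an endpoint consuming it (the surrounding class, route and `user_project` lookup are simplified and not part of this diff):

module API
  class ExampleBadges < Grape::API
    include PaginationParams
    helpers ::API::Helpers::BadgesHelpers

    params do
      requires :id, type: String, desc: 'The ID of a project'
      use :pagination
    end
    resource :projects do
      desc 'List all badges of a project (illustrative only)'
      get ':id/badges' do
        source = user_project
        badges = source.badges

        # present_badges resolves the project context and defaults to Entities::Badge
        present_badges(source, paginate(badges))
      end
    end
  end
end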
diff --git a/lib/api/helpers/common_helpers.rb b/lib/api/helpers/common_helpers.rb
index 322624c6092..9993caa5249 100644
--- a/lib/api/helpers/common_helpers.rb
+++ b/lib/api/helpers/common_helpers.rb
@@ -3,8 +3,10 @@ module API
module CommonHelpers
def convert_parameters_from_legacy_format(params)
params.tap do |params|
- if params[:assignee_id].present?
- params[:assignee_ids] = [params.delete(:assignee_id)]
+ assignee_id = params.delete(:assignee_id)
+
+ if assignee_id.present?
+ params[:assignee_ids] = [assignee_id]
end
end
end
diff --git a/lib/api/helpers/custom_attributes.rb b/lib/api/helpers/custom_attributes.rb
new file mode 100644
index 00000000000..70e4eda95f8
--- /dev/null
+++ b/lib/api/helpers/custom_attributes.rb
@@ -0,0 +1,28 @@
+module API
+ module Helpers
+ module CustomAttributes
+ extend ActiveSupport::Concern
+
+ included do
+ helpers do
+ params :with_custom_attributes do
+ optional :with_custom_attributes, type: Boolean, default: false, desc: 'Include custom attributes in the response'
+ end
+
+ def with_custom_attributes(collection_or_resource, options = {})
+ options = options.merge(
+ with_custom_attributes: params[:with_custom_attributes] &&
+ can?(current_user, :read_custom_attribute)
+ )
+
+ if options[:with_custom_attributes] && collection_or_resource.is_a?(ActiveRecord::Relation)
+ collection_or_resource = collection_or_resource.includes(:custom_attributes)
+ end
+
+ [collection_or_resource, options]
+ end
+ end
+ end
+ end
+ end
+end
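Endpoints opt into this concern by including it and reusing the shared `:with_custom_attributes` parameter block, as lib/api/projects.rb does further down in this diff. A condensed sketch with invented model and entity names:

module API
  class Examples < Grape::API
    include PaginationParams
    include Helpers::CustomAttributes

    params do
      use :pagination
      use :with_custom_attributes
    end
    get 'examples' do
      # Preloads custom_attributes and flips the option only when the user may read them
      examples, options = with_custom_attributes(Example.all, with: Entities::Example)

      present paginate(examples), options
    end
  end
end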
diff --git a/lib/api/helpers/custom_validators.rb b/lib/api/helpers/custom_validators.rb
index 0a8f3073a50..dd4f6c41131 100644
--- a/lib/api/helpers/custom_validators.rb
+++ b/lib/api/helpers/custom_validators.rb
@@ -4,6 +4,7 @@ module API
class Absence < Grape::Validations::Base
def validate_param!(attr_name, params)
return if params.respond_to?(:key?) && !params.key?(attr_name)
+
raise Grape::Exceptions::Validation, params: [@scope.full_name(attr_name)], message: message(:absence)
end
end
diff --git a/lib/api/helpers/internal_helpers.rb b/lib/api/helpers/internal_helpers.rb
index 4c0db4d42b1..cd59da6fc70 100644
--- a/lib/api/helpers/internal_helpers.rb
+++ b/lib/api/helpers/internal_helpers.rb
@@ -1,23 +1,16 @@
module API
module Helpers
module InternalHelpers
- SSH_GITALY_FEATURES = {
- 'git-receive-pack' => :ssh_receive_pack,
- 'git-upload-pack' => :ssh_upload_pack
- }.freeze
+ attr_reader :redirected_path
def wiki?
- set_project unless defined?(@wiki)
- @wiki
+ set_project unless defined?(@wiki) # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ @wiki # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
def project
- set_project unless defined?(@project)
- @project
- end
-
- def redirected_path
- @redirected_path
+ set_project unless defined?(@project) # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ @project # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
def ssh_authentication_abilities
@@ -36,6 +29,18 @@ module API
{}
end
+ def fix_git_env_repository_paths(env, repository_path)
+ if obj_dir_relative = env['GIT_OBJECT_DIRECTORY_RELATIVE'].presence
+ env['GIT_OBJECT_DIRECTORY'] = File.join(repository_path, obj_dir_relative)
+ end
+
+ if alt_obj_dirs_relative = env['GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE'].presence
+ env['GIT_ALTERNATE_OBJECT_DIRECTORIES'] = alt_obj_dirs_relative.map { |dir| File.join(repository_path, dir) }
+ end
+
+ env
+ end
+
def log_user_activity(actor)
commands = Gitlab::GitAccess::DOWNLOAD_COMMANDS
@@ -55,8 +60,21 @@ module API
false
end
+ def project_path
+ project&.path || project_path_match[:project_path]
+ end
+
+ def namespace_path
+ project&.namespace&.full_path || project_path_match[:namespace_path]
+ end
+
private
+ def project_path_match
+ @project_path_match ||= params[:project].match(Gitlab::PathRegex.full_project_git_path_regex) || {}
+ end
+
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
def set_project
if params[:gl_repository]
@project, @wiki = Gitlab::GlRepository.parse(params[:gl_repository])
@@ -65,6 +83,7 @@ module API
@project, @wiki, @redirected_path = Gitlab::RepoPath.parse(params[:project])
end
end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
# Project id to pass between components that don't share/don't have
# access to the same filesystem mounts
@@ -90,8 +109,14 @@ module API
# Return the Gitaly Address if it is enabled
def gitaly_payload(action)
- feature = SSH_GITALY_FEATURES[action]
- return unless feature && Gitlab::GitalyClient.feature_enabled?(feature)
+ return unless %w[git-receive-pack git-upload-pack].include?(action)
+
+ if action == 'git-receive-pack'
+ return unless Gitlab::GitalyClient.feature_enabled?(
+ :ssh_receive_pack,
+ status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT
+ )
+ end
{
repository: repository.gitaly_repository,
diff --git a/lib/api/helpers/pagination.rb b/lib/api/helpers/pagination.rb
index 95108292aac..09805049169 100644
--- a/lib/api/helpers/pagination.rb
+++ b/lib/api/helpers/pagination.rb
@@ -2,6 +2,8 @@ module API
module Helpers
module Pagination
def paginate(relation)
+ relation = add_default_order(relation)
+
relation.page(params[:page]).per(params[:per_page]).tap do |data|
add_pagination_headers(data)
end
@@ -10,13 +12,16 @@ module API
private
def add_pagination_headers(paginated_data)
- header 'X-Total', paginated_data.total_count.to_s
- header 'X-Total-Pages', total_pages(paginated_data).to_s
header 'X-Per-Page', paginated_data.limit_value.to_s
header 'X-Page', paginated_data.current_page.to_s
header 'X-Next-Page', paginated_data.next_page.to_s
header 'X-Prev-Page', paginated_data.prev_page.to_s
header 'Link', pagination_links(paginated_data)
+
+ return if data_without_counts?(paginated_data)
+
+ header 'X-Total', paginated_data.total_count.to_s
+ header 'X-Total-Pages', total_pages(paginated_data).to_s
end
def pagination_links(paginated_data)
@@ -35,8 +40,10 @@ module API
request_params[:page] = 1
links << %(<#{request_url}?#{request_params.to_query}>; rel="first")
- request_params[:page] = total_pages(paginated_data)
- links << %(<#{request_url}?#{request_params.to_query}>; rel="last")
+ unless data_without_counts?(paginated_data)
+ request_params[:page] = total_pages(paginated_data)
+ links << %(<#{request_url}?#{request_params.to_query}>; rel="last")
+ end
links.join(', ')
end
@@ -45,6 +52,18 @@ module API
# Ensure there is in total at least 1 page
[paginated_data.total_pages, 1].max
end
+
+ def add_default_order(relation)
+ if relation.is_a?(ActiveRecord::Relation) && relation.order_values.empty?
+ relation = relation.order(:id)
+ end
+
+ relation
+ end
+
+ def data_without_counts?(paginated_data)
+ paginated_data.is_a?(Kaminari::PaginatableWithoutCount)
+ end
end
end
end
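The new add_default_order guards against non-deterministic pages: without an ORDER BY, offset pagination can return overlapping or missing rows between requests. A rough illustration using a hypothetical Issue model:

relation = Issue.where(project_id: 1)   # no explicit ordering
relation.order_values.empty?            # => true

relation = relation.order(:id)          # what add_default_order applies
relation.page(2).per(20).to_sql
# => "SELECT ... WHERE project_id = 1 ORDER BY id LIMIT 20 OFFSET 20" (roughly)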
diff --git a/lib/api/helpers/runner.rb b/lib/api/helpers/runner.rb
index 282af32ca94..35ac0b4cbca 100644
--- a/lib/api/helpers/runner.rb
+++ b/lib/api/helpers/runner.rb
@@ -1,46 +1,34 @@
module API
module Helpers
module Runner
- include Gitlab::CurrentSettings
-
JOB_TOKEN_HEADER = 'HTTP_JOB_TOKEN'.freeze
JOB_TOKEN_PARAM = :token
- UPDATE_RUNNER_EVERY = 10 * 60
def runner_registration_token_valid?
ActiveSupport::SecurityUtils.variable_size_secure_compare(params[:token],
- current_application_settings.runners_registration_token)
- end
-
- def get_runner_version_from_params
- return unless params['info'].present?
- attributes_for_keys(%w(name version revision platform architecture), params['info'])
+ Gitlab::CurrentSettings.runners_registration_token)
end
def authenticate_runner!
forbidden! unless current_runner
- end
- def current_runner
- @runner ||= ::Ci::Runner.find_by_token(params[:token].to_s)
+ current_runner
+ .update_cached_info(get_runner_details_from_request)
end
- def update_runner_info
- return unless update_runner?
+ def get_runner_details_from_request
+ return get_runner_ip unless params['info'].present?
- current_runner.contacted_at = Time.now
- current_runner.assign_attributes(get_runner_version_from_params)
- current_runner.save if current_runner.changed?
+ attributes_for_keys(%w(name version revision platform architecture), params['info'])
+ .merge(get_runner_ip)
end
- def update_runner?
- # Use a random threshold to prevent beating DB updates.
- # It generates a distribution between [40m, 80m].
- #
- contacted_at_max_age = UPDATE_RUNNER_EVERY + Random.rand(UPDATE_RUNNER_EVERY)
+ def get_runner_ip
+ { ip_address: request.ip }
+ end
- current_runner.contacted_at.nil? ||
- (Time.now - current_runner.contacted_at) >= contacted_at_max_age
+ def current_runner
+ @runner ||= ::Ci::Runner.find_by_token(params[:token].to_s)
end
def validate_job!(job)
@@ -69,7 +57,7 @@ module API
end
def max_artifacts_size
- current_application_settings.max_artifacts_size.megabytes.to_i
+ Gitlab::CurrentSettings.max_artifacts_size.megabytes.to_i
end
end
end
diff --git a/lib/api/internal.rb b/lib/api/internal.rb
index c0fef56378f..b3660e4a1d0 100644
--- a/lib/api/internal.rb
+++ b/lib/api/internal.rb
@@ -4,6 +4,7 @@ module API
before { authenticate_by_gitlab_shell_token! }
helpers ::API::Helpers::InternalHelpers
+ helpers ::Gitlab::Identifier
namespace 'internal' do
# Check if git command is allowed to project
@@ -12,14 +13,16 @@ module API
# key_id - ssh key id for Git over SSH
# user_id - user id for Git over HTTP
# protocol - Git access protocol being used, e.g. HTTP or SSH
- # project - project path with namespace
+ # project - project full_path (not path on disk)
# action - git action (git-upload-pack or git-receive-pack)
# changes - changes as "oldrev newrev ref", see Gitlab::ChangesList
post "/allowed" do
status 200
# Stores some Git-specific env thread-safely
- Gitlab::Git::Env.set(parse_env)
+ env = parse_env
+ env = fix_git_env_repository_paths(env, repository_path) if project
+ Gitlab::Git::Env.set(env)
actor =
if params[:key_id]
@@ -31,13 +34,22 @@ module API
protocol = params[:protocol]
actor.update_last_used_at if actor.is_a?(Key)
+ user =
+ if actor.is_a?(Key)
+ actor.user
+ else
+ actor
+ end
access_checker_klass = wiki? ? Gitlab::GitAccessWiki : Gitlab::GitAccess
- access_checker = access_checker_klass
- .new(actor, project, protocol, authentication_abilities: ssh_authentication_abilities, redirected_path: redirected_path)
+ access_checker = access_checker_klass.new(actor, project,
+ protocol, authentication_abilities: ssh_authentication_abilities,
+ namespace_path: namespace_path, project_path: project_path,
+ redirected_path: redirected_path)
begin
access_checker.check(params[:action], params[:changes])
+ @project ||= access_checker.project
rescue Gitlab::GitAccess::UnauthorizedError, Gitlab::GitAccess::NotFoundError => e
return { status: false, message: e.message }
end
@@ -47,6 +59,7 @@ module API
{
status: true,
gl_repository: gl_repository,
+ gl_username: user&.username,
repository_path: repository_path,
gitaly: gitaly_payload(params[:action])
}
@@ -72,6 +85,18 @@ module API
end
#
+ # Get a ssh key using the fingerprint
+ #
+ get "/authorized_keys" do
+ fingerprint = params.fetch(:fingerprint) do
+ Gitlab::InsecureKeyFingerprint.new(params.fetch(:key)).fingerprint
+ end
+ key = Key.find_by(fingerprint: fingerprint)
+ not_found!("Key") if key.nil?
+ present key, with: Entities::SSHKey
+ end
+
+ #
# Discover user by ssh key or user id
#
get "/discover" do
@@ -81,6 +106,7 @@ module API
elsif params[:user_id]
user = User.find_by(id: params[:user_id])
end
+
present user, with: Entities::UserSafe
end
@@ -136,7 +162,7 @@ module API
codes = nil
- ::Users::UpdateService.new(user).execute! do |user|
+ ::Users::UpdateService.new(current_user, user: user).execute! do |user|
codes = user.generate_otp_backup_codes!
end
@@ -167,17 +193,31 @@ module API
post '/post_receive' do
status 200
-
PostReceive.perform_async(params[:gl_repository], params[:identifier],
params[:changes])
broadcast_message = BroadcastMessage.current&.last&.message
reference_counter_decreased = Gitlab::ReferenceCounter.new(params[:gl_repository]).decrease
- {
+ output = {
merge_request_urls: merge_request_urls,
broadcast_message: broadcast_message,
reference_counter_decreased: reference_counter_decreased
}
+
+ project = Gitlab::GlRepository.parse(params[:gl_repository]).first
+ user = identify(params[:identifier])
+
+ # A user is not guaranteed to be returned; an orphaned write deploy
+ # key could be used
+ if user
+ redirect_message = Gitlab::Checks::ProjectMoved.fetch_message(user.id, project.id)
+ project_created_message = Gitlab::Checks::ProjectCreated.fetch_message(user.id, project.id)
+
+ output[:redirected_message] = redirect_message if redirect_message
+ output[:project_created_message] = project_created_message if project_created_message
+ end
+
+ output
end
end
end
diff --git a/lib/api/issues.rb b/lib/api/issues.rb
index 1729df2aad0..f74b3b26802 100644
--- a/lib/api/issues.rb
+++ b/lib/api/issues.rb
@@ -8,7 +8,7 @@ module API
helpers do
def find_issues(args = {})
- args = params.merge(args)
+ args = declared_params.merge(args)
args.delete(:id)
args[:milestone_title] = args.delete(:milestone)
@@ -32,6 +32,8 @@ module API
optional :search, type: String, desc: 'Search issues for text present in the title or description'
optional :created_after, type: DateTime, desc: 'Return issues created after the specified time'
optional :created_before, type: DateTime, desc: 'Return issues created before the specified time'
+ optional :updated_after, type: DateTime, desc: 'Return issues updated after the specified time'
+ optional :updated_before, type: DateTime, desc: 'Return issues updated before the specified time'
optional :author_id, type: Integer, desc: 'Return issues which are authored by the user with the given ID'
optional :assignee_id, type: Integer, desc: 'Return issues which are assigned to the user with the given ID'
optional :scope, type: String, values: %w[created-by-me assigned-to-me all],
@@ -48,6 +50,7 @@ module API
optional :labels, type: String, desc: 'Comma-separated list of label names'
optional :due_date, type: String, desc: 'Date string in the format YEAR-MONTH-DAY'
optional :confidential, type: Boolean, desc: 'Boolean parameter if the issue should be confidential'
+ optional :discussion_locked, type: Boolean, desc: " Boolean parameter indicating if the issue's discussion is locked"
end
params :issue_params do
@@ -67,7 +70,7 @@ module API
desc: 'Return issues for the given scope: `created-by-me`, `assigned-to-me` or `all`'
end
get do
- issues = find_issues
+ issues = paginate(find_issues)
options = {
with: Entities::IssueBasic,
@@ -75,7 +78,7 @@ module API
issuable_metadata: issuable_meta_data(issues, 'Issue')
}
- present paginate(issues), options
+ present issues, options
end
end
@@ -94,7 +97,7 @@ module API
get ":id/issues" do
group = find_group!(params[:id])
- issues = find_issues(group_id: group.id)
+ issues = paginate(find_issues(group_id: group.id))
options = {
with: Entities::IssueBasic,
@@ -102,7 +105,7 @@ module API
issuable_metadata: issuable_meta_data(issues, 'Issue')
}
- present paginate(issues), options
+ present issues, options
end
end
@@ -123,7 +126,7 @@ module API
get ":id/issues" do
project = find_project!(params[:id])
- issues = find_issues(project_id: project.id)
+ issues = paginate(find_issues(project_id: project.id))
options = {
with: Entities::IssueBasic,
@@ -132,7 +135,7 @@ module API
issuable_metadata: issuable_meta_data(issues, 'Issue')
}
- present paginate(issues), options
+ present issues, options
end
desc 'Get a single project issue' do
@@ -160,6 +163,10 @@ module API
use :issue_params
end
post ':id/issues' do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42320')
+
+ authorize! :create_issue, user_project
+
# Setting created_at time only allowed for admins and project owners
unless current_user.admin? || user_project.owner == current_user
params.delete(:created_at)
@@ -172,6 +179,7 @@ module API
issue = ::Issues::CreateService.new(user_project,
current_user,
issue_params.merge(request: request, api: true)).execute
+
if issue.spam?
render_api_error!({ error: 'Spam detected' }, 400)
end
@@ -193,10 +201,12 @@ module API
desc: 'Date time when the issue was updated. Available only for admins and project owners.'
optional :state_event, type: String, values: %w[reopen close], desc: 'State of the issue'
use :issue_params
- at_least_one_of :title, :description, :assignee_ids, :assignee_id, :milestone_id,
+ at_least_one_of :title, :description, :assignee_ids, :assignee_id, :milestone_id, :discussion_locked,
:labels, :created_at, :due_date, :confidential, :state_event
end
put ':id/issues/:issue_iid' do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42322')
+
issue = user_project.issues.find_by!(iid: params.delete(:issue_iid))
authorize! :update_issue, issue
@@ -230,6 +240,8 @@ module API
requires :to_project_id, type: Integer, desc: 'The ID of the new project'
end
post ':id/issues/:issue_iid/move' do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42323')
+
issue = user_project.issues.find_by(iid: params[:issue_iid])
not_found!('Issue') unless issue
@@ -254,7 +266,9 @@ module API
authorize!(:destroy_issue, issue)
- destroy_conditionally!(issue)
+ destroy_conditionally!(issue) do |issue|
+ Issuable::DestroyService.new(user_project, current_user).execute(issue)
+ end
end
desc 'List merge requests closing issue' do
@@ -272,6 +286,19 @@ module API
present paginate(merge_requests), with: Entities::MergeRequestBasic, current_user: current_user, project: user_project
end
+ desc 'List participants for an issue' do
+ success Entities::UserBasic
+ end
+ params do
+ requires :issue_iid, type: Integer, desc: 'The internal ID of a project issue'
+ end
+ get ':id/issues/:issue_iid/participants' do
+ issue = find_project_issue(params[:issue_iid])
+ participants = ::Kaminari.paginate_array(issue.participants)
+
+ present paginate(participants), with: Entities::UserBasic, current_user: current_user, project: user_project
+ end
+
desc 'Get the user agent details for an issue' do
success Entities::UserAgentDetail
end
diff --git a/lib/api/job_artifacts.rb b/lib/api/job_artifacts.rb
index 2a8fa7659bf..47e5eeab31d 100644
--- a/lib/api/job_artifacts.rb
+++ b/lib/api/job_artifacts.rb
@@ -2,20 +2,28 @@ module API
class JobArtifacts < Grape::API
before { authenticate_non_get! }
+ # EE::API::JobArtifacts would override the following helpers
+ helpers do
+ def authorize_download_artifacts!
+ authorize_read_builds!
+ end
+ end
+
params do
requires :id, type: String, desc: 'The ID of a project'
end
resource :projects, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
- desc 'Download the artifacts file from a job' do
+ desc 'Download the artifacts archive from a job' do
detail 'This feature was introduced in GitLab 8.10'
end
params do
requires :ref_name, type: String, desc: 'The ref from repository'
requires :job, type: String, desc: 'The name for the job'
end
+ route_setting :authentication, job_token_allowed: true
get ':id/jobs/artifacts/:ref_name/download',
requirements: { ref_name: /.+/ } do
- authorize_read_builds!
+ authorize_download_artifacts!
builds = user_project.latest_successful_builds_for(params[:ref_name])
latest_build = builds.find_by!(name: params[:job])
@@ -23,14 +31,15 @@ module API
present_artifacts!(latest_build.artifacts_file)
end
- desc 'Download the artifacts file from a job' do
+ desc 'Download the artifacts archive from a job' do
detail 'This feature was introduced in GitLab 8.5'
end
params do
requires :job_id, type: Integer, desc: 'The ID of a job'
end
+ route_setting :authentication, job_token_allowed: true
get ':id/jobs/:job_id/artifacts' do
- authorize_read_builds!
+ authorize_download_artifacts!
build = find_build!(params[:job_id])
diff --git a/lib/api/jobs.rb b/lib/api/jobs.rb
index 3c1c412ba42..9c205514b3a 100644
--- a/lib/api/jobs.rb
+++ b/lib/api/jobs.rb
@@ -38,6 +38,7 @@ module API
builds = user_project.builds.order('id DESC')
builds = filter_builds(builds, params[:scope])
+ builds = builds.preload(:user, :job_artifacts_archive, :runner, pipeline: :project)
present paginate(builds), with: Entities::Job
end
@@ -136,7 +137,7 @@ module API
authorize_update_builds!
build = find_build!(params[:job_id])
- authorize!(:update_build, build)
+ authorize!(:erase_build, build)
return forbidden!('Job is not erasable!') unless build.erasable?
build.erase(erased_by: current_user)
diff --git a/lib/api/labels.rb b/lib/api/labels.rb
index e41a1720ac1..81eaf56e48e 100644
--- a/lib/api/labels.rb
+++ b/lib/api/labels.rb
@@ -15,7 +15,7 @@ module API
use :pagination
end
get ':id/labels' do
- present paginate(available_labels), with: Entities::Label, current_user: current_user, project: user_project
+ present paginate(available_labels_for(user_project)), with: Entities::Label, current_user: current_user, project: user_project
end
desc 'Create a new label' do
@@ -30,7 +30,7 @@ module API
post ':id/labels' do
authorize! :admin_label, user_project
- label = available_labels.find_by(title: params[:name])
+ label = available_labels_for(user_project).find_by(title: params[:name])
conflict!('Label already exists') if label
priority = params.delete(:priority)
diff --git a/lib/api/members.rb b/lib/api/members.rb
index 22e4bdead41..8b12986d09e 100644
--- a/lib/api/members.rb
+++ b/lib/api/members.rb
@@ -21,10 +21,11 @@ module API
get ":id/members" do
source = find_source(source_type, params[:id])
- users = source.users
- users = users.merge(User.search(params[:query])) if params[:query]
+ members = source.members.where.not(user_id: nil).includes(:user)
+ members = members.joins(:user).merge(User.search(params[:query])) if params[:query].present?
+ members = paginate(members)
- present paginate(users), with: Entities::Member, source: source
+ present members, with: Entities::Member
end
desc 'Gets a member of a group or project.' do
@@ -39,7 +40,7 @@ module API
members = source.members
member = members.find_by!(user_id: params[:user_id])
- present member.user, with: Entities::Member, member: member
+ present member, with: Entities::Member
end
desc 'Adds a member to a group or project.' do
@@ -59,8 +60,10 @@ module API
member = source.add_user(params[:user_id], params[:access_level], current_user: current_user, expires_at: params[:expires_at])
- if member.persisted? && member.valid?
- present member.user, with: Entities::Member, member: member
+ if !member
+ not_allowed! # This currently can only be reached in EE
+ elsif member.persisted? && member.valid?
+ present member, with: Entities::Member
else
render_validation_error!(member)
end
@@ -78,12 +81,16 @@ module API
source = find_source(source_type, params.delete(:id))
authorize_admin_source!(source_type, source)
- member = source.members.find_by!(user_id: params.delete(:user_id))
+ member = source.members.find_by!(user_id: params[:user_id])
+ updated_member =
+ ::Members::UpdateService
+ .new(current_user, declared_params(include_missing: false))
+ .execute(member)
- if member.update_attributes(declared_params(include_missing: false))
- present member.user, with: Entities::Member, member: member
+ if updated_member.valid?
+ present updated_member, with: Entities::Member
else
- render_validation_error!(member)
+ render_validation_error!(updated_member)
end
end
@@ -96,7 +103,7 @@ module API
member = source.members.find_by!(user_id: params[:user_id])
destroy_conditionally!(member) do
- ::Members::DestroyService.new(source, current_user, declared_params).execute
+ ::Members::DestroyService.new(current_user).execute(member)
end
end
end
diff --git a/lib/api/merge_requests.rb b/lib/api/merge_requests.rb
index 8aa1e0216ee..ead1bb7957b 100644
--- a/lib/api/merge_requests.rb
+++ b/lib/api/merge_requests.rb
@@ -6,9 +6,35 @@ module API
helpers ::Gitlab::IssuableMetadata
+ # EE::API::MergeRequests would override the following helpers
+ helpers do
+ params :optional_params_ee do
+ end
+
+ params :merge_params_ee do
+ end
+
+ def update_merge_request_ee(merge_request)
+ end
+ end
+
+ def self.update_params_at_least_one_of
+ %i[
+ assignee_id
+ description
+ labels
+ milestone_id
+ remove_source_branch
+ state_event
+ target_branch
+ title
+ discussion_locked
+ ]
+ end
+
helpers do
def find_merge_requests(args = {})
- args = params.merge(args)
+ args = declared_params.merge(args)
args[:milestone_title] = args.delete(:milestone)
args[:label_name] = args.delete(:labels)
@@ -21,7 +47,20 @@ module API
return merge_requests if args[:view] == 'simple'
merge_requests
- .preload(:notes, :author, :assignee, :milestone, :merge_request_diff, :labels, :timelogs)
+ .preload(:notes, :author, :assignee, :milestone, :latest_merge_request_diff, :labels, :timelogs)
+ end
+
+ def merge_request_pipelines_with_access
+ authorize! :read_pipeline, user_project
+
+ mr = find_merge_request_with_access(params[:merge_request_iid])
+ mr.all_pipelines
+ end
+
+ def check_sha_param!(params, merge_request)
+ if params[:sha] && merge_request.diff_head_sha != params[:sha]
+ render_api_error!("SHA does not match HEAD of source branch: #{merge_request.diff_head_sha}", 409)
+ end
end
params :merge_requests_params do
@@ -35,12 +74,17 @@ module API
optional :labels, type: String, desc: 'Comma-separated list of label names'
optional :created_after, type: DateTime, desc: 'Return merge requests created after the specified time'
optional :created_before, type: DateTime, desc: 'Return merge requests created before the specified time'
+ optional :updated_after, type: DateTime, desc: 'Return merge requests updated after the specified time'
+ optional :updated_before, type: DateTime, desc: 'Return merge requests updated before the specified time'
optional :view, type: String, values: %w[simple], desc: 'If simple, returns the `iid`, URL, title, description, and basic state of merge request'
optional :author_id, type: Integer, desc: 'Return merge requests which are authored by the user with the given ID'
optional :assignee_id, type: Integer, desc: 'Return merge requests which are assigned to the user with the given ID'
optional :scope, type: String, values: %w[created-by-me assigned-to-me all],
desc: 'Return merge requests for the given scope: `created-by-me`, `assigned-to-me` or `all`'
optional :my_reaction_emoji, type: String, desc: 'Return issues reacted by the authenticated user by the given emoji'
+ optional :source_branch, type: String, desc: 'Return merge requests with the given source branch'
+ optional :target_branch, type: String, desc: 'Return merge requests with the given target branch'
+ optional :search, type: String, desc: 'Search merge requests for text present in the title or description'
use :pagination
end
end
@@ -94,16 +138,14 @@ module API
render_api_error!(errors, 400)
end
- params :optional_params_ce do
+ params :optional_params do
optional :description, type: String, desc: 'The description of the merge request'
optional :assignee_id, type: Integer, desc: 'The ID of a user to assign the merge request'
optional :milestone_id, type: Integer, desc: 'The ID of a milestone to assign the merge request'
optional :labels, type: String, desc: 'Comma-separated list of label names'
optional :remove_source_branch, type: Boolean, desc: 'Remove source branch when merging'
- end
- params :optional_params do
- use :optional_params_ce
+ use :optional_params_ee
end
end
@@ -144,6 +186,8 @@ module API
use :optional_params
end
post ":id/merge_requests" do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42316')
+
authorize! :create_merge_request, user_project
mr_params = declared_params(include_missing: false)
@@ -167,7 +211,9 @@ module API
authorize!(:destroy_merge_request, merge_request)
- destroy_conditionally!(merge_request)
+ destroy_conditionally!(merge_request) do |merge_request|
+ Issuable::DestroyService.new(user_project, current_user).execute(merge_request)
+ end
end
params do
@@ -182,14 +228,24 @@ module API
present merge_request, with: Entities::MergeRequest, current_user: current_user, project: user_project
end
+ desc 'Get the participants of a merge request' do
+ success Entities::UserBasic
+ end
+ get ':id/merge_requests/:merge_request_iid/participants' do
+ merge_request = find_merge_request_with_access(params[:merge_request_iid])
+ participants = ::Kaminari.paginate_array(merge_request.participants)
+
+ present paginate(participants), with: Entities::UserBasic
+ end
+
desc 'Get the commits of a merge request' do
- success Entities::RepoCommit
+ success Entities::Commit
end
get ':id/merge_requests/:merge_request_iid/commits' do
merge_request = find_merge_request_with_access(params[:merge_request_iid])
commits = ::Kaminari.paginate_array(merge_request.commits)
- present paginate(commits), with: Entities::RepoCommit
+ present paginate(commits), with: Entities::Commit
end
desc 'Show the merge request changes' do
@@ -198,33 +254,34 @@ module API
get ':id/merge_requests/:merge_request_iid/changes' do
merge_request = find_merge_request_with_access(params[:merge_request_iid])
- present merge_request, with: Entities::MergeRequestChanges, current_user: current_user
+ present merge_request, with: Entities::MergeRequestChanges, current_user: current_user, project: user_project
+ end
+
+ desc 'Get the merge request pipelines' do
+ success Entities::PipelineBasic
+ end
+ get ':id/merge_requests/:merge_request_iid/pipelines' do
+ pipelines = merge_request_pipelines_with_access
+
+ present paginate(pipelines), with: Entities::PipelineBasic
end
desc 'Update a merge request' do
success Entities::MergeRequest
end
params do
- # CE
- at_least_one_of_ce = [
- :assignee_id,
- :description,
- :labels,
- :milestone_id,
- :remove_source_branch,
- :state_event,
- :target_branch,
- :title
- ]
optional :title, type: String, allow_blank: false, desc: 'The title of the merge request'
optional :target_branch, type: String, allow_blank: false, desc: 'The target branch'
optional :state_event, type: String, values: %w[close reopen],
desc: 'Status of the merge request'
+ optional :discussion_locked, type: Boolean, desc: 'Whether the MR discussion is locked'
use :optional_params
- at_least_one_of(*at_least_one_of_ce)
+ at_least_one_of(*::API::MergeRequests.update_params_at_least_one_of)
end
put ':id/merge_requests/:merge_request_iid' do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42318')
+
merge_request = find_merge_request_with_access(params.delete(:merge_request_iid), :update_merge_request)
mr_params = declared_params(include_missing: false)
@@ -243,15 +300,18 @@ module API
success Entities::MergeRequest
end
params do
- # CE
optional :merge_commit_message, type: String, desc: 'Custom merge commit message'
optional :should_remove_source_branch, type: Boolean,
desc: 'When true, the source branch will be deleted if possible'
optional :merge_when_pipeline_succeeds, type: Boolean,
desc: 'When true, this merge request will be merged when the pipeline succeeds'
optional :sha, type: String, desc: 'When present, must have the HEAD SHA of the source branch'
+
+ use :merge_params_ee
end
put ':id/merge_requests/:merge_request_iid/merge' do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42317')
+
merge_request = find_project_merge_request(params[:merge_request_iid])
merge_when_pipeline_succeeds = to_boolean(params[:merge_when_pipeline_succeeds])
@@ -263,9 +323,9 @@ module API
render_api_error!('Branch cannot be merged', 406) unless merge_request.mergeable?(skip_ci_check: merge_when_pipeline_succeeds)
- if params[:sha] && merge_request.diff_head_sha != params[:sha]
- render_api_error!("SHA does not match HEAD of source branch: #{merge_request.diff_head_sha}", 409)
- end
+ check_sha_param!(params, merge_request)
+
+ update_merge_request_ee(merge_request)
merge_params = {
commit_message: params[:merge_commit_message],
@@ -293,7 +353,7 @@ module API
unauthorized! unless merge_request.can_cancel_merge_when_pipeline_succeeds?(current_user)
- ::MergeRequest::MergeWhenPipelineSucceedsService
+ ::MergeRequests::MergeWhenPipelineSucceedsService
.new(merge_request.target_project, current_user)
.cancel(merge_request)
end
diff --git a/lib/api/namespaces.rb b/lib/api/namespaces.rb
index f1eaff6b0eb..32b77aedba8 100644
--- a/lib/api/namespaces.rb
+++ b/lib/api/namespaces.rb
@@ -19,6 +19,16 @@ module API
present paginate(namespaces), with: Entities::Namespace, current_user: current_user
end
+
+ desc 'Get a namespace by ID' do
+ success Entities::Namespace
+ end
+ params do
+ requires :id, type: String, desc: "Namespace's ID or path"
+ end
+ get ':id' do
+ present user_namespace, with: Entities::Namespace, current_user: current_user
+ end
end
end
end
diff --git a/lib/api/notes.rb b/lib/api/notes.rb
index d6e7203adaf..3588dc85c9e 100644
--- a/lib/api/notes.rb
+++ b/lib/api/notes.rb
@@ -18,6 +18,10 @@ module API
end
params do
requires :noteable_id, type: Integer, desc: 'The ID of the noteable'
+ optional :order_by, type: String, values: %w[created_at updated_at], default: 'created_at',
+ desc: 'Return notes ordered by `created_at` or `updated_at` fields.'
+ optional :sort, type: String, values: %w[asc desc], default: 'desc',
+ desc: 'Return notes sorted in `asc` or `desc` order.'
use :pagination
end
get ":id/#{noteables_str}/:noteable_id/notes" do
@@ -29,11 +33,12 @@ module API
# at the DB query level (which we cannot in that case), the current
# page can have less elements than :per_page even if
# there's more than one page.
+ raw_notes = noteable.notes.with_metadata.reorder(params[:order_by] => params[:sort])
notes =
# paginate() only works with a relation. This could lead to a
# mismatch between the pagination headers info and the actual notes
# array returned, but this is really an edge-case.
- paginate(noteable.notes)
+ paginate(raw_notes)
.reject { |n| n.cross_reference_not_visible_for?(current_user) }
present notes, with: Entities::Note
else
@@ -50,7 +55,7 @@ module API
end
get ":id/#{noteables_str}/:noteable_id/notes/:note_id" do
noteable = find_project_noteable(noteables_str, params[:noteable_id])
- note = noteable.notes.find(params[:note_id])
+ note = noteable.notes.with_metadata.find(params[:note_id])
can_read_note = can?(current_user, noteable_read_ability_name(noteable), noteable) && !note.cross_reference_not_visible_for?(current_user)
if can_read_note
@@ -78,6 +83,8 @@ module API
}
if can?(current_user, noteable_read_ability_name(noteable), noteable)
+ authorize! :create_note, noteable
+
if params[:created_at] && (current_user.admin? || user_project.owner == current_user)
opts[:created_at] = params[:created_at]
end
diff --git a/lib/api/notification_settings.rb b/lib/api/notification_settings.rb
index bcc0833aa5c..0266bf2f717 100644
--- a/lib/api/notification_settings.rb
+++ b/lib/api/notification_settings.rb
@@ -35,7 +35,7 @@ module API
new_notification_email = params.delete(:notification_email)
if new_notification_email
- ::Users::UpdateService.new(current_user, notification_email: new_notification_email).execute
+ ::Users::UpdateService.new(current_user, user: current_user, notification_email: new_notification_email).execute
end
notification_setting.update(declared_params(include_missing: false))
diff --git a/lib/api/pages_domains.rb b/lib/api/pages_domains.rb
new file mode 100644
index 00000000000..ba33993d852
--- /dev/null
+++ b/lib/api/pages_domains.rb
@@ -0,0 +1,139 @@
+module API
+ class PagesDomains < Grape::API
+ include PaginationParams
+
+ PAGES_DOMAINS_ENDPOINT_REQUIREMENTS = API::PROJECT_ENDPOINT_REQUIREMENTS.merge(domain: API::NO_SLASH_URL_PART_REGEX)
+
+ before do
+ authenticate!
+ end
+
+ after_validation do
+ normalize_params_file_to_string
+ end
+
+ helpers do
+ def find_pages_domain!
+ user_project.pages_domains.find_by(domain: params[:domain]) || not_found!('PagesDomain')
+ end
+
+ def pages_domain
+ @pages_domain ||= find_pages_domain!
+ end
+
+ def normalize_params_file_to_string
+ params.each do |k, v|
+ if v.is_a?(Hash) && v.key?(:tempfile)
+ params[k] = v[:tempfile].to_a.join('')
+ end
+ end
+ end
+ end
+
+ resource :pages do
+ before do
+ require_pages_config_enabled!
+ authenticated_with_full_private_access!
+ end
+
+ desc "Get all pages domains" do
+ success Entities::PagesDomainBasic
+ end
+ params do
+ use :pagination
+ end
+ get "domains" do
+ present paginate(PagesDomain.all), with: Entities::PagesDomainBasic
+ end
+ end
+
+ params do
+ requires :id, type: String, desc: 'The ID of a project'
+ end
+ resource :projects, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
+ before do
+ require_pages_enabled!
+ end
+
+ desc 'Get all pages domains' do
+ success Entities::PagesDomain
+ end
+ params do
+ use :pagination
+ end
+ get ":id/pages/domains" do
+ authorize! :read_pages, user_project
+
+ present paginate(user_project.pages_domains.order(:domain)), with: Entities::PagesDomain
+ end
+
+ desc 'Get a single pages domain' do
+ success Entities::PagesDomain
+ end
+ params do
+ requires :domain, type: String, desc: 'The domain'
+ end
+ get ":id/pages/domains/:domain", requirements: PAGES_DOMAINS_ENDPOINT_REQUIREMENTS do
+ authorize! :read_pages, user_project
+
+ present pages_domain, with: Entities::PagesDomain
+ end
+
+ desc 'Create a new pages domain' do
+ success Entities::PagesDomain
+ end
+ params do
+ requires :domain, type: String, desc: 'The domain'
+ optional :certificate, allow_blank: false, types: [File, String], desc: 'The certificate'
+ optional :key, allow_blank: false, types: [File, String], desc: 'The key'
+ all_or_none_of :certificate, :key
+ end
+ post ":id/pages/domains" do
+ authorize! :update_pages, user_project
+
+ pages_domain_params = declared(params, include_parent_namespaces: false)
+ pages_domain = user_project.pages_domains.create(pages_domain_params)
+
+ if pages_domain.persisted?
+ present pages_domain, with: Entities::PagesDomain
+ else
+ render_validation_error!(pages_domain)
+ end
+ end
+
+ desc 'Updates a pages domain'
+ params do
+ requires :domain, type: String, desc: 'The domain'
+ optional :certificate, allow_blank: false, types: [File, String], desc: 'The certificate'
+ optional :key, allow_blank: false, types: [File, String], desc: 'The key'
+ end
+ put ":id/pages/domains/:domain", requirements: PAGES_DOMAINS_ENDPOINT_REQUIREMENTS do
+ authorize! :update_pages, user_project
+
+ pages_domain_params = declared(params, include_parent_namespaces: false)
+
+ # Remove empty private key if certificate is not empty.
+ if pages_domain_params[:certificate] && !pages_domain_params[:key]
+ pages_domain_params.delete(:key)
+ end
+
+ if pages_domain.update(pages_domain_params)
+ present pages_domain, with: Entities::PagesDomain
+ else
+ render_validation_error!(pages_domain)
+ end
+ end
+
+ desc 'Delete a pages domain'
+ params do
+ requires :domain, type: String, desc: 'The domain'
+ end
+ delete ":id/pages/domains/:domain", requirements: PAGES_DOMAINS_ENDPOINT_REQUIREMENTS do
+ authorize! :update_pages, user_project
+
+ status 204
+ pages_domain.destroy
+ end
+ end
+ end
+end
diff --git a/lib/api/pipelines.rb b/lib/api/pipelines.rb
index 74b3376a1f3..d2b8b832e4e 100644
--- a/lib/api/pipelines.rb
+++ b/lib/api/pipelines.rb
@@ -42,12 +42,15 @@ module API
requires :ref, type: String, desc: 'Reference'
end
post ':id/pipeline' do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42124')
+
authorize! :create_pipeline, user_project
new_pipeline = Ci::CreatePipelineService.new(user_project,
current_user,
declared_params(include_missing: false))
.execute(:api, ignore_skip_ci: true, save_on_errors: false)
+
if new_pipeline.persisted?
present new_pipeline, with: Entities::Pipeline
else
diff --git a/lib/api/project_export.rb b/lib/api/project_export.rb
new file mode 100644
index 00000000000..6ec2626df1a
--- /dev/null
+++ b/lib/api/project_export.rb
@@ -0,0 +1,41 @@
+module API
+ class ProjectExport < Grape::API
+ before do
+ not_found! unless Gitlab::CurrentSettings.project_export_enabled?
+ authorize_admin_project
+ end
+
+ params do
+ requires :id, type: String, desc: 'The ID of a project'
+ end
+ resource :projects, requirements: { id: %r{[^/]+} } do
+ desc 'Get export status' do
+ detail 'This feature was introduced in GitLab 10.6.'
+ success Entities::ProjectExportStatus
+ end
+ get ':id/export' do
+ present user_project, with: Entities::ProjectExportStatus
+ end
+
+ desc 'Download export' do
+ detail 'This feature was introduced in GitLab 10.6.'
+ end
+ get ':id/export/download' do
+ path = user_project.export_project_path
+
+ render_api_error!('404 Not found or has expired', 404) unless path
+
+ present_file!(path, File.basename(path), 'application/gzip')
+ end
+
+ desc 'Start export' do
+ detail 'This feature was introduced in GitLab 10.6.'
+ end
+ post ':id/export' do
+ user_project.add_export_job(current_user: current_user)
+
+ accepted!
+ end
+ end
+ end
+end
diff --git a/lib/api/project_hooks.rb b/lib/api/project_hooks.rb
index 86066e2b58f..f82241058e5 100644
--- a/lib/api/project_hooks.rb
+++ b/lib/api/project_hooks.rb
@@ -10,6 +10,7 @@ module API
requires :url, type: String, desc: "The URL to send the request to"
optional :push_events, type: Boolean, desc: "Trigger hook on push events"
optional :issues_events, type: Boolean, desc: "Trigger hook on issues events"
+ optional :confidential_issues_events, type: Boolean, desc: "Trigger hook on confidential issues events"
optional :merge_requests_events, type: Boolean, desc: "Trigger hook on merge request events"
optional :tag_push_events, type: Boolean, desc: "Trigger hook on tag push events"
optional :note_events, type: Boolean, desc: "Trigger hook on note(comment) events"
diff --git a/lib/api/project_import.rb b/lib/api/project_import.rb
new file mode 100644
index 00000000000..a509c1f32c1
--- /dev/null
+++ b/lib/api/project_import.rb
@@ -0,0 +1,69 @@
+module API
+ class ProjectImport < Grape::API
+ include PaginationParams
+
+ helpers do
+ def import_params
+ declared_params(include_missing: false)
+ end
+
+ def file_is_valid?
+ import_params[:file] && import_params[:file]['tempfile'].respond_to?(:read)
+ end
+
+ def validate_file!
+ render_api_error!('The file is invalid', 400) unless file_is_valid?
+ end
+ end
+
+ before do
+ forbidden! unless Gitlab::CurrentSettings.import_sources.include?('gitlab_project')
+ end
+
+ resource :projects, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
+ params do
+ requires :path, type: String, desc: 'The new project path and name'
+ requires :file, type: File, desc: 'The project export file to be imported'
+ optional :namespace, type: String, desc: "The ID or name of the namespace that the project will be imported into. Defaults to the current user's namespace."
+ end
+ desc 'Create a new project import' do
+ detail 'This feature was introduced in GitLab 10.6.'
+ success Entities::ProjectImportStatus
+ end
+ post 'import' do
+ validate_file!
+
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42437')
+
+ namespace = if import_params[:namespace]
+ find_namespace!(import_params[:namespace])
+ else
+ current_user.namespace
+ end
+
+ project_params = {
+ path: import_params[:path],
+ namespace_id: namespace.id,
+ file: import_params[:file]['tempfile']
+ }
+
+ project = ::Projects::GitlabProjectsImportService.new(current_user, project_params).execute
+
+ render_api_error!(project.errors.full_messages&.first, 400) unless project.saved?
+
+ present project, with: Entities::ProjectImportStatus
+ end
+
+ params do
+ requires :id, type: String, desc: 'The ID of a project'
+ end
+ desc 'Get a project export status' do
+ detail 'This feature was introduced in GitLab 10.6.'
+ success Entities::ProjectImportStatus
+ end
+ get ':id/import' do
+ present user_project, with: Entities::ProjectImportStatus
+ end
+ end
+ end
+end
diff --git a/lib/api/project_milestones.rb b/lib/api/project_milestones.rb
index 0cb209a02d0..306dc0e63d7 100644
--- a/lib/api/project_milestones.rb
+++ b/lib/api/project_milestones.rb
@@ -60,6 +60,15 @@ module API
update_milestone_for(user_project)
end
+ desc 'Remove a project milestone'
+ delete ":id/milestones/:milestone_id" do
+ authorize! :admin_milestone, user_project
+
+ user_project.milestones.find(params[:milestone_id]).destroy
+
+ status(204)
+ end
+
desc 'Get all issues for a single project milestone' do
success Entities::IssueBasic
end
diff --git a/lib/api/project_snippets.rb b/lib/api/project_snippets.rb
index 2ccda1c1aa1..39c03c40bab 100644
--- a/lib/api/project_snippets.rb
+++ b/lib/api/project_snippets.rb
@@ -13,6 +13,7 @@ module API
if errors[:project_access].any?
error!(errors[:project_access], 422)
end
+
not_found!
end
@@ -142,7 +143,7 @@ module API
get ":id/snippets/:snippet_id/user_agent_detail" do
authenticated_as_admin!
- snippet = Snippet.find_by!(id: params[:id])
+ snippet = Snippet.find_by!(id: params[:snippet_id], project_id: params[:id])
return not_found!('UserAgentDetail') unless snippet.user_agent_detail
diff --git a/lib/api/projects.rb b/lib/api/projects.rb
index aab7a6c3f93..b552b0e0c5d 100644
--- a/lib/api/projects.rb
+++ b/lib/api/projects.rb
@@ -3,6 +3,7 @@ require_dependency 'declarative_policy'
module API
class Projects < Grape::API
include PaginationParams
+ include Helpers::CustomAttributes
before { authenticate_non_get! }
@@ -76,14 +77,15 @@ module API
def present_projects(projects, options = {})
projects = reorder_projects(projects)
- projects = projects.with_statistics if params[:statistics]
- projects = projects.with_issues_enabled if params[:with_issues_enabled]
+ projects = projects.with_issues_available_for_user(current_user) if params[:with_issues_enabled]
projects = projects.with_merge_requests_enabled if params[:with_merge_requests_enabled]
+ projects = projects.with_statistics if params[:statistics]
+ projects = paginate(projects)
+ projects, options = with_custom_attributes(projects, options)
if current_user
- projects = projects.includes(:route, :taggings, namespace: :route)
- project_members = current_user.project_members
- group_members = current_user.group_members
+ project_members = current_user.project_members.preload(:source, user: [notification_settings: :source])
+ group_members = current_user.group_members.preload(:source, user: [notification_settings: :source])
end
options = options.reverse_merge(
@@ -95,7 +97,7 @@ module API
)
options[:with] = Entities::BasicProjectDetails if params[:simple]
- present paginate(projects), options
+ present options[:with].prepare_relation(projects, options), options
end
end
@@ -107,6 +109,7 @@ module API
requires :user_id, type: String, desc: 'The ID or username of the user'
use :collection_params
use :statistics_params
+ use :with_custom_attributes
end
get ":user_id/projects" do
user = find_user(params[:user_id])
@@ -119,12 +122,15 @@ module API
end
resource :projects do
+ include CustomAttributesEndpoints
+
desc 'Get a list of visible projects for authenticated user' do
success Entities::BasicProjectDetails
end
params do
use :collection_params
use :statistics_params
+ use :with_custom_attributes
end
get do
present_projects load_projects
@@ -152,6 +158,7 @@ module API
if project.errors[:limit_reached].present?
error!(project.errors[:limit_reached], 403)
end
+
render_validation_error!(project)
end
end
@@ -193,11 +200,19 @@ module API
end
params do
use :statistics_params
+ use :with_custom_attributes
end
get ":id" do
- entity = current_user ? Entities::ProjectWithAccess : Entities::BasicProjectDetails
- present user_project, with: entity, current_user: current_user,
- user_can_admin_project: can?(current_user, :admin_project, user_project), statistics: params[:statistics]
+ options = {
+ with: current_user ? Entities::ProjectWithAccess : Entities::BasicProjectDetails,
+ current_user: current_user,
+ user_can_admin_project: can?(current_user, :admin_project, user_project),
+ statistics: params[:statistics]
+ }
+
+ project, options = with_custom_attributes(user_project, options)
+
+ present project, options
end
desc 'Fork new project for the current user or provided namespace.' do
@@ -207,6 +222,8 @@ module API
optional :namespace, type: String, desc: 'The ID or name of the namespace that the project will be forked into'
end
post ':id/fork' do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42284')
+
fork_params = declared_params(include_missing: false)
namespace_id = fork_params[:namespace]
@@ -237,6 +254,7 @@ module API
end
params do
use :collection_params
+ use :with_custom_attributes
end
get ':id/forks' do
forks = ForkProjectsFinder.new(user_project, params: project_finder_params, current_user: current_user).execute
@@ -253,6 +271,7 @@ module API
[
:jobs_enabled,
:resolve_outdated_diff_discussions,
+ :ci_config_path,
:container_registry_enabled,
:default_branch,
:description,
@@ -365,15 +384,16 @@ module API
post ":id/fork/:forked_from_id" do
authenticated_as_admin!
- forked_from_project = find_project!(params[:forked_from_id])
- not_found!("Source Project") unless forked_from_project
+ fork_from_project = find_project!(params[:forked_from_id])
+
+ not_found!("Source Project") unless fork_from_project
- if user_project.forked_from_project.nil?
- user_project.create_forked_project_link(forked_to_project_id: user_project.id, forked_from_project_id: forked_from_project.id)
+ result = ::Projects::ForkService.new(fork_from_project, current_user).execute(user_project)
- ::Projects::ForksCountService.new(forked_from_project).refresh_cache
+ if result
+ present user_project.reload, with: Entities::Project
else
- render_api_error!("Project already forked", 409)
+ render_api_error!("Project already forked", 409) if user_project.forked?
end
end
@@ -381,11 +401,11 @@ module API
delete ":id/fork" do
authorize! :remove_fork_project, user_project
- if user_project.forked?
- destroy_conditionally!(user_project.forked_project_link)
- else
- not_modified!
+ result = destroy_conditionally!(user_project) do
+ ::Projects::UnlinkForkService.new(user_project, current_user).execute
end
+
+ result ? status(204) : not_modified!
end
desc 'Share the project with a group' do
diff --git a/lib/api/projects_relation_builder.rb b/lib/api/projects_relation_builder.rb
new file mode 100644
index 00000000000..6482fd94ab8
--- /dev/null
+++ b/lib/api/projects_relation_builder.rb
@@ -0,0 +1,34 @@
+module API
+ module ProjectsRelationBuilder
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ def prepare_relation(projects_relation, options = {})
+ projects_relation = preload_relation(projects_relation, options)
+ execute_batch_counting(projects_relation)
+ projects_relation
+ end
+
+ def preload_relation(projects_relation, options = {})
+ projects_relation
+ end
+
+ def forks_counting_projects(projects_relation)
+ projects_relation
+ end
+
+ def batch_forks_counting(projects_relation)
+ ::Projects::BatchForksCountService.new(forks_counting_projects(projects_relation)).refresh_cache
+ end
+
+ def batch_open_issues_counting(projects_relation)
+ ::Projects::BatchOpenIssuesCountService.new(projects_relation).refresh_cache
+ end
+
+ def execute_batch_counting(projects_relation)
+ batch_forks_counting(projects_relation)
+ batch_open_issues_counting(projects_relation)
+ end
+ end
+ end
+end
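ProjectsRelationBuilder is pulled into project entity classes so that the `options[:with].prepare_relation(projects, options)` call in lib/api/projects.rb above can preload associations and warm the fork/open-issue count caches before rendering. A sketch of an entity opting in, with hypothetical preloads:

module API
  module Entities
    class ExampleProjectDetails < Grape::Entity
      include ::API::ProjectsRelationBuilder

      # Hypothetical eager loads; prepare_relation calls this before batch counting
      def self.preload_relation(projects_relation, options = {})
        projects_relation.preload(:route, namespace: :route)
      end
    end
  end
end

# In an endpoint (illustrative):
# present Entities::ExampleProjectDetails.prepare_relation(projects), with: Entities::ExampleProjectDetails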
diff --git a/lib/api/protected_branches.rb b/lib/api/protected_branches.rb
index 15fcb9e8e27..c15c487deb4 100644
--- a/lib/api/protected_branches.rb
+++ b/lib/api/protected_branches.rb
@@ -2,7 +2,7 @@ module API
class ProtectedBranches < Grape::API
include PaginationParams
- BRANCH_ENDPOINT_REQUIREMENTS = API::PROJECT_ENDPOINT_REQUIREMENTS.merge(branch: API::NO_SLASH_URL_PART_REGEX)
+ BRANCH_ENDPOINT_REQUIREMENTS = API::PROJECT_ENDPOINT_REQUIREMENTS.merge(name: API::NO_SLASH_URL_PART_REGEX)
before { authorize_admin_project }
@@ -39,11 +39,11 @@ module API
end
params do
requires :name, type: String, desc: 'The name of the protected branch'
- optional :push_access_level, type: Integer, default: Gitlab::Access::MASTER,
- values: ProtectedBranchAccess::ALLOWED_ACCESS_LEVELS,
+ optional :push_access_level, type: Integer,
+ values: ProtectedRefAccess::ALLOWED_ACCESS_LEVELS,
desc: 'Access levels allowed to push (defaults: `40`, master access level)'
- optional :merge_access_level, type: Integer, default: Gitlab::Access::MASTER,
- values: ProtectedBranchAccess::ALLOWED_ACCESS_LEVELS,
+ optional :merge_access_level, type: Integer,
+ values: ProtectedRefAccess::ALLOWED_ACCESS_LEVELS,
desc: 'Access levels allowed to merge (defaults: `40`, master access level)'
end
post ':id/protected_branches' do
@@ -52,15 +52,13 @@ module API
conflict!("Protected branch '#{params[:name]}' already exists")
end
- protected_branch_params = {
- name: params[:name],
- push_access_levels_attributes: [{ access_level: params[:push_access_level] }],
- merge_access_levels_attributes: [{ access_level: params[:merge_access_level] }]
- }
+ # Replace with `declared(params)` after updating to grape v1.0.2
+ # See https://github.com/ruby-grape/grape/pull/1710
+ # and https://gitlab.com/gitlab-org/gitlab-ce/issues/40843
+ declared_params = params.slice("name", "push_access_level", "merge_access_level", "allowed_to_push", "allowed_to_merge")
- service_args = [user_project, current_user, protected_branch_params]
-
- protected_branch = ::ProtectedBranches::CreateService.new(*service_args).execute
+ api_service = ::ProtectedBranches::ApiService.new(user_project, current_user, declared_params)
+ protected_branch = api_service.create
if protected_branch.persisted?
present protected_branch, with: Entities::ProtectedBranch, project: user_project
diff --git a/lib/api/repositories.rb b/lib/api/repositories.rb
index 2255fb1b70d..9638c53a1df 100644
--- a/lib/api/repositories.rb
+++ b/lib/api/repositories.rb
@@ -15,6 +15,7 @@ module API
if errors[:project_access].any?
error!(errors[:project_access], 422)
end
+
not_found!
end
@@ -35,7 +36,7 @@ module API
end
desc 'Get a project repository tree' do
- success Entities::RepoTreeObject
+ success Entities::TreeObject
end
params do
optional :ref, type: String, desc: 'The name of a repository branch or tag, if not given the default branch is used'
@@ -52,12 +53,12 @@ module API
tree = user_project.repository.tree(commit.id, path, recursive: params[:recursive])
entries = ::Kaminari.paginate_array(tree.sorted_entries)
- present paginate(entries), with: Entities::RepoTreeObject
+ present paginate(entries), with: Entities::TreeObject
end
desc 'Get raw blob contents from the repository'
params do
- requires :sha, type: String, desc: 'The commit, branch name, or tag name'
+ requires :sha, type: String, desc: 'The commit hash'
end
get ':id/repository/blobs/:sha/raw' do
assign_blob_vars!
@@ -67,7 +68,7 @@ module API
desc 'Get a blob from the repository'
params do
- requires :sha, type: String, desc: 'The commit, branch name, or tag name'
+ requires :sha, type: String, desc: 'The commit hash'
end
get ':id/repository/blobs/:sha' do
assign_blob_vars!
@@ -110,10 +111,12 @@ module API
end
params do
use :pagination
+ optional :order_by, type: String, values: %w[email name commits], default: nil, desc: 'Return contributors ordered by `name` or `email` or `commits`'
+ optional :sort, type: String, values: %w[asc desc], default: nil, desc: 'Sort by asc (ascending) or desc (descending)'
end
get ':id/repository/contributors' do
begin
- contributors = ::Kaminari.paginate_array(user_project.repository.contributors)
+ contributors = ::Kaminari.paginate_array(user_project.repository.contributors(order_by: params[:order_by], sort: params[:sort]))
present paginate(contributors), with: Entities::Contributor
rescue
not_found!
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index a3987c560dd..7e6c33ec33d 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -16,7 +16,8 @@ module API
optional :tag_list, type: Array[String], desc: %q(List of Runner's tags)
end
post '/' do
- attributes = attributes_for_keys [:description, :locked, :run_untagged, :tag_list]
+ attributes = attributes_for_keys([:description, :locked, :run_untagged, :tag_list])
+ .merge(get_runner_details_from_request)
runner =
if runner_registration_token_valid?
@@ -30,7 +31,6 @@ module API
return forbidden! unless runner
if runner.id
- runner.update(get_runner_version_from_params)
present runner, with: Entities::RunnerRegistrationDetails
else
not_found!
@@ -78,7 +78,6 @@ module API
post '/request' do
authenticate_runner!
no_content! unless current_runner.active?
- update_runner_info
if current_runner.runner_queue_value_latest?(params[:last_update])
header 'X-GitLab-Last-Update', params[:last_update]
@@ -205,6 +204,7 @@ module API
optional 'file.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
optional 'file.name', type: String, desc: %q(real filename as sent in Content-Disposition (generated by Workhorse))
optional 'file.type', type: String, desc: %q(real content type as sent in Content-Type (generated by Workhorse))
+ optional 'file.sha256', type: String, desc: %q(sha256 checksum of the file)
optional 'metadata.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
optional 'metadata.name', type: String, desc: %q(filename (generated by Workhorse))
end
@@ -215,18 +215,20 @@ module API
job = authenticate_job!
forbidden!('Job is not running!') unless job.running?
- artifacts_upload_path = ArtifactUploader.artifacts_upload_path
- artifacts = uploaded_file(:file, artifacts_upload_path)
- metadata = uploaded_file(:metadata, artifacts_upload_path)
+ workhorse_upload_path = JobArtifactUploader.workhorse_upload_path
+ artifacts = uploaded_file(:file, workhorse_upload_path)
+ metadata = uploaded_file(:metadata, workhorse_upload_path)
bad_request!('Missing artifacts file!') unless artifacts
file_to_large! unless artifacts.size < max_artifacts_size
- job.artifacts_file = artifacts
- job.artifacts_metadata = metadata
- job.artifacts_expire_in = params['expire_in'] ||
+ expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
+ job.build_job_artifacts_archive(project: job.project, file_type: :archive, file: artifacts, file_sha256: params['file.sha256'], expire_in: expire_in)
+ job.build_job_artifacts_metadata(project: job.project, file_type: :metadata, file: metadata, expire_in: expire_in) if metadata
+ job.artifacts_expire_in = expire_in
+
if job.save
present job, with: Entities::JobRequest::Response
else
diff --git a/lib/api/runners.rb b/lib/api/runners.rb
index d3559ef71be..996457c5dfe 100644
--- a/lib/api/runners.rb
+++ b/lib/api/runners.rb
@@ -84,6 +84,23 @@ module API
destroy_conditionally!(runner)
end
+
+ desc 'List jobs running on a runner' do
+ success Entities::JobBasicWithProject
+ end
+ params do
+ requires :id, type: Integer, desc: 'The ID of the runner'
+ optional :status, type: String, desc: 'Status of the job', values: Ci::Build::AVAILABLE_STATUSES
+ use :pagination
+ end
+ get ':id/jobs' do
+ runner = get_runner(params[:id])
+ authenticate_list_runners_jobs!(runner)
+
+ jobs = RunnerJobsFinder.new(runner, params).execute
+
+ present paginate(jobs), with: Entities::JobBasicWithProject
+ end
end
params do
@@ -165,17 +182,20 @@ module API
def authenticate_show_runner!(runner)
return if runner.is_shared || current_user.admin?
+
forbidden!("No access granted") unless user_can_access_runner?(runner)
end
def authenticate_update_runner!(runner)
return if current_user.admin?
+
forbidden!("Runner is shared") if runner.is_shared?
forbidden!("No access granted") unless user_can_access_runner?(runner)
end
def authenticate_delete_runner!(runner)
return if current_user.admin?
+
forbidden!("Runner is shared") if runner.is_shared?
forbidden!("Runner associated with more than one project") if runner.projects.count > 1
forbidden!("No access granted") unless user_can_access_runner?(runner)
@@ -185,6 +205,13 @@ module API
forbidden!("Runner is shared") if runner.is_shared?
forbidden!("Runner is locked") if runner.locked?
return if current_user.admin?
+
+ forbidden!("No access granted") unless user_can_access_runner?(runner)
+ end
+
+ def authenticate_list_runners_jobs!(runner)
+ return if current_user.admin?
+
forbidden!("No access granted") unless user_can_access_runner?(runner)
end
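A similar hedged sketch for the new runner jobs endpoint; the runner ID and host are placeholders, while the `status` filter and pagination come from the params declared above. The caller must pass `authenticate_list_runners_jobs!`, i.e. be an admin or otherwise have access to the runner.

require 'net/http'
require 'json'
require 'uri'

# Placeholder runner ID, host, and token.
uri = URI('https://gitlab.example.com/api/v4/runners/6/jobs')
uri.query = URI.encode_www_form(status: 'running', per_page: 20)

req = Net::HTTP::Get.new(uri)
req['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN')

res = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(req) }
JSON.parse(res.body).each { |job| puts "##{job['id']} #{job['status']}" }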
diff --git a/lib/api/search.rb b/lib/api/search.rb
new file mode 100644
index 00000000000..3556ad98c52
--- /dev/null
+++ b/lib/api/search.rb
@@ -0,0 +1,111 @@
+module API
+ class Search < Grape::API
+ include PaginationParams
+
+ before { authenticate! }
+
+ helpers do
+ SCOPE_ENTITY = {
+ merge_requests: Entities::MergeRequestBasic,
+ issues: Entities::IssueBasic,
+ projects: Entities::BasicProjectDetails,
+ milestones: Entities::Milestone,
+ notes: Entities::Note,
+ commits: Entities::CommitDetail,
+ blobs: Entities::Blob,
+ wiki_blobs: Entities::Blob,
+ snippet_titles: Entities::Snippet,
+ snippet_blobs: Entities::Snippet
+ }.freeze
+
+ def search(additional_params = {})
+ search_params = {
+ scope: params[:scope],
+ search: params[:search],
+ snippets: snippets?,
+ page: params[:page],
+ per_page: params[:per_page]
+ }.merge(additional_params)
+
+ results = SearchService.new(current_user, search_params).search_objects
+
+ process_results(results)
+ end
+
+ def process_results(results)
+ case params[:scope]
+ when 'wiki_blobs'
+ paginate(results).map { |blob| Gitlab::ProjectSearchResults.parse_search_result(blob, user_project) }
+ when 'blobs'
+ paginate(results).map { |blob| blob[1] }
+ else
+ paginate(results)
+ end
+ end
+
+ def snippets?
+ %w(snippet_blobs snippet_titles).include?(params[:scope]).to_s
+ end
+
+ def entity
+ SCOPE_ENTITY[params[:scope].to_sym]
+ end
+ end
+
+ resource :search do
+ desc 'Search on GitLab' do
+ detail 'This feature was introduced in GitLab 10.5.'
+ end
+ params do
+ requires :search, type: String, desc: 'The expression to search for'
+ requires :scope,
+ type: String,
+ desc: 'The scope of search, available scopes:
+ projects, issues, merge_requests, milestones, snippet_titles, snippet_blobs',
+ values: %w(projects issues merge_requests milestones snippet_titles snippet_blobs)
+ use :pagination
+ end
+ get do
+ present search, with: entity
+ end
+ end
+
+ resource :groups, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
+ desc 'Search on GitLab' do
+ detail 'This feature was introduced in GitLab 10.5.'
+ end
+ params do
+ requires :id, type: String, desc: 'The ID of a group'
+ requires :search, type: String, desc: 'The expression to search for'
+ requires :scope,
+ type: String,
+ desc: 'The scope of search, available scopes:
+ projects, issues, merge_requests, milestones',
+ values: %w(projects issues merge_requests milestones)
+ use :pagination
+ end
+ get ':id/-/search' do
+ present search(group_id: user_group.id), with: entity
+ end
+ end
+
+ resource :projects, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
+ desc 'Search on GitLab' do
+ detail 'This feature was introduced in GitLab 10.5.'
+ end
+ params do
+ requires :id, type: String, desc: 'The ID of a project'
+ requires :search, type: String, desc: 'The expression to search for'
+ requires :scope,
+ type: String,
+ desc: 'The scope of search, available scopes:
+ issues, merge_requests, milestones, notes, wiki_blobs, commits, blobs',
+ values: %w(issues merge_requests milestones notes wiki_blobs commits blobs)
+ use :pagination
+ end
+ get ':id/-/search' do
+ present search(project_id: user_project.id), with: entity
+ end
+ end
+ end
+end
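The new search API follows the same pattern; a sketch of one global and one project-scoped query, assuming a placeholder host, project ID, and token. The required `scope` and `search` parameters are the ones declared above.

require 'net/http'
require 'json'
require 'uri'

token = ENV.fetch('GITLAB_TOKEN')

# Global search across issues.
global = URI('https://gitlab.example.com/api/v4/search')
global.query = URI.encode_www_form(scope: 'issues', search: 'broken pipeline')

# Project-scoped blob search, e.g. within project 42.
scoped = URI('https://gitlab.example.com/api/v4/projects/42/-/search')
scoped.query = URI.encode_www_form(scope: 'blobs', search: 'def execute')

[global, scoped].each do |uri|
  req = Net::HTTP::Get.new(uri)
  req['PRIVATE-TOKEN'] = token
  res = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(req) }
  puts "#{uri.path}: #{JSON.parse(res.body).length} results"
end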
diff --git a/lib/api/services.rb b/lib/api/services.rb
index 2cbd0517dc3..6c97659166d 100644
--- a/lib/api/services.rb
+++ b/lib/api/services.rb
@@ -1,5 +1,144 @@
+# frozen_string_literal: true
module API
class Services < Grape::API
+ CHAT_NOTIFICATION_SETTINGS = [
+ {
+ required: true,
+ name: :webhook,
+ type: String,
+ desc: 'The chat webhook'
+ },
+ {
+ required: false,
+ name: :username,
+ type: String,
+ desc: 'The chat username'
+ },
+ {
+ required: false,
+ name: :channel,
+ type: String,
+ desc: 'The default chat channel'
+ }
+ ].freeze
+
+ CHAT_NOTIFICATION_FLAGS = [
+ {
+ required: false,
+ name: :notify_only_broken_pipelines,
+ type: Boolean,
+ desc: 'Send notifications for broken pipelines'
+ },
+ {
+ required: false,
+ name: :notify_only_default_branch,
+ type: Boolean,
+ desc: 'Send notifications only for the default branch'
+ }
+ ].freeze
+
+ CHAT_NOTIFICATION_CHANNELS = [
+ {
+ required: false,
+ name: :push_channel,
+ type: String,
+ desc: 'The name of the channel to receive push_events notifications'
+ },
+ {
+ required: false,
+ name: :issue_channel,
+ type: String,
+ desc: 'The name of the channel to receive issues_events notifications'
+ },
+ {
+ required: false,
+ name: :confidential_issue_channel,
+ type: String,
+ desc: 'The name of the channel to receive confidential_issues_events notifications'
+ },
+ {
+ required: false,
+ name: :merge_request_channel,
+ type: String,
+ desc: 'The name of the channel to receive merge_requests_events notifications'
+ },
+ {
+ required: false,
+ name: :note_channel,
+ type: String,
+ desc: 'The name of the channel to receive note_events notifications'
+ },
+ {
+ required: false,
+ name: :tag_push_channel,
+ type: String,
+ desc: 'The name of the channel to receive tag_push_events notifications'
+ },
+ {
+ required: false,
+ name: :pipeline_channel,
+ type: String,
+ desc: 'The name of the channel to receive pipeline_events notifications'
+ },
+ {
+ required: false,
+ name: :wiki_page_channel,
+ type: String,
+ desc: 'The name of the channel to receive wiki_page_events notifications'
+ }
+ ].freeze
+
+ CHAT_NOTIFICATION_EVENTS = [
+ {
+ required: false,
+ name: :push_events,
+ type: Boolean,
+ desc: 'Enable notifications for push_events'
+ },
+ {
+ required: false,
+ name: :issues_events,
+ type: Boolean,
+ desc: 'Enable notifications for issues_events'
+ },
+ {
+ required: false,
+ name: :confidential_issues_events,
+ type: Boolean,
+ desc: 'Enable notifications for confidential_issues_events'
+ },
+ {
+ required: false,
+ name: :merge_requests_events,
+ type: Boolean,
+ desc: 'Enable notifications for merge_requests_events'
+ },
+ {
+ required: false,
+ name: :note_events,
+ type: Boolean,
+ desc: 'Enable notifications for note_events'
+ },
+ {
+ required: false,
+ name: :tag_push_events,
+ type: Boolean,
+ desc: 'Enable notifications for tag_push_events'
+ },
+ {
+ required: false,
+ name: :pipeline_events,
+ type: Boolean,
+ desc: 'Enable notifications for pipeline_events'
+ },
+ {
+ required: false,
+ name: :wiki_page_events,
+ type: Boolean,
+ desc: 'Enable notifications for wiki_page_events'
+ }
+ ].freeze
+
services = {
'asana' => [
{
@@ -313,13 +452,13 @@ module API
desc: 'The base URL to the JIRA instance API. Web URL value will be used if not set. E.g., https://jira-api.example.com'
},
{
- required: false,
+ required: true,
name: :username,
type: String,
desc: 'The username of the user created to be used with GitLab/JIRA'
},
{
- required: false,
+ required: true,
name: :password,
type: String,
desc: 'The password of the user created to be used with GitLab/JIRA'
@@ -374,6 +513,26 @@ module API
desc: 'The Slack token'
}
],
+ 'packagist' => [
+ {
+ required: true,
+ name: :username,
+ type: String,
+ desc: 'The username'
+ },
+ {
+ required: true,
+ name: :token,
+ type: String,
+ desc: 'The Packagist API token'
+ },
+ {
+ required: false,
+ name: :server,
+ type: String,
+ desc: 'The server'
+ }
+ ],
'pipelines-email' => [
{
required: true,
@@ -469,25 +628,11 @@ module API
}
],
'slack' => [
- {
- required: true,
- name: :webhook,
- type: String,
- desc: 'The Slack webhook. e.g. https://hooks.slack.com/services/...'
- },
- {
- required: false,
- name: :new_issue_url,
- type: String,
- desc: 'The user name'
- },
- {
- required: false,
- name: :channel,
- type: String,
- desc: 'The channel name'
- }
- ],
+ CHAT_NOTIFICATION_SETTINGS,
+ CHAT_NOTIFICATION_FLAGS,
+ CHAT_NOTIFICATION_CHANNELS,
+ CHAT_NOTIFICATION_EVENTS
+ ].flatten,
'microsoft-teams' => [
{
required: true,
@@ -497,13 +642,11 @@ module API
}
],
'mattermost' => [
- {
- required: true,
- name: :webhook,
- type: String,
- desc: 'The Mattermost webhook. e.g. http://mattermost_host/hooks/...'
- }
- ],
+ CHAT_NOTIFICATION_SETTINGS,
+ CHAT_NOTIFICATION_FLAGS,
+ CHAT_NOTIFICATION_CHANNELS,
+ CHAT_NOTIFICATION_EVENTS
+ ].flatten,
'teamcity' => [
{
required: true,
@@ -551,6 +694,7 @@ module API
KubernetesService,
MattermostSlashCommandsService,
SlackSlashCommandsService,
+ PackagistService,
PipelinesEmailService,
PivotaltrackerService,
PrometheusService,
@@ -581,7 +725,22 @@ module API
]
end
- trigger_services = {
+ SERVICES = services.freeze
+ SERVICE_CLASSES = service_classes.freeze
+
+ SERVICE_CLASSES.each do |service|
+ event_names = service.try(:event_names) || next
+ event_names.each do |event_name|
+ SERVICES[service.to_param.tr("_", "-")] << {
+ required: false,
+ name: event_name.to_sym,
+ type: String,
+ desc: ServicesHelper.service_event_description(event_name)
+ }
+ end
+ end
+
+ TRIGGER_SERVICES = {
'mattermost-slash-commands' => [
{
name: :token,
@@ -613,22 +772,9 @@ module API
end
end
- services.each do |service_slug, settings|
+ SERVICES.each do |service_slug, settings|
desc "Set #{service_slug} service for project"
params do
- service_classes.each do |service|
- event_names = service.try(:event_names) || next
- event_names.each do |event_name|
- services[service.to_param.tr("_", "-")] << {
- required: false,
- name: event_name.to_sym,
- type: String,
- desc: ServicesHelper.service_event_description(event_name)
- }
- end
- end
- services.freeze
-
settings.each do |setting|
if setting[:required]
requires setting[:name], type: setting[:type], desc: setting[:desc]
@@ -642,7 +788,7 @@ module API
service_params = declared_params(include_missing: false).merge(active: true)
if service.update_attributes(service_params)
- present service, with: Entities::ProjectService, include_passwords: current_user.admin?
+ present service, with: Entities::ProjectService
else
render_api_error!('400 Bad Request', 400)
end
@@ -651,7 +797,7 @@ module API
desc "Delete a service for project"
params do
- requires :service_slug, type: String, values: services.keys, desc: 'The name of the service'
+ requires :service_slug, type: String, values: SERVICES.keys, desc: 'The name of the service'
end
delete ":id/services/:service_slug" do
service = user_project.find_or_initialize_service(params[:service_slug].underscore)
@@ -671,7 +817,7 @@ module API
success Entities::ProjectService
end
params do
- requires :service_slug, type: String, values: services.keys, desc: 'The name of the service'
+ requires :service_slug, type: String, values: SERVICES.keys, desc: 'The name of the service'
end
get ":id/services/:service_slug" do
service = user_project.find_or_initialize_service(params[:service_slug].underscore)
@@ -679,7 +825,7 @@ module API
end
end
- trigger_services.each do |service_slug, settings|
+ TRIGGER_SERVICES.each do |service_slug, settings|
helpers do
def slash_command_service(project, service_slug, params)
project.services.active.where(template: false).find do |service|
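Because Slack and Mattermost now share the chat notification parameter sets defined above, per-event channels can be set through the service endpoint. A sketch under placeholder host, project ID, webhook, and token; the parameter names are the ones declared in CHAT_NOTIFICATION_SETTINGS, CHAT_NOTIFICATION_CHANNELS, and CHAT_NOTIFICATION_FLAGS.

require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/42/services/slack')

req = Net::HTTP::Put.new(uri)
req['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN')
req.set_form_data(
  webhook: 'https://hooks.slack.com/services/XXX',  # placeholder webhook
  push_channel: '#commits',                          # per-event channel
  pipeline_channel: '#ci',
  notify_only_default_branch: true                   # notification flag
)

res = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(req) }
puts res.code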
diff --git a/lib/api/session.rb b/lib/api/session.rb
deleted file mode 100644
index 016415c3023..00000000000
--- a/lib/api/session.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-module API
- class Session < Grape::API
- desc 'Login to get token' do
- success Entities::UserWithPrivateDetails
- end
- params do
- optional :login, type: String, desc: 'The username'
- optional :email, type: String, desc: 'The email of the user'
- requires :password, type: String, desc: 'The password of the user'
- at_least_one_of :login, :email
- end
- post "/session" do
- user = Gitlab::Auth.find_with_user_password(params[:email] || params[:login], params[:password])
-
- return unauthorized! unless user
- return render_api_error!('401 Unauthorized. You have 2FA enabled. Please use a personal access token to access the API', 401) if user.two_factor_enabled?
- present user, with: Entities::UserWithPrivateDetails
- end
- end
-end
diff --git a/lib/api/settings.rb b/lib/api/settings.rb
index 851b226e9e5..152df23a327 100644
--- a/lib/api/settings.rb
+++ b/lib/api/settings.rb
@@ -44,9 +44,11 @@ module API
requires :domain_blacklist, type: String, desc: 'Users with e-mail addresses that match these domain(s) will NOT be able to sign-up. Wildcards allowed. Use separate lines for multiple entries. Ex: domain.com, *.domain.com'
end
optional :after_sign_up_text, type: String, desc: 'Text shown after sign up'
- optional :password_authentication_enabled, type: Boolean, desc: 'Flag indicating if password authentication is enabled'
- optional :signin_enabled, type: Boolean, desc: 'Flag indicating if password authentication is enabled'
- mutually_exclusive :password_authentication_enabled, :signin_enabled
+ optional :password_authentication_enabled_for_web, type: Boolean, desc: 'Flag indicating if password authentication is enabled for the web interface'
+ optional :password_authentication_enabled, type: Boolean, desc: 'Flag indicating if password authentication is enabled for the web interface' # support legacy names, can be removed in v5
+ optional :signin_enabled, type: Boolean, desc: 'Flag indicating if password authentication is enabled for the web interface' # support legacy names, can be removed in v5
+ mutually_exclusive :password_authentication_enabled_for_web, :password_authentication_enabled, :signin_enabled
+ optional :password_authentication_enabled_for_git, type: Boolean, desc: 'Flag indicating if password authentication is enabled for Git over HTTP(S)'
optional :require_two_factor_authentication, type: Boolean, desc: 'Require all users to setup Two-factor authentication'
given require_two_factor_authentication: ->(val) { val } do
requires :two_factor_grace_period, type: Integer, desc: 'Amount of time (in hours) that users are allowed to skip forced configuration of two-factor authentication'
@@ -121,6 +123,9 @@ module API
end
optional :terminal_max_session_time, type: Integer, desc: 'Maximum time for web terminal websocket connection (in seconds). Set to 0 for unlimited time.'
optional :polling_interval_multiplier, type: BigDecimal, desc: 'Interval multiplier used by endpoints that perform polling. Set to 0 to disable polling.'
+ optional :gitaly_timeout_default, type: Integer, desc: 'Default Gitaly timeout, in seconds. Set to 0 to disable timeouts.'
+ optional :gitaly_timeout_medium, type: Integer, desc: 'Medium Gitaly timeout, in seconds. Set to 0 to disable timeouts.'
+ optional :gitaly_timeout_fast, type: Integer, desc: 'Gitaly fast operation timeout, in seconds. Set to 0 to disable timeouts.'
ApplicationSetting::SUPPORTED_KEY_TYPES.each do |type|
optional :"#{type}_key_restriction",
@@ -135,11 +140,14 @@ module API
put "application/settings" do
attrs = declared_params(include_missing: false)
+ # support legacy names, can be removed in v5
if attrs.has_key?(:signin_enabled)
- attrs[:password_authentication_enabled] = attrs.delete(:signin_enabled)
+ attrs[:password_authentication_enabled_for_web] = attrs.delete(:signin_enabled)
+ elsif attrs.has_key?(:password_authentication_enabled)
+ attrs[:password_authentication_enabled_for_web] = attrs.delete(:password_authentication_enabled)
end
- if current_settings.update_attributes(attrs)
+ if ApplicationSettings::UpdateService.new(current_settings, current_user, attrs).execute
present current_settings, with: Entities::ApplicationSetting
else
render_validation_error!(current_settings)
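A hedged sketch of exercising the renamed authentication flag and one of the new Gitaly timeouts through the settings endpoint; the host and admin token are placeholders, and the attribute names come from the params above.

require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/application/settings')

req = Net::HTTP::Put.new(uri)
req['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_ADMIN_TOKEN')
req.set_form_data(
  password_authentication_enabled_for_web: true,  # replaces signin_enabled / password_authentication_enabled
  gitaly_timeout_default: 55                       # seconds; 0 disables the timeout
)

res = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(req) }
puts res.code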
diff --git a/lib/api/snippets.rb b/lib/api/snippets.rb
index 00eb7c60f16..c736cc32021 100644
--- a/lib/api/snippets.rb
+++ b/lib/api/snippets.rb
@@ -95,6 +95,7 @@ module API
put ':id' do
snippet = snippets_for_current_user.find_by(id: params.delete(:id))
return not_found!('Snippet') unless snippet
+
authorize! :update_personal_snippet, snippet
attrs = declared_params(include_missing: false).merge(request: request, api: true)
diff --git a/lib/api/system_hooks.rb b/lib/api/system_hooks.rb
index 6b6a03e3300..c7a460df46a 100644
--- a/lib/api/system_hooks.rb
+++ b/lib/api/system_hooks.rb
@@ -26,6 +26,7 @@ module API
optional :token, type: String, desc: 'The token used to validate payloads'
optional :push_events, type: Boolean, desc: "Trigger hook on push events"
optional :tag_push_events, type: Boolean, desc: "Trigger hook on tag push events"
+ optional :merge_requests_events, type: Boolean, desc: "Trigger hook on merge requests events"
optional :enable_ssl_verification, type: Boolean, desc: "Do SSL verification when triggering the hook"
end
post do
diff --git a/lib/api/tags.rb b/lib/api/tags.rb
index 912415e3a7f..5e0afc6a7e4 100644
--- a/lib/api/tags.rb
+++ b/lib/api/tags.rb
@@ -11,18 +11,23 @@ module API
end
resource :projects, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
desc 'Get a project repository tags' do
- success Entities::RepoTag
+ success Entities::Tag
end
params do
+ optional :sort, type: String, values: %w[asc desc], default: 'desc',
+ desc: 'Return tags sorted in `asc` or `desc` order.'
+ optional :order_by, type: String, values: %w[name updated], default: 'updated',
+ desc: 'Return tags ordered by `name` or `updated` fields.'
use :pagination
end
get ':id/repository/tags' do
- tags = ::Kaminari.paginate_array(user_project.repository.tags.sort_by(&:name).reverse)
- present paginate(tags), with: Entities::RepoTag, project: user_project
+ tags = ::Kaminari.paginate_array(::TagsFinder.new(user_project.repository, sort: "#{params[:order_by]}_#{params[:sort]}").execute)
+
+ present paginate(tags), with: Entities::Tag, project: user_project
end
desc 'Get a single repository tag' do
- success Entities::RepoTag
+ success Entities::Tag
end
params do
requires :tag_name, type: String, desc: 'The name of the tag'
@@ -31,11 +36,11 @@ module API
tag = user_project.repository.find_tag(params[:tag_name])
not_found!('Tag') unless tag
- present tag, with: Entities::RepoTag, project: user_project
+ present tag, with: Entities::Tag, project: user_project
end
desc 'Create a new repository tag' do
- success Entities::RepoTag
+ success Entities::Tag
end
params do
requires :tag_name, type: String, desc: 'The name of the tag'
@@ -51,7 +56,7 @@ module API
if result[:status] == :success
present result[:tag],
- with: Entities::RepoTag,
+ with: Entities::Tag,
project: user_project
else
render_api_error!(result[:message], 400)
diff --git a/lib/api/templates.rb b/lib/api/templates.rb
index f70bc0622b7..41862768a3f 100644
--- a/lib/api/templates.rb
+++ b/lib/api/templates.rb
@@ -17,15 +17,15 @@ module API
}
}.freeze
PROJECT_TEMPLATE_REGEX =
- /[\<\{\[]
+ %r{[\<\{\[]
(project|description|
one\sline\s.+\swhat\sit\sdoes\.) # matching the start and end is enough here
- [\>\}\]]/xi.freeze
+ [\>\}\]]}xi.freeze
YEAR_TEMPLATE_REGEX = /[<{\[](year|yyyy)[>}\]]/i.freeze
FULLNAME_TEMPLATE_REGEX =
- /[\<\{\[]
+ %r{[\<\{\[]
(fullname|name\sof\s(author|copyright\sowner))
- [\>\}\]]/xi.freeze
+ [\>\}\]]}xi.freeze
helpers do
def parsed_license_template
@@ -49,7 +49,7 @@ module API
desc 'Get the list of the available license template' do
detail 'This feature was introduced in GitLab 8.7.'
- success ::API::Entities::RepoLicense
+ success ::API::Entities::License
end
params do
optional :popular, type: Boolean, desc: 'If passed, returns only popular licenses'
@@ -60,12 +60,12 @@ module API
featured: declared(params)[:popular].present? ? true : nil
}
licences = ::Kaminari.paginate_array(Licensee::License.all(options))
- present paginate(licences), with: Entities::RepoLicense
+ present paginate(licences), with: Entities::License
end
desc 'Get the text for a specific license' do
detail 'This feature was introduced in GitLab 8.7.'
- success ::API::Entities::RepoLicense
+ success ::API::Entities::License
end
params do
requires :name, type: String, desc: 'The name of the template'
@@ -75,7 +75,7 @@ module API
template = parsed_license_template
- present template, with: ::API::Entities::RepoLicense
+ present template, with: ::API::Entities::License
end
GLOBAL_TEMPLATE_TYPES.each do |template_type, properties|
diff --git a/lib/api/time_tracking_endpoints.rb b/lib/api/time_tracking_endpoints.rb
index df4632346dd..2bb451dea89 100644
--- a/lib/api/time_tracking_endpoints.rb
+++ b/lib/api/time_tracking_endpoints.rb
@@ -85,7 +85,7 @@ module API
update_issuable(spend_time: {
duration: Gitlab::TimeTrackingFormatter.parse(params.delete(:duration)),
- user: current_user
+ user_id: current_user.id
})
end
@@ -97,7 +97,7 @@ module API
authorize! update_issuable_key, load_issuable
status :ok
- update_issuable(spend_time: { duration: :reset, user: current_user })
+ update_issuable(spend_time: { duration: :reset, user_id: current_user.id })
end
desc "Show time stats for a project #{issuable_name}"
diff --git a/lib/api/todos.rb b/lib/api/todos.rb
index ffccfebe752..c6dbcf84e3a 100644
--- a/lib/api/todos.rb
+++ b/lib/api/todos.rb
@@ -60,7 +60,7 @@ module API
end
post ':id/mark_as_done' do
TodoService.new.mark_todos_as_done_by_ids(params[:id], current_user)
- todo = Todo.find(params[:id])
+ todo = current_user.todos.find(params[:id])
present todo, with: Entities::Todo, current_user: current_user
end
diff --git a/lib/api/triggers.rb b/lib/api/triggers.rb
index dd6801664b1..b3709455bc3 100644
--- a/lib/api/triggers.rb
+++ b/lib/api/triggers.rb
@@ -15,6 +15,8 @@ module API
optional :variables, type: Hash, desc: 'The list of variables to be injected into build'
end
post ":id/(ref/:ref/)trigger/pipeline", requirements: { ref: /.+/ } do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42283')
+
# validate variables
params[:variables] = params[:variables].to_h
unless params[:variables].all? { |key, value| key.is_a?(String) && value.is_a?(String) }
diff --git a/lib/api/users.rb b/lib/api/users.rb
index bdebda58d3f..3920171205f 100644
--- a/lib/api/users.rb
+++ b/lib/api/users.rb
@@ -2,20 +2,31 @@ module API
class Users < Grape::API
include PaginationParams
include APIGuard
+ include Helpers::CustomAttributes
allow_access_with_scope :read_user, if: -> (request) { request.get? }
resource :users, requirements: { uid: /[0-9]*/, id: /[0-9]*/ } do
+ include CustomAttributesEndpoints
+
before do
authenticate_non_get!
end
helpers do
- def find_user(params)
+ def find_user_by_id(params)
id = params[:user_id] || params[:id]
User.find_by(id: id) || not_found!('User')
end
+ def reorder_users(users)
+ if params[:order_by] && params[:sort]
+ users.reorder(params[:order_by] => params[:sort])
+ else
+ users
+ end
+ end
+
params :optional_attributes do
optional :skype, type: String, desc: 'The Skype username'
optional :linkedin, type: String, desc: 'The LinkedIn username'
@@ -29,11 +40,17 @@ module API
optional :location, type: String, desc: 'The location of the user'
optional :admin, type: Boolean, desc: 'Flag indicating the user is an administrator'
optional :can_create_group, type: Boolean, desc: 'Flag indicating the user can create groups'
- optional :skip_confirmation, type: Boolean, default: false, desc: 'Flag indicating the account is confirmed'
optional :external, type: Boolean, desc: 'Flag indicating the user is an external user'
optional :avatar, type: File, desc: 'Avatar image for user'
all_or_none_of :extern_uid, :provider
end
+
+ params :sort_params do
+ optional :order_by, type: String, values: %w[id name username created_at updated_at],
+ default: 'id', desc: 'Return users ordered by a field'
+ optional :sort, type: String, values: %w[asc desc], default: 'desc',
+ desc: 'Return users sorted in ascending or descending order'
+ end
end
desc 'Get the list of users' do
@@ -52,16 +69,19 @@ module API
optional :created_before, type: DateTime, desc: 'Return users created before the specified time'
all_or_none_of :extern_uid, :provider
+ use :sort_params
use :pagination
+ use :with_custom_attributes
end
get do
authenticated_as_admin! if params[:external].present? || (params[:extern_uid].present? && params[:provider].present?)
unless current_user&.admin?
- params.except!(:created_after, :created_before)
+ params.except!(:created_after, :created_before, :order_by, :sort)
end
users = UsersFinder.new(current_user, params).execute
+ users = reorder_users(users)
authorized = can?(current_user, :read_users_list)
@@ -75,7 +95,10 @@ module API
forbidden!("Not authorized to access /api/v4/users") unless authorized
entity = current_user&.admin? ? Entities::UserWithAdmin : Entities::UserBasic
- present paginate(users), with: entity
+ users = users.preload(:identities, :u2f_registrations) if entity == Entities::UserWithAdmin
+ users, options = with_custom_attributes(users, with: entity)
+
+ present paginate(users), options
end
desc 'Get a single user' do
@@ -83,12 +106,16 @@ module API
end
params do
requires :id, type: Integer, desc: 'The ID of the user'
+
+ use :with_custom_attributes
end
get ":id" do
user = User.find_by(id: params[:id])
not_found!('User') unless user && can?(current_user, :read_user, user)
opts = current_user&.admin? ? { with: Entities::UserWithAdmin } : { with: Entities::User }
+ user, opts = with_custom_attributes(user, opts)
+
present user, opts
end
@@ -99,6 +126,7 @@ module API
requires :email, type: String, desc: 'The email of the user'
optional :password, type: String, desc: 'The password of the new user'
optional :reset_password, type: Boolean, desc: 'Flag indicating the user will be sent a password reset token'
+ optional :skip_confirmation, type: Boolean, desc: 'Flag indicating the account is confirmed'
at_least_one_of :password, :reset_password
requires :name, type: String, desc: 'The name of the user'
requires :username, type: String, desc: 'The username of the user'
@@ -132,6 +160,7 @@ module API
requires :id, type: Integer, desc: 'The ID of the user'
optional :email, type: String, desc: 'The email of the user'
optional :password, type: String, desc: 'The password of the new user'
+ optional :skip_reconfirmation, type: Boolean, desc: 'Flag indicating the account skips email reconfirmation'
optional :name, type: String, desc: 'The name of the user'
optional :username, type: String, desc: 'The username of the user'
use :optional_attributes
@@ -166,7 +195,7 @@ module API
user_params[:password_expires_at] = Time.now if user_params[:password].present?
- result = ::Users::UpdateService.new(user, user_params.except(:extern_uid, :provider)).execute
+ result = ::Users::UpdateService.new(current_user, user_params.except(:extern_uid, :provider).merge(user: user)).execute
if result[:status] == :success
present user, with: Entities::UserPublic
@@ -326,10 +355,9 @@ module API
user = User.find_by(id: params.delete(:id))
not_found!('User') unless user
- email = Emails::CreateService.new(user, declared_params(include_missing: false)).execute
+ email = Emails::CreateService.new(current_user, declared_params(include_missing: false).merge(user: user)).execute
if email.errors.blank?
- NotificationService.new.new_email(email)
present email, with: Entities::Email
else
render_validation_error!(email)
@@ -367,10 +395,8 @@ module API
not_found!('Email') unless email
destroy_conditionally!(email) do |email|
- Emails::DestroyService.new(current_user, email: email.email).execute
+ Emails::DestroyService.new(current_user, user: user).execute(email)
end
-
- user.update_secondary_emails!
end
desc 'Delete a user. Available only for admins.' do
@@ -381,6 +407,8 @@ module API
optional :hard_delete, type: Boolean, desc: "Whether to remove a user's contributions"
end
delete ":id" do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42279')
+
authenticated_as_admin!
user = User.find_by(id: params[:id])
@@ -430,7 +458,7 @@ module API
resource :impersonation_tokens do
helpers do
def finder(options = {})
- user = find_user(params)
+ user = find_user_by_id(params)
PersonalAccessTokensFinder.new({ user: user, impersonation: true }.merge(options))
end
@@ -508,9 +536,7 @@ module API
end
get do
entity =
- if sudo?
- Entities::UserWithPrivateDetails
- elsif current_user.admin?
+ if current_user.admin?
Entities::UserWithAdmin
else
Entities::UserPublic
@@ -672,10 +698,9 @@ module API
requires :email, type: String, desc: 'The new email'
end
post "emails" do
- email = Emails::CreateService.new(current_user, declared_params).execute
+ email = Emails::CreateService.new(current_user, declared_params.merge(user: current_user)).execute
if email.errors.blank?
- NotificationService.new.new_email(email)
present email, with: Entities::Email
else
render_validation_error!(email)
@@ -691,10 +716,8 @@ module API
not_found!('Email') unless email
destroy_conditionally!(email) do |email|
- Emails::DestroyService.new(current_user, email: email.email).execute
+ Emails::DestroyService.new(current_user, user: current_user).execute(email)
end
-
- current_user.update_secondary_emails!
end
desc 'Get a list of user activities'
diff --git a/lib/api/v3/branches.rb b/lib/api/v3/branches.rb
index 81b13249892..25176c5b38e 100644
--- a/lib/api/v3/branches.rb
+++ b/lib/api/v3/branches.rb
@@ -11,12 +11,16 @@ module API
end
resource :projects, requirements: { id: %r{[^/]+} } do
desc 'Get a project repository branches' do
- success ::API::Entities::RepoBranch
+ success ::API::Entities::Branch
end
get ":id/repository/branches" do
- branches = user_project.repository.branches.sort_by(&:name)
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42276')
- present branches, with: ::API::Entities::RepoBranch, project: user_project
+ repository = user_project.repository
+ branches = repository.branches.sort_by(&:name)
+ merged_branch_names = repository.merged_branch_names(branches.map(&:name))
+
+ present branches, with: ::API::Entities::Branch, project: user_project, merged_branch_names: merged_branch_names
end
desc 'Delete a branch'
@@ -47,7 +51,7 @@ module API
end
desc 'Create branch' do
- success ::API::Entities::RepoBranch
+ success ::API::Entities::Branch
end
params do
requires :branch_name, type: String, desc: 'The name of the branch'
@@ -60,7 +64,7 @@ module API
if result[:status] == :success
present result[:branch],
- with: ::API::Entities::RepoBranch,
+ with: ::API::Entities::Branch,
project: user_project
else
render_api_error!(result[:message], 400)
diff --git a/lib/api/v3/builds.rb b/lib/api/v3/builds.rb
index c189d486f50..ac76fece931 100644
--- a/lib/api/v3/builds.rb
+++ b/lib/api/v3/builds.rb
@@ -8,7 +8,7 @@ module API
params do
requires :id, type: String, desc: 'The ID of a project'
end
- resource :projects do
+ resource :projects, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
helpers do
params :optional_scope do
optional :scope, types: [String, Array[String]], desc: 'The scope of builds to show',
@@ -36,6 +36,7 @@ module API
builds = user_project.builds.order('id DESC')
builds = filter_builds(builds, params[:scope])
+ builds = builds.preload(:user, :job_artifacts_archive, :runner, pipeline: :project)
present paginate(builds), with: ::API::V3::Entities::Build
end
@@ -169,7 +170,7 @@ module API
authorize_update_builds!
build = get_build!(params[:build_id])
- authorize!(:update_build, build)
+ authorize!(:erase_build, build)
return forbidden!('Build is not erasable!') unless build.erasable?
build.erase(erased_by: current_user)
diff --git a/lib/api/v3/commits.rb b/lib/api/v3/commits.rb
index 5936f4700aa..4f6ea8f502e 100644
--- a/lib/api/v3/commits.rb
+++ b/lib/api/v3/commits.rb
@@ -11,9 +11,9 @@ module API
params do
requires :id, type: String, desc: 'The ID of a project'
end
- resource :projects, requirements: { id: %r{[^/]+} } do
+ resource :projects, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
desc 'Get a project repository commits' do
- success ::API::Entities::RepoCommit
+ success ::API::Entities::Commit
end
params do
optional :ref_name, type: String, desc: 'The name of a repository branch or tag, if not given the default branch is used'
@@ -34,11 +34,11 @@ module API
after: params[:since],
before: params[:until])
- present commits, with: ::API::Entities::RepoCommit
+ present commits, with: ::API::Entities::Commit
end
desc 'Commit multiple file changes as one commit' do
- success ::API::Entities::RepoCommitDetail
+ success ::API::Entities::CommitDetail
detail 'This feature was introduced in GitLab 8.13'
end
params do
@@ -59,25 +59,26 @@ module API
if result[:status] == :success
commit_detail = user_project.repository.commits(result[:result], limit: 1).first
- present commit_detail, with: ::API::Entities::RepoCommitDetail
+ present commit_detail, with: ::API::Entities::CommitDetail
else
render_api_error!(result[:message], 400)
end
end
desc 'Get a specific commit of a project' do
- success ::API::Entities::RepoCommitDetail
+ success ::API::Entities::CommitDetail
failure [[404, 'Not Found']]
end
params do
requires :sha, type: String, desc: 'A commit sha, or the name of a branch or tag'
+ optional :stats, type: Boolean, default: true, desc: 'Include commit stats'
end
- get ":id/repository/commits/:sha" do
+ get ":id/repository/commits/:sha", requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
commit = user_project.commit(params[:sha])
not_found! "Commit" unless commit
- present commit, with: ::API::Entities::RepoCommitDetail
+ present commit, with: ::API::Entities::CommitDetail, stats: params[:stats]
end
desc 'Get the diff for a specific commit of a project' do
@@ -86,7 +87,7 @@ module API
params do
requires :sha, type: String, desc: 'A commit sha, or the name of a branch or tag'
end
- get ":id/repository/commits/:sha/diff" do
+ get ":id/repository/commits/:sha/diff", requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
commit = user_project.commit(params[:sha])
not_found! "Commit" unless commit
@@ -102,24 +103,24 @@ module API
use :pagination
requires :sha, type: String, desc: 'A commit sha, or the name of a branch or tag'
end
- get ':id/repository/commits/:sha/comments' do
+ get ':id/repository/commits/:sha/comments', requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
commit = user_project.commit(params[:sha])
not_found! 'Commit' unless commit
- notes = Note.where(commit_id: commit.id).order(:created_at)
+ notes = commit.notes.order(:created_at)
present paginate(notes), with: ::API::Entities::CommitNote
end
desc 'Cherry pick commit into a branch' do
detail 'This feature was introduced in GitLab 8.15'
- success ::API::Entities::RepoCommit
+ success ::API::Entities::Commit
end
params do
requires :sha, type: String, desc: 'A commit sha to be cherry picked'
requires :branch, type: String, desc: 'The name of the branch'
end
- post ':id/repository/commits/:sha/cherry_pick' do
+ post ':id/repository/commits/:sha/cherry_pick', requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
authorize! :push_code, user_project
commit = user_project.commit(params[:sha])
@@ -138,7 +139,7 @@ module API
if result[:status] == :success
branch = user_project.repository.find_branch(params[:branch])
- present user_project.repository.commit(branch.dereferenced_target), with: ::API::Entities::RepoCommit
+ present user_project.repository.commit(branch.dereferenced_target), with: ::API::Entities::Commit
else
render_api_error!(result[:message], 400)
end
@@ -156,7 +157,7 @@ module API
requires :line_type, type: String, values: %w(new old), default: 'new', desc: 'The type of the line'
end
end
- post ':id/repository/commits/:sha/comments' do
+ post ':id/repository/commits/:sha/comments', requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
commit = user_project.commit(params[:sha])
not_found! 'Commit' unless commit
@@ -169,11 +170,13 @@ module API
if params[:path]
commit.raw_diffs(limits: false).each do |diff|
next unless diff.new_path == params[:path]
+
lines = Gitlab::Diff::Parser.new.parse(diff.diff.each_line)
lines.each do |line|
next unless line.new_pos == params[:line] && line.type == params[:line_type]
- break opts[:line_code] = Gitlab::Diff::LineCode.generate(diff.new_path, line.new_pos, line.old_pos)
+
+ break opts[:line_code] = Gitlab::Git.diff_line_code(diff.new_path, line.new_pos, line.old_pos)
end
break if opts[:line_code]
diff --git a/lib/api/v3/deploy_keys.rb b/lib/api/v3/deploy_keys.rb
index b90e2061da3..47e54ca85a5 100644
--- a/lib/api/v3/deploy_keys.rb
+++ b/lib/api/v3/deploy_keys.rb
@@ -3,6 +3,16 @@ module API
class DeployKeys < Grape::API
before { authenticate! }
+ helpers do
+ def add_deploy_keys_project(project, attrs = {})
+ project.deploy_keys_projects.create(attrs)
+ end
+
+ def find_by_deploy_key(project, key_id)
+ project.deploy_keys_projects.find_by!(deploy_key: key_id)
+ end
+ end
+
get "deploy_keys" do
authenticated_as_admin!
@@ -18,25 +28,28 @@ module API
%w(keys deploy_keys).each do |path|
desc "Get a specific project's deploy keys" do
- success ::API::Entities::SSHKey
+ success ::API::Entities::DeployKeysProject
end
get ":id/#{path}" do
- present user_project.deploy_keys, with: ::API::Entities::SSHKey
+ keys = user_project.deploy_keys_projects.preload(:deploy_key)
+
+ present keys, with: ::API::Entities::DeployKeysProject
end
desc 'Get single deploy key' do
- success ::API::Entities::SSHKey
+ success ::API::Entities::DeployKeysProject
end
params do
requires :key_id, type: Integer, desc: 'The ID of the deploy key'
end
get ":id/#{path}/:key_id" do
- key = user_project.deploy_keys.find params[:key_id]
- present key, with: ::API::Entities::SSHKey
+ key = find_by_deploy_key(user_project, params[:key_id])
+
+ present key, with: ::API::Entities::DeployKeysProject
end
desc 'Add new deploy key to currently authenticated user' do
- success ::API::Entities::SSHKey
+ success ::API::Entities::DeployKeysProject
end
params do
requires :key, type: String, desc: 'The new deploy key'
@@ -47,24 +60,31 @@ module API
params[:key].strip!
# Check for an existing key joined to this project
- key = user_project.deploy_keys.find_by(key: params[:key])
+ key = user_project.deploy_keys_projects
+ .joins(:deploy_key)
+ .find_by(keys: { key: params[:key] })
+
if key
- present key, with: ::API::Entities::SSHKey
+ present key, with: ::API::Entities::DeployKeysProject
break
end
# Check for available deploy keys in other projects
key = current_user.accessible_deploy_keys.find_by(key: params[:key])
if key
- user_project.deploy_keys << key
- present key, with: ::API::Entities::SSHKey
+ added_key = add_deploy_keys_project(user_project, deploy_key: key, can_push: !!params[:can_push])
+
+ present added_key, with: ::API::Entities::DeployKeysProject
break
end
# Create a new deploy key
- key = DeployKey.new(declared_params(include_missing: false))
- if key.valid? && user_project.deploy_keys << key
- present key, with: ::API::Entities::SSHKey
+ key_attributes = { can_push: !!params[:can_push],
+ deploy_key_attributes: declared_params.except(:can_push) }
+ key = add_deploy_keys_project(user_project, key_attributes)
+
+ if key.valid?
+ present key, with: ::API::Entities::DeployKeysProject
else
render_validation_error!(key)
end
diff --git a/lib/api/v3/entities.rb b/lib/api/v3/entities.rb
index c928ce5265b..68b4d7c3982 100644
--- a/lib/api/v3/entities.rb
+++ b/lib/api/v3/entities.rb
@@ -172,8 +172,8 @@ module API
expose :id
expose :default_projects_limit
expose :signup_enabled
- expose :password_authentication_enabled
- expose :password_authentication_enabled, as: :signin_enabled
+ expose :password_authentication_enabled_for_web, as: :password_authentication_enabled
+ expose :password_authentication_enabled_for_web, as: :signin_enabled
expose :gravatar_enabled
expose :sign_in_text
expose :after_sign_up_text
@@ -207,7 +207,7 @@ module API
end
class Trigger < Grape::Entity
- expose :token, :created_at, :updated_at, :deleted_at, :last_used
+ expose :token, :created_at, :updated_at, :last_used
expose :owner, using: ::API::Entities::UserBasic
end
@@ -220,7 +220,7 @@ module API
expose :created_at, :started_at, :finished_at
expose :user, with: ::API::Entities::User
expose :artifacts_file, using: ::API::Entities::JobArtifactFile, if: -> (build, opts) { build.artifacts? }
- expose :commit, with: ::API::Entities::RepoCommit
+ expose :commit, with: ::API::Entities::Commit
expose :runner, with: ::API::Entities::Runner
expose :pipeline, with: ::API::Entities::PipelineBasic
end
@@ -237,7 +237,7 @@ module API
end
class MergeRequestChanges < MergeRequest
- expose :diffs, as: :changes, using: ::API::Entities::RepoDiff do |compare, _|
+ expose :diffs, as: :changes, using: ::API::Entities::Diff do |compare, _|
compare.raw_diffs(limits: false).to_a
end
end
@@ -252,21 +252,20 @@ module API
class ProjectService < Grape::Entity
expose :id, :title, :created_at, :updated_at, :active
- expose :push_events, :issues_events, :merge_requests_events
- expose :tag_push_events, :note_events, :pipeline_events
+ expose :push_events, :issues_events, :confidential_issues_events
+ expose :merge_requests_events, :tag_push_events, :note_events
+ expose :pipeline_events
expose :job_events, as: :build_events
# Expose serialized properties
expose :properties do |service, options|
- field_names = service.fields
- .select { |field| options[:include_passwords] || field[:type] != 'password' }
- .map { |field| field[:name] }
- service.properties.slice(*field_names)
+ service.properties.slice(*service.api_field_names)
end
end
class ProjectHook < ::API::Entities::Hook
- expose :project_id, :issues_events, :merge_requests_events
- expose :note_events, :pipeline_events, :wiki_page_events
+ expose :project_id, :issues_events, :confidential_issues_events
+ expose :merge_requests_events, :note_events, :pipeline_events
+ expose :wiki_page_events
expose :job_events, as: :build_events
end
diff --git a/lib/api/v3/issues.rb b/lib/api/v3/issues.rb
index cb371fdbab8..b59947d81d9 100644
--- a/lib/api/v3/issues.rb
+++ b/lib/api/v3/issues.rb
@@ -134,6 +134,8 @@ module API
use :issue_params
end
post ':id/issues' do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42131')
+
# Setting created_at time only allowed for admins and project owners
unless current_user.admin? || user_project.owner == current_user
params.delete(:created_at)
@@ -169,6 +171,8 @@ module API
:labels, :created_at, :due_date, :confidential, :state_event
end
put ':id/issues/:issue_id' do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42132')
+
issue = user_project.issues.find(params.delete(:issue_id))
authorize! :update_issue, issue
@@ -201,6 +205,8 @@ module API
requires :to_project_id, type: Integer, desc: 'The ID of the new project'
end
post ':id/issues/:issue_id/move' do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42133')
+
issue = user_project.issues.find_by(id: params[:issue_id])
not_found!('Issue') unless issue
diff --git a/lib/api/v3/labels.rb b/lib/api/v3/labels.rb
index bd5eb2175e8..4157462ec2a 100644
--- a/lib/api/v3/labels.rb
+++ b/lib/api/v3/labels.rb
@@ -11,7 +11,7 @@ module API
success ::API::Entities::Label
end
get ':id/labels' do
- present available_labels, with: ::API::Entities::Label, current_user: current_user, project: user_project
+ present available_labels_for(user_project), with: ::API::Entities::Label, current_user: current_user, project: user_project
end
desc 'Delete an existing label' do
diff --git a/lib/api/v3/members.rb b/lib/api/v3/members.rb
index 684860b553e..88dd598f1e9 100644
--- a/lib/api/v3/members.rb
+++ b/lib/api/v3/members.rb
@@ -22,10 +22,11 @@ module API
get ":id/members" do
source = find_source(source_type, params[:id])
- users = source.users
- users = users.merge(User.search(params[:query])) if params[:query]
+ members = source.members.where.not(user_id: nil).includes(:user)
+ members = members.joins(:user).merge(User.search(params[:query])) if params[:query].present?
+ members = paginate(members)
- present paginate(users), with: ::API::Entities::Member, source: source
+ present members, with: ::API::Entities::Member
end
desc 'Gets a member of a group or project.' do
@@ -40,7 +41,7 @@ module API
members = source.members
member = members.find_by!(user_id: params[:user_id])
- present member.user, with: ::API::Entities::Member, member: member
+ present member, with: ::API::Entities::Member
end
desc 'Adds a member to a group or project.' do
@@ -67,8 +68,9 @@ module API
unless member
member = source.add_user(params[:user_id], params[:access_level], current_user: current_user, expires_at: params[:expires_at])
end
+
if member.persisted? && member.valid?
- present member.user, with: ::API::Entities::Member, member: member
+ present member, with: ::API::Entities::Member
else
# This is to ensure back-compatibility but 400 behavior should be used
# for all validation errors in 9.0!
@@ -92,7 +94,7 @@ module API
member = source.members.find_by!(user_id: params.delete(:user_id))
if member.update_attributes(declared_params(include_missing: false))
- present member.user, with: ::API::Entities::Member, member: member
+ present member, with: ::API::Entities::Member
else
# This is to ensure back-compatibility but 400 behavior should be used
# for all validation errors in 9.0!
@@ -122,9 +124,9 @@ module API
status(200 )
{ message: "Access revoked", id: params[:user_id].to_i }
else
- ::Members::DestroyService.new(source, current_user, declared_params).execute
+ ::Members::DestroyService.new(current_user).execute(member)
- present member.user, with: ::API::Entities::Member, member: member
+ present member, with: ::API::Entities::Member
end
end
end
diff --git a/lib/api/v3/merge_requests.rb b/lib/api/v3/merge_requests.rb
index b6b7254ae29..ce216497996 100644
--- a/lib/api/v3/merge_requests.rb
+++ b/lib/api/v3/merge_requests.rb
@@ -91,6 +91,8 @@ module API
use :optional_params
end
post ":id/merge_requests" do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42126')
+
authorize! :create_merge_request, user_project
mr_params = declared_params(include_missing: false)
@@ -126,6 +128,7 @@ module API
if status == :deprecated
detail DEPRECATION_MESSAGE
end
+
success ::API::V3::Entities::MergeRequest
end
get path do
@@ -135,12 +138,12 @@ module API
end
desc 'Get the commits of a merge request' do
- success ::API::Entities::RepoCommit
+ success ::API::Entities::Commit
end
get "#{path}/commits" do
merge_request = find_merge_request_with_access(params[:merge_request_id])
- present merge_request.commits, with: ::API::Entities::RepoCommit
+ present merge_request.commits, with: ::API::Entities::Commit
end
desc 'Show the merge request changes' do
@@ -166,6 +169,8 @@ module API
:remove_source_branch
end
put path do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42127')
+
merge_request = find_merge_request_with_access(params.delete(:merge_request_id), :update_merge_request)
mr_params = declared_params(include_missing: false)
diff --git a/lib/api/v3/pipelines.rb b/lib/api/v3/pipelines.rb
index c48cbd2b765..6d31c12f572 100644
--- a/lib/api/v3/pipelines.rb
+++ b/lib/api/v3/pipelines.rb
@@ -19,6 +19,8 @@ module API
desc: 'Either running, branches, or tags'
end
get ':id/pipelines' do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42123')
+
authorize! :read_pipeline, user_project
pipelines = PipelinesFinder.new(user_project, scope: params[:scope]).execute
diff --git a/lib/api/v3/project_hooks.rb b/lib/api/v3/project_hooks.rb
index 51014591a93..631944150c7 100644
--- a/lib/api/v3/project_hooks.rb
+++ b/lib/api/v3/project_hooks.rb
@@ -11,6 +11,7 @@ module API
requires :url, type: String, desc: "The URL to send the request to"
optional :push_events, type: Boolean, desc: "Trigger hook on push events"
optional :issues_events, type: Boolean, desc: "Trigger hook on issues events"
+ optional :confidential_issues_events, type: Boolean, desc: "Trigger hook on confidential issues events"
optional :merge_requests_events, type: Boolean, desc: "Trigger hook on merge request events"
optional :tag_push_events, type: Boolean, desc: "Trigger hook on tag push events"
optional :note_events, type: Boolean, desc: "Trigger hook on note(comment) events"
diff --git a/lib/api/v3/project_snippets.rb b/lib/api/v3/project_snippets.rb
index c41fee32610..6ba425ba8c7 100644
--- a/lib/api/v3/project_snippets.rb
+++ b/lib/api/v3/project_snippets.rb
@@ -14,6 +14,7 @@ module API
if errors[:project_access].any?
error!(errors[:project_access], 422)
end
+
not_found!
end
diff --git a/lib/api/v3/projects.rb b/lib/api/v3/projects.rb
index 7c260b8d910..7d8b1f369fe 100644
--- a/lib/api/v3/projects.rb
+++ b/lib/api/v3/projects.rb
@@ -41,6 +41,7 @@ module API
# private or internal, use the more conservative option, private.
attrs[:visibility_level] = (publik == true) ? Gitlab::VisibilityLevel::PUBLIC : Gitlab::VisibilityLevel::PRIVATE
end
+
attrs
end
@@ -172,9 +173,9 @@ module API
use :sort_params
use :pagination
end
- get "/search/:query", requirements: { query: /[^\/]+/ } do
- search_service = Search::GlobalService.new(current_user, search: params[:query]).execute
- projects = search_service.objects('projects', params[:page])
+ get "/search/:query", requirements: { query: %r{[^/]+} } do
+ search_service = ::Search::GlobalService.new(current_user, search: params[:query]).execute
+ projects = search_service.objects('projects', params[:page], false)
projects = projects.reorder(params[:order_by] => params[:sort])
present paginate(projects), with: ::API::V3::Entities::Project
@@ -201,6 +202,7 @@ module API
if project.errors[:limit_reached].present?
error!(project.errors[:limit_reached], 403)
end
+
render_validation_error!(project)
end
end
diff --git a/lib/api/v3/repositories.rb b/lib/api/v3/repositories.rb
index 0eaa0de2eef..5b54734bb45 100644
--- a/lib/api/v3/repositories.rb
+++ b/lib/api/v3/repositories.rb
@@ -8,18 +8,19 @@ module API
params do
requires :id, type: String, desc: 'The ID of a project'
end
- resource :projects, requirements: { id: %r{[^/]+} } do
+ resource :projects, requirements: API::PROJECT_ENDPOINT_REQUIREMENTS do
helpers do
def handle_project_member_errors(errors)
if errors[:project_access].any?
error!(errors[:project_access], 422)
end
+
not_found!
end
end
desc 'Get a project repository tree' do
- success ::API::Entities::RepoTreeObject
+ success ::API::Entities::TreeObject
end
params do
optional :ref_name, type: String, desc: 'The name of a repository branch or tag, if not given the default branch is used'
@@ -35,7 +36,7 @@ module API
tree = user_project.repository.tree(commit.id, path, recursive: params[:recursive])
- present tree.sorted_entries, with: ::API::Entities::RepoTreeObject
+ present tree.sorted_entries, with: ::API::Entities::TreeObject
end
desc 'Get a raw file contents'
@@ -43,7 +44,7 @@ module API
requires :sha, type: String, desc: 'The commit, branch name, or tag name'
requires :filepath, type: String, desc: 'The path to the file to display'
end
- get [":id/repository/blobs/:sha", ":id/repository/commits/:sha/blob"] do
+ get [":id/repository/blobs/:sha", ":id/repository/commits/:sha/blob"], requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
repo = user_project.repository
commit = repo.commit(params[:sha])
not_found! "Commit" unless commit
@@ -56,7 +57,7 @@ module API
params do
requires :sha, type: String, desc: 'The commit, branch name, or tag name'
end
- get ':id/repository/raw_blobs/:sha' do
+ get ':id/repository/raw_blobs/:sha', requirements: API::COMMIT_ENDPOINT_REQUIREMENTS do
repo = user_project.repository
begin
blob = Gitlab::Git::Blob.raw(repo, params[:sha])
diff --git a/lib/api/v3/runners.rb b/lib/api/v3/runners.rb
index faa265f3314..c6d9957d452 100644
--- a/lib/api/v3/runners.rb
+++ b/lib/api/v3/runners.rb
@@ -51,6 +51,7 @@ module API
helpers do
def authenticate_delete_runner!(runner)
return if current_user.admin?
+
forbidden!("Runner is shared") if runner.is_shared?
forbidden!("Runner associated with more than one project") if runner.projects.count > 1
forbidden!("No access granted") unless user_can_access_runner?(runner)
diff --git a/lib/api/v3/services.rb b/lib/api/v3/services.rb
index 2d13d6fabfd..20ca1021c71 100644
--- a/lib/api/v3/services.rb
+++ b/lib/api/v3/services.rb
@@ -395,6 +395,26 @@ module API
desc: 'The Slack token'
}
],
+ 'packagist' => [
+ {
+ required: true,
+ name: :username,
+ type: String,
+ desc: 'The username'
+ },
+ {
+ required: true,
+ name: :token,
+ type: String,
+ desc: 'The Packagist API token'
+ },
+ {
+ required: false,
+ name: :server,
+ type: String,
+ desc: 'The server'
+ }
+ ],
'pipelines-email' => [
{
required: true,
@@ -602,7 +622,7 @@ module API
end
get ":id/services/:service_slug" do
service = user_project.find_or_initialize_service(params[:service_slug].underscore)
- present service, with: Entities::ProjectService, include_passwords: current_user.admin?
+ present service, with: Entities::ProjectService
end
end
diff --git a/lib/api/v3/settings.rb b/lib/api/v3/settings.rb
index 202011cfcbe..9b4ab7630fb 100644
--- a/lib/api/v3/settings.rb
+++ b/lib/api/v3/settings.rb
@@ -44,8 +44,8 @@ module API
requires :domain_blacklist, type: String, desc: 'Users with e-mail addresses that match these domain(s) will NOT be able to sign-up. Wildcards allowed. Use separate lines for multiple entries. Ex: domain.com, *.domain.com'
end
optional :after_sign_up_text, type: String, desc: 'Text shown after sign up'
- optional :password_authentication_enabled, type: Boolean, desc: 'Flag indicating if password authentication is enabled'
- optional :signin_enabled, type: Boolean, desc: 'Flag indicating if password authentication is enabled'
+ optional :password_authentication_enabled, type: Boolean, desc: 'Flag indicating if password authentication is enabled for the web interface'
+ optional :signin_enabled, type: Boolean, desc: 'Flag indicating if password authentication is enabled for the web interface'
mutually_exclusive :password_authentication_enabled, :signin_enabled
optional :require_two_factor_authentication, type: Boolean, desc: 'Require all users to setup Two-factor authentication'
given require_two_factor_authentication: ->(val) { val } do
@@ -131,7 +131,9 @@ module API
attrs = declared_params(include_missing: false)
if attrs.has_key?(:signin_enabled)
- attrs[:password_authentication_enabled] = attrs.delete(:signin_enabled)
+ attrs[:password_authentication_enabled_for_web] = attrs.delete(:signin_enabled)
+ elsif attrs.has_key?(:password_authentication_enabled)
+ attrs[:password_authentication_enabled_for_web] = attrs.delete(:password_authentication_enabled)
end
if current_settings.update_attributes(attrs)
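
For illustration, after this change both the deprecated v3 signin_enabled flag and the renamed password_authentication_enabled flag funnel into the same internal setting before update_attributes runs. A minimal sketch of the mapping (the request and values are hypothetical examples, not part of the patch):

    # PUT /api/v3/application/settings?signin_enabled=false
    attrs = { signin_enabled: false }
    # after the branch above runs:
    attrs  # => { password_authentication_enabled_for_web: false }
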
diff --git a/lib/api/v3/snippets.rb b/lib/api/v3/snippets.rb
index 0762fc02d70..85613c8ed84 100644
--- a/lib/api/v3/snippets.rb
+++ b/lib/api/v3/snippets.rb
@@ -91,11 +91,13 @@ module API
put ':id' do
snippet = snippets_for_current_user.find_by(id: params.delete(:id))
return not_found!('Snippet') unless snippet
+
authorize! :update_personal_snippet, snippet
attrs = declared_params(include_missing: false)
UpdateSnippetService.new(nil, current_user, snippet, attrs).execute
+
if snippet.persisted?
present snippet, with: ::API::Entities::PersonalSnippet
else
@@ -113,6 +115,7 @@ module API
delete ':id' do
snippet = snippets_for_current_user.find_by(id: params.delete(:id))
return not_found!('Snippet') unless snippet
+
authorize! :destroy_personal_snippet, snippet
snippet.destroy
no_content!
diff --git a/lib/api/v3/tags.rb b/lib/api/v3/tags.rb
index 7e5875cd030..6e37d31d153 100644
--- a/lib/api/v3/tags.rb
+++ b/lib/api/v3/tags.rb
@@ -8,11 +8,11 @@ module API
end
resource :projects, requirements: { id: %r{[^/]+} } do
desc 'Get a project repository tags' do
- success ::API::Entities::RepoTag
+ success ::API::Entities::Tag
end
get ":id/repository/tags" do
tags = user_project.repository.tags.sort_by(&:name).reverse
- present tags, with: ::API::Entities::RepoTag, project: user_project
+ present tags, with: ::API::Entities::Tag, project: user_project
end
desc 'Delete a repository tag'
diff --git a/lib/api/v3/templates.rb b/lib/api/v3/templates.rb
index 2a2fb59045c..b82b02b5f49 100644
--- a/lib/api/v3/templates.rb
+++ b/lib/api/v3/templates.rb
@@ -16,15 +16,15 @@ module API
}
}.freeze
PROJECT_TEMPLATE_REGEX =
- /[\<\{\[]
+ %r{[\<\{\[]
(project|description|
one\sline\s.+\swhat\sit\sdoes\.) # matching the start and end is enough here
- [\>\}\]]/xi.freeze
+ [\>\}\]]}xi.freeze
YEAR_TEMPLATE_REGEX = /[<{\[](year|yyyy)[>}\]]/i.freeze
FULLNAME_TEMPLATE_REGEX =
- /[\<\{\[]
+ %r{[\<\{\[]
(fullname|name\sof\s(author|copyright\sowner))
- [\>\}\]]/xi.freeze
+ [\>\}\]]}xi.freeze
DEPRECATION_MESSAGE = ' This endpoint is deprecated and has been removed in V4.'.freeze
helpers do
@@ -52,7 +52,7 @@ module API
detailed_desc = 'This feature was introduced in GitLab 8.7.'
detailed_desc << DEPRECATION_MESSAGE unless status == :ok
detail detailed_desc
- success ::API::Entities::RepoLicense
+ success ::API::Entities::License
end
params do
optional :popular, type: Boolean, desc: 'If passed, returns only popular licenses'
@@ -61,7 +61,7 @@ module API
options = {
featured: declared(params)[:popular].present? ? true : nil
}
- present Licensee::License.all(options), with: ::API::Entities::RepoLicense
+ present Licensee::License.all(options), with: ::API::Entities::License
end
end
@@ -70,7 +70,7 @@ module API
detailed_desc = 'This feature was introduced in GitLab 8.7.'
detailed_desc << DEPRECATION_MESSAGE unless status == :ok
detail detailed_desc
- success ::API::Entities::RepoLicense
+ success ::API::Entities::License
end
params do
requires :name, type: String, desc: 'The name of the template'
@@ -80,7 +80,7 @@ module API
template = parsed_license_template
- present template, with: ::API::Entities::RepoLicense
+ present template, with: ::API::Entities::License
end
end
diff --git a/lib/api/v3/time_tracking_endpoints.rb b/lib/api/v3/time_tracking_endpoints.rb
index d5b90e435ba..1aad39815f9 100644
--- a/lib/api/v3/time_tracking_endpoints.rb
+++ b/lib/api/v3/time_tracking_endpoints.rb
@@ -86,7 +86,7 @@ module API
update_issuable(spend_time: {
duration: Gitlab::TimeTrackingFormatter.parse(params.delete(:duration)),
- user: current_user
+ user_id: current_user.id
})
end
@@ -98,7 +98,7 @@ module API
authorize! update_issuable_key, load_issuable
status :ok
- update_issuable(spend_time: { duration: :reset, user: current_user })
+ update_issuable(spend_time: { duration: :reset, user_id: current_user.id })
end
desc "Show time stats for a project #{issuable_name}"
diff --git a/lib/api/v3/todos.rb b/lib/api/v3/todos.rb
index 2f2cf259987..3e2c61f6dbd 100644
--- a/lib/api/v3/todos.rb
+++ b/lib/api/v3/todos.rb
@@ -12,7 +12,7 @@ module API
end
delete ':id' do
TodoService.new.mark_todos_as_done_by_ids(params[:id], current_user)
- todo = Todo.find(params[:id])
+ todo = current_user.todos.find(params[:id])
present todo, with: ::API::Entities::Todo, current_user: current_user
end
diff --git a/lib/api/v3/triggers.rb b/lib/api/v3/triggers.rb
index 534911fde5c..34f07dfb486 100644
--- a/lib/api/v3/triggers.rb
+++ b/lib/api/v3/triggers.rb
@@ -16,6 +16,8 @@ module API
optional :variables, type: Hash, desc: 'The list of variables to be injected into build'
end
post ":id/(ref/:ref/)trigger/builds", requirements: { ref: /.+/ } do
+ Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/42121')
+
# validate variables
params[:variables] = params[:variables].to_h
unless params[:variables].all? { |key, value| key.is_a?(String) && value.is_a?(String) }
diff --git a/lib/backup/artifacts.rb b/lib/backup/artifacts.rb
index 1f4bda6f588..4383124d150 100644
--- a/lib/backup/artifacts.rb
+++ b/lib/backup/artifacts.rb
@@ -3,7 +3,7 @@ require 'backup/files'
module Backup
class Artifacts < Files
def initialize
- super('artifacts', ArtifactUploader.local_artifacts_store)
+ super('artifacts', JobArtifactUploader.root)
end
def create_files_dir
diff --git a/lib/backup/database.rb b/lib/backup/database.rb
index d97e5d98229..5e6828de597 100644
--- a/lib/backup/database.rb
+++ b/lib/backup/database.rb
@@ -31,6 +31,7 @@ module Backup
pgsql_args << "-n"
pgsql_args << Gitlab.config.backup.pg_schema
end
+
spawn('pg_dump', *pgsql_args, config['database'], out: compress_wr)
end
compress_wr.close
diff --git a/lib/backup/files.rb b/lib/backup/files.rb
index 30a91647b77..287d591e88d 100644
--- a/lib/backup/files.rb
+++ b/lib/backup/files.rb
@@ -18,7 +18,7 @@ module Backup
FileUtils.rm_f(backup_tarball)
if ENV['STRATEGY'] == 'copy'
- cmd = %W(cp -a #{app_files_dir} #{Gitlab.config.backup.path})
+ cmd = %W(rsync -a --exclude=lost+found #{app_files_dir} #{Gitlab.config.backup.path})
output, status = Gitlab::Popen.popen(cmd)
unless status.zero?
@@ -26,10 +26,10 @@ module Backup
abort 'Backup failed'
end
- run_pipeline!([%W(tar -C #{@backup_files_dir} -cf - .), %w(gzip -c -1)], out: [backup_tarball, 'w', 0600])
+ run_pipeline!([%W(tar --exclude=lost+found -C #{@backup_files_dir} -cf - .), %w(gzip -c -1)], out: [backup_tarball, 'w', 0600])
FileUtils.rm_rf(@backup_files_dir)
else
- run_pipeline!([%W(tar -C #{app_files_dir} -cf - .), %w(gzip -c -1)], out: [backup_tarball, 'w', 0600])
+ run_pipeline!([%W(tar --exclude=lost+found -C #{app_files_dir} -cf - .), %w(gzip -c -1)], out: [backup_tarball, 'w', 0600])
end
end
diff --git a/lib/backup/manager.rb b/lib/backup/manager.rb
index 3cf3939994a..f27ce4d2b2b 100644
--- a/lib/backup/manager.rb
+++ b/lib/backup/manager.rb
@@ -101,50 +101,55 @@ module Backup
end
def unpack
- Dir.chdir(backup_path)
-
- # check for existing backups in the backup dir
- if backup_file_list.empty?
- $progress.puts "No backups found in #{backup_path}"
- $progress.puts "Please make sure that file name ends with #{FILE_NAME_SUFFIX}"
- exit 1
- elsif backup_file_list.many? && ENV["BACKUP"].nil?
- $progress.puts 'Found more than one backup, please specify which one you want to restore:'
- $progress.puts 'rake gitlab:backup:restore BACKUP=timestamp_of_backup'
- exit 1
- end
+ Dir.chdir(backup_path) do
+ # check for existing backups in the backup dir
+ if backup_file_list.empty?
+ $progress.puts "No backups found in #{backup_path}"
+ $progress.puts "Please make sure that file name ends with #{FILE_NAME_SUFFIX}"
+ exit 1
+ elsif backup_file_list.many? && ENV["BACKUP"].nil?
+ $progress.puts 'Found more than one backup:'
+ # print list of available backups
+ $progress.puts " " + available_timestamps.join("\n ")
+ $progress.puts 'Please specify which one you want to restore:'
+ $progress.puts 'rake gitlab:backup:restore BACKUP=timestamp_of_backup'
+ exit 1
+ end
- tar_file = if ENV['BACKUP'].present?
- "#{ENV['BACKUP']}#{FILE_NAME_SUFFIX}"
- else
- backup_file_list.first
- end
+ tar_file = if ENV['BACKUP'].present?
+ "#{ENV['BACKUP']}#{FILE_NAME_SUFFIX}"
+ else
+ backup_file_list.first
+ end
- unless File.exist?(tar_file)
- $progress.puts "The backup file #{tar_file} does not exist!"
- exit 1
- end
+ unless File.exist?(tar_file)
+ $progress.puts "The backup file #{tar_file} does not exist!"
+ exit 1
+ end
- $progress.print 'Unpacking backup ... '
+ $progress.print 'Unpacking backup ... '
- unless Kernel.system(*%W(tar -xf #{tar_file}))
- $progress.puts 'unpacking backup failed'.color(:red)
- exit 1
- else
- $progress.puts 'done'.color(:green)
- end
+ unless Kernel.system(*%W(tar -xf #{tar_file}))
+ $progress.puts 'unpacking backup failed'.color(:red)
+ exit 1
+ else
+ $progress.puts 'done'.color(:green)
+ end
- ENV["VERSION"] = "#{settings[:db_version]}" if settings[:db_version].to_i > 0
-
- # restoring mismatching backups can lead to unexpected problems
- if settings[:gitlab_version] != Gitlab::VERSION
- $progress.puts 'GitLab version mismatch:'.color(:red)
- $progress.puts " Your current GitLab version (#{Gitlab::VERSION}) differs from the GitLab version in the backup!".color(:red)
- $progress.puts ' Please switch to the following version and try again:'.color(:red)
- $progress.puts " version: #{settings[:gitlab_version]}".color(:red)
- $progress.puts
- $progress.puts "Hint: git checkout v#{settings[:gitlab_version]}"
- exit 1
+ ENV["VERSION"] = "#{settings[:db_version]}" if settings[:db_version].to_i > 0
+
+ # restoring mismatching backups can lead to unexpected problems
+ if settings[:gitlab_version] != Gitlab::VERSION
+ $progress.puts(<<~HEREDOC.color(:red))
+ GitLab version mismatch:
+ Your current GitLab version (#{Gitlab::VERSION}) differs from the GitLab version in the backup!
+ Please switch to the following version and try again:
+ version: #{settings[:gitlab_version]}
+ HEREDOC
+ $progress.puts
+ $progress.puts "Hint: git checkout v#{settings[:gitlab_version]}"
+ exit 1
+ end
end
end
@@ -167,6 +172,10 @@ module Backup
@backup_file_list ||= Dir.glob("*#{FILE_NAME_SUFFIX}")
end
+ def available_timestamps
+ @backup_file_list.map {|item| item.gsub("#{FILE_NAME_SUFFIX}", "")}
+ end
+
def connect_to_remote_directory(connection_settings)
# our settings use string keys, but Fog expects symbols
connection = ::Fog::Storage.new(connection_settings.symbolize_keys)
diff --git a/lib/backup/repository.rb b/lib/backup/repository.rb
index 4e92be85110..6715159a1aa 100644
--- a/lib/backup/repository.rb
+++ b/lib/backup/repository.rb
@@ -7,12 +7,16 @@ module Backup
prepare
Project.find_each(batch_size: 1000) do |project|
- progress.print " * #{project.full_path} ... "
+ progress.print " * #{display_repo_path(project)} ... "
path_to_project_repo = path_to_repo(project)
path_to_project_bundle = path_to_bundle(project)
- # Create namespace dir if missing
- FileUtils.mkdir_p(File.join(backup_repos_path, project.namespace.full_path)) if project.namespace
+ # Create namespace dir or hashed path if missing
+ if project.hashed_storage?(:repository)
+ FileUtils.mkdir_p(File.dirname(File.join(backup_repos_path, project.disk_path)))
+ else
+ FileUtils.mkdir_p(File.join(backup_repos_path, project.namespace.full_path)) if project.namespace
+ end
if empty_repo?(project)
progress.puts "[SKIPPED]".color(:cyan)
@@ -42,7 +46,8 @@ module Backup
path_to_wiki_bundle = path_to_bundle(wiki)
if File.exist?(path_to_wiki_repo)
- progress.print " * #{wiki.full_path} ... "
+ progress.print " * #{display_repo_path(wiki)} ... "
+
if empty_repo?(wiki)
progress.puts " [SKIPPED]".color(:cyan)
else
@@ -71,14 +76,14 @@ module Backup
end
Project.find_each(batch_size: 1000) do |project|
- progress.print " * #{project.full_path} ... "
+ progress.print " * #{display_repo_path(project)} ... "
path_to_project_repo = path_to_repo(project)
path_to_project_bundle = path_to_bundle(project)
project.ensure_storage_path_exists
cmd = if File.exist?(path_to_project_bundle)
- %W(#{Gitlab.config.git.bin_path} clone --bare #{path_to_project_bundle} #{path_to_project_repo})
+ %W(#{Gitlab.config.git.bin_path} clone --bare --mirror #{path_to_project_bundle} #{path_to_project_repo})
else
%W(#{Gitlab.config.git.bin_path} init --bare #{path_to_project_repo})
end
@@ -104,7 +109,7 @@ module Backup
path_to_wiki_bundle = path_to_bundle(wiki)
if File.exist?(path_to_wiki_bundle)
- progress.print " * #{wiki.full_path} ... "
+ progress.print " * #{display_repo_path(wiki)} ... "
# If a wiki bundle exists, first remove the empty repo
# that was initialized with ProjectWiki.new() and then
@@ -185,16 +190,13 @@ module Backup
def progress_warn(project, cmd, output)
progress.puts "[WARNING] Executing #{cmd}".color(:orange)
- progress.puts "Ignoring error on #{project.full_path} - #{output}".color(:orange)
+ progress.puts "Ignoring error on #{display_repo_path(project)} - #{output}".color(:orange)
end
def empty_repo?(project_or_wiki)
- project_or_wiki.repository.expire_exists_cache # protect backups from stale cache
- project_or_wiki.repository.empty_repo?
- rescue => e
- progress.puts "Ignoring repository error and continuing backing up project: #{project_or_wiki.full_path} - #{e.message}".color(:orange)
-
- false
+ # Protect against stale caches
+ project_or_wiki.repository.expire_emptiness_caches
+ project_or_wiki.repository.empty?
end
def repository_storage_paths_args
@@ -204,5 +206,9 @@ module Backup
def progress
$progress
end
+
+ def display_repo_path(project)
+ project.hashed_storage?(:repository) ? "#{project.full_path} (#{project.disk_path})" : project.full_path
+ end
end
end
diff --git a/lib/banzai.rb b/lib/banzai.rb
index 35ca234c1ba..5df98f66f3b 100644
--- a/lib/banzai.rb
+++ b/lib/banzai.rb
@@ -3,8 +3,8 @@ module Banzai
Renderer.render(text, context)
end
- def self.render_field(object, field)
- Renderer.render_field(object, field)
+ def self.render_field(object, field, context = {})
+ Renderer.render_field(object, field, context)
end
def self.cache_collection_render(texts_and_contexts)
diff --git a/lib/banzai/color_parser.rb b/lib/banzai/color_parser.rb
new file mode 100644
index 00000000000..355c364b07b
--- /dev/null
+++ b/lib/banzai/color_parser.rb
@@ -0,0 +1,44 @@
+module Banzai
+ module ColorParser
+ ALPHA = /0(?:\.\d+)?|\.\d+|1(?:\.0+)?/ # 0.0..1.0
+ PERCENTS = /(?:\d{1,2}|100)%/ # 00%..100%
+ ALPHA_CHANNEL = /(?:,\s*(?:#{ALPHA}|#{PERCENTS}))?/
+ BITS = /\d{1,2}|1\d\d|2(?:[0-4]\d|5[0-5])/ # 00..255
+ DEGS = /-?\d+(?:deg)?/i # [-]digits[deg]
+ RADS = /-?(?:\d+(?:\.\d+)?|\.\d+)rad/i # [-](digits[.digits] OR .digits)rad
+ HEX_FORMAT = /\#(?:\h{3}|\h{4}|\h{6}|\h{8})/
+ RGB_FORMAT = %r{
+ (?:rgba?
+ \(
+ (?:
+ (?:(?:#{BITS},\s*){2}#{BITS})
+ |
+ (?:(?:#{PERCENTS},\s*){2}#{PERCENTS})
+ )
+ #{ALPHA_CHANNEL}
+ \)
+ )
+ }xi
+ HSL_FORMAT = %r{
+ (?:hsla?
+ \(
+ (?:#{DEGS}|#{RADS}),\s*#{PERCENTS},\s*#{PERCENTS}
+ #{ALPHA_CHANNEL}
+ \)
+ )
+ }xi
+
+ FORMATS = [HEX_FORMAT, RGB_FORMAT, HSL_FORMAT].freeze
+
+ COLOR_FORMAT = /\A(#{Regexp.union(FORMATS)})\z/ix
+
+ # Public: Analyzes whether the String is a color code.
+ #
+ # text - The String to be parsed.
+ #
+ # Returns the recognized color String or nil if none was found.
+ def self.parse(text)
+ text if COLOR_FORMAT =~ text
+ end
+ end
+end
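
Based on the formats defined above, Banzai::ColorParser.parse echoes back a recognized color string and returns nil otherwise. A brief usage sketch, with sample values chosen to match the HEX, RGB and HSL patterns:

    Banzai::ColorParser.parse('#FF0000')            # => "#FF0000"
    Banzai::ColorParser.parse('rgb(255, 0, 0)')     # => "rgb(255, 0, 0)"
    Banzai::ColorParser.parse('hsl(120, 50%, 50%)') # => "hsl(120, 50%, 50%)"
    Banzai::ColorParser.parse('not-a-color')        # => nil
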
diff --git a/lib/banzai/cross_project_reference.rb b/lib/banzai/cross_project_reference.rb
index e2b57adf611..d8fb7705b2a 100644
--- a/lib/banzai/cross_project_reference.rb
+++ b/lib/banzai/cross_project_reference.rb
@@ -11,7 +11,7 @@ module Banzai
# ref - String reference.
#
# Returns a Project, or nil if the reference can't be found
- def project_from_ref(ref)
+ def parent_from_ref(ref)
return context[:project] unless ref
Project.find_by_full_path(ref)
diff --git a/lib/banzai/filter/absolute_link_filter.rb b/lib/banzai/filter/absolute_link_filter.rb
new file mode 100644
index 00000000000..1ec6201523f
--- /dev/null
+++ b/lib/banzai/filter/absolute_link_filter.rb
@@ -0,0 +1,34 @@
+require 'uri'
+
+module Banzai
+ module Filter
+ # HTML filter that converts relative urls into absolute ones.
+ class AbsoluteLinkFilter < HTML::Pipeline::Filter
+ def call
+ return doc unless context[:only_path] == false
+
+ doc.search('a.gfm').each do |el|
+ process_link_attr el.attribute('href')
+ end
+
+ doc
+ end
+
+ protected
+
+ def process_link_attr(html_attr)
+ return if html_attr.blank?
+ return if html_attr.value.start_with?('//')
+
+ uri = URI(html_attr.value)
+ html_attr.value = absolute_link_attr(uri) if uri.relative?
+ rescue URI::Error
+ # noop
+ end
+
+ def absolute_link_attr(uri)
+ URI.join(Gitlab.config.gitlab.url, uri).to_s
+ end
+ end
+ end
+end
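
A rough sketch of the new filter's effect when the rendering pipeline passes only_path: false; the instance URL and the link shown are examples, not part of the patch:

    # Input fragment produced by earlier reference filters:
    #   <a class="gfm" href="/group/project/issues/1">#1</a>
    # With Gitlab.config.gitlab.url set to "https://gitlab.example.com" the href becomes:
    #   https://gitlab.example.com/group/project/issues/1
    html = Banzai::Filter::AbsoluteLinkFilter.call(html, only_path: false)
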
diff --git a/lib/banzai/filter/abstract_reference_filter.rb b/lib/banzai/filter/abstract_reference_filter.rb
index ef4578aabd6..c9e3f8ce42b 100644
--- a/lib/banzai/filter/abstract_reference_filter.rb
+++ b/lib/banzai/filter/abstract_reference_filter.rb
@@ -82,9 +82,9 @@ module Banzai
end
end
- def project_from_ref_cached(ref)
- cached_call(:banzai_project_refs, ref) do
- project_from_ref(ref)
+ def from_ref_cached(ref)
+ cached_call("banzai_#{parent_type}_refs".to_sym, ref) do
+ parent_from_ref(ref)
end
end
@@ -95,7 +95,7 @@ module Banzai
end
def call
- return doc if project.nil?
+ return doc unless project || group
ref_pattern = object_class.reference_pattern
link_pattern = object_class.link_reference_pattern
@@ -153,15 +153,20 @@ module Banzai
# have `gfm` and `gfm-OBJECT_NAME` class names attached for styling.
def object_link_filter(text, pattern, link_content: nil, link_reference: false)
references_in(text, pattern) do |match, id, project_ref, namespace_ref, matches|
- project_path = full_project_path(namespace_ref, project_ref)
- project = project_from_ref_cached(project_path)
+ parent_path = if parent_type == :group
+ full_group_path(namespace_ref)
+ else
+ full_project_path(namespace_ref, project_ref)
+ end
- if project
+ parent = from_ref_cached(parent_path)
+
+ if parent
object =
if link_reference
- find_object_from_link_cached(project, id)
+ find_object_from_link_cached(parent, id)
else
- find_object_cached(project, id)
+ find_object_cached(parent, id)
end
end
@@ -169,13 +174,15 @@ module Banzai
title = object_link_title(object)
klass = reference_class(object_sym)
- data = data_attributes_for(link_content || match, project, object, link: !!link_content)
+ data = data_attributes_for(link_content || match, parent, object,
+ link_content: !!link_content,
+ link_reference: link_reference)
url =
if matches.names.include?("url") && matches[:url]
matches[:url]
else
- url_for_object_cached(object, project)
+ url_for_object_cached(object, parent)
end
content = link_content || object_link_text(object, matches)
@@ -189,12 +196,13 @@ module Banzai
end
end
- def data_attributes_for(text, project, object, link: false)
+ def data_attributes_for(text, project, object, link_content: false, link_reference: false)
data_attribute(
- original: text,
- link: link,
- project: project.id,
- object_sym => object.id
+ original: text,
+ link: link_content,
+ link_reference: link_reference,
+ project: project.id,
+ object_sym => object.id
)
end
@@ -213,7 +221,8 @@ module Banzai
end
def object_link_text(object, matches)
- text = object.reference_link_text(context[:project])
+ parent = context[:project] || context[:group]
+ text = object.reference_link_text(parent)
extras = object_link_text_extras(object, matches)
text += " (#{extras.join(", ")})" if extras.any?
@@ -223,17 +232,24 @@ module Banzai
# Returns a Hash containing all object references (e.g. issue IDs) per the
# project they belong to.
- def references_per_project
- @references_per_project ||= begin
+ def references_per_parent
+ @references_per ||= {}
+
+ @references_per[parent_type] ||= begin
refs = Hash.new { |hash, key| hash[key] = Set.new }
regex = Regexp.union(object_class.reference_pattern, object_class.link_reference_pattern)
nodes.each do |node|
node.to_html.scan(regex) do
- project_path = full_project_path($~[:namespace], $~[:project])
+ path = if parent_type == :project
+ full_project_path($~[:namespace], $~[:project])
+ else
+ full_group_path($~[:group])
+ end
+
symbol = $~[object_sym]
- refs[project_path] << symbol if object_class.reference_valid?(symbol)
+ refs[path] << symbol if object_class.reference_valid?(symbol)
end
end
@@ -243,35 +259,41 @@ module Banzai
# Returns a Hash containing referenced projects grouped per their full
# path.
- def projects_per_reference
- @projects_per_reference ||= begin
+ def parent_per_reference
+ @per_reference ||= {}
+
+ @per_reference[parent_type] ||= begin
refs = Set.new
- references_per_project.each do |project_ref, _|
- refs << project_ref
+ references_per_parent.each do |ref, _|
+ refs << ref
end
- find_projects_for_paths(refs.to_a).index_by(&:full_path)
+ find_for_paths(refs.to_a).index_by(&:full_path)
end
end
- def projects_relation_for_paths(paths)
- Project.where_full_path_in(paths).includes(:namespace)
+ def relation_for_paths(paths)
+ klass = parent_type.to_s.camelize.constantize
+ result = klass.where_full_path_in(paths)
+ return result if parent_type == :group
+
+ result.includes(:namespace) if parent_type == :project
end
# Returns projects for the given paths.
- def find_projects_for_paths(paths)
+ def find_for_paths(paths)
if RequestStore.active?
- cache = project_refs_cache
+ cache = refs_cache
to_query = paths - cache.keys
unless to_query.empty?
- projects = projects_relation_for_paths(to_query)
+ records = relation_for_paths(to_query)
found = []
- projects.each do |project|
- ref = project.full_path
- get_or_set_cache(cache, ref) { project }
+ records.each do |record|
+ ref = record.full_path
+ get_or_set_cache(cache, ref) { record }
found << ref
end
@@ -283,53 +305,37 @@ module Banzai
cache.slice(*paths).values.compact
else
- projects_relation_for_paths(paths)
+ relation_for_paths(paths)
end
end
- def current_project_path
- @current_project_path ||= project.full_path
+ def current_parent_path
+ @current_parent_path ||= parent&.full_path
end
def current_project_namespace_path
- @current_project_namespace_path ||= project.namespace.full_path
+ @current_project_namespace_path ||= project&.namespace&.full_path
end
private
def full_project_path(namespace, project_ref)
- return current_project_path unless project_ref
+ return current_parent_path unless project_ref
namespace_ref = namespace || current_project_namespace_path
"#{namespace_ref}/#{project_ref}"
end
- def project_refs_cache
- RequestStore[:banzai_project_refs] ||= {}
+ def refs_cache
+ RequestStore["banzai_#{parent_type}_refs".to_sym] ||= {}
end
- def cached_call(request_store_key, cache_key, path: [])
- if RequestStore.active?
- cache = RequestStore[request_store_key] ||= Hash.new do |hash, key|
- hash[key] = Hash.new { |h, k| h[k] = {} }
- end
-
- cache = cache.dig(*path) if path.any?
-
- get_or_set_cache(cache, cache_key) { yield }
- else
- yield
- end
+ def parent_type
+ :project
end
- def get_or_set_cache(cache, key)
- if cache.key?(key)
- cache[key]
- else
- value = yield
- cache[key] = value if key.present?
- value
- end
+ def parent
+ parent_type == :project ? project : group
end
end
end
diff --git a/lib/banzai/filter/autolink_filter.rb b/lib/banzai/filter/autolink_filter.rb
index b8d2673c1a6..75b64ae9af2 100644
--- a/lib/banzai/filter/autolink_filter.rb
+++ b/lib/banzai/filter/autolink_filter.rb
@@ -25,8 +25,8 @@ module Banzai
# period or comma for punctuation without those characters being included
# in the generated link.
#
- # Rubular: http://rubular.com/r/cxjPyZc7Sb
- LINK_PATTERN = %r{([a-z][a-z0-9\+\.-]+://\S+)(?<!,|\.)}
+ # Rubular: http://rubular.com/r/JzPhi6DCZp
+ LINK_PATTERN = %r{([a-z][a-z0-9\+\.-]+://[^\s>]+)(?<!,|\.)}
# Text matching LINK_PATTERN inside these elements will not be linked
IGNORE_PARENTS = %w(a code kbd pre script style).to_set
@@ -35,53 +35,19 @@ module Banzai
TEXT_QUERY = %Q(descendant-or-self::text()[
not(#{IGNORE_PARENTS.map { |p| "ancestor::#{p}" }.join(' or ')})
and contains(., '://')
- and not(starts-with(., 'http'))
- and not(starts-with(., 'ftp'))
]).freeze
+ PUNCTUATION_PAIRS = {
+ "'" => "'",
+ '"' => '"',
+ ')' => '(',
+ ']' => '[',
+ '}' => '{'
+ }.freeze
+
def call
return doc if context[:autolink] == false
- rinku_parse
- text_parse
- end
-
- private
-
- # Run the text through Rinku as a first pass
- #
- # This will quickly autolink http(s) and ftp links.
- #
- # `@doc` will be re-parsed with the HTML String from Rinku.
- def rinku_parse
- # Convert the options from a Hash to a String that Rinku expects
- options = tag_options(link_options)
-
- # NOTE: We don't parse email links because it will erroneously match
- # external Commit and CommitRange references.
- #
- # The final argument tells Rinku to link short URLs that don't include a
- # period (e.g., http://localhost:3000/)
- rinku = Rinku.auto_link(html, :urls, options, IGNORE_PARENTS.to_a, 1)
-
- return if rinku == html
-
- # Rinku returns a String, so parse it back to a Nokogiri::XML::Document
- # for further processing.
- @doc = parse_html(rinku)
- end
-
- # Return true if any of the UNSAFE_PROTOCOLS strings are included in the URI scheme
- def contains_unsafe?(scheme)
- return false unless scheme
-
- scheme = scheme.strip.downcase
- Banzai::Filter::SanitizationFilter::UNSAFE_PROTOCOLS.any? { |protocol| scheme.include?(protocol) }
- end
-
- # Autolinks any text matching LINK_PATTERN that Rinku didn't already
- # replace
- def text_parse
doc.xpath(TEXT_QUERY).each do |node|
content = node.to_html
@@ -97,6 +63,16 @@ module Banzai
doc
end
+ private
+
+ # Return true if any of the UNSAFE_PROTOCOLS strings are included in the URI scheme
+ def contains_unsafe?(scheme)
+ return false unless scheme
+
+ scheme = scheme.strip.downcase
+ Banzai::Filter::SanitizationFilter::UNSAFE_PROTOCOLS.any? { |protocol| scheme.include?(protocol) }
+ end
+
def autolink_match(match)
# start by stripping out dangerous links
begin
@@ -112,12 +88,30 @@ module Banzai
match.gsub!(/((?:&[\w#]+;)+)\z/, '')
dropped = ($1 || '').html_safe
+ # To match the behaviour of Rinku, if the matched link ends with a
+ # closing part of a matched pair of punctuation, we remove that trailing
+ # character unless there are an equal number of closing and opening
+ # characters in the link.
+ if match.end_with?(*PUNCTUATION_PAIRS.keys)
+ close_character = match[-1]
+ close_count = match.count(close_character)
+ open_character = PUNCTUATION_PAIRS[close_character]
+ open_count = match.count(open_character)
+
+ if open_count != close_count || open_character == close_character
+ dropped += close_character
+ match = match[0..-2]
+ end
+ end
+
options = link_options.merge(href: match)
- content_tag(:a, match, options) + dropped
+ content_tag(:a, match.html_safe, options) + dropped
end
def autolink_filter(text)
- text.gsub(LINK_PATTERN) { |match| autolink_match(match) }
+ Gitlab::StringRegexMarker.new(CGI.unescapeHTML(text), text.html_safe).mark(LINK_PATTERN) do |link, left:, right:|
+ autolink_match(link)
+ end
end
def link_options
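
The PUNCTUATION_PAIRS handling above mirrors Rinku's trailing-punctuation behaviour; a few illustrative cases derived from the counting logic (the URLs are examples):

    match = 'http://example.com/foo)'
    close = match[-1]                                                  # => ")"
    open  = Banzai::Filter::AutolinkFilter::PUNCTUATION_PAIRS[close]   # => "("
    match.count(close) != match.count(open)  # => true, so the trailing ")" is kept out of the link
    # 'http://en.wikipedia.org/wiki/Phase_(waves)' keeps its ")" because the counts balance,
    # while a trailing quote is always dropped, since its opener and closer are the same character.
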
diff --git a/lib/banzai/filter/color_filter.rb b/lib/banzai/filter/color_filter.rb
new file mode 100644
index 00000000000..6ab29ac281f
--- /dev/null
+++ b/lib/banzai/filter/color_filter.rb
@@ -0,0 +1,31 @@
+module Banzai
+ module Filter
+ # HTML filter that renders `color` followed by a color "chip".
+ #
+ class ColorFilter < HTML::Pipeline::Filter
+ COLOR_CHIP_CLASS = 'gfm-color_chip'.freeze
+
+ def call
+ doc.css('code').each do |node|
+ color = ColorParser.parse(node.content)
+ node << color_chip(color) if color
+ end
+
+ doc
+ end
+
+ private
+
+ def color_chip(color)
+ checkerboard = doc.document.create_element('span', class: COLOR_CHIP_CLASS)
+ chip = doc.document.create_element('span', style: inline_styles(color: color))
+
+ checkerboard << chip
+ end
+
+ def inline_styles(color:)
+ "background-color: #{color};"
+ end
+ end
+ end
+end
diff --git a/lib/banzai/filter/commit_reference_filter.rb b/lib/banzai/filter/commit_reference_filter.rb
index 714e0319025..43bf4fc6565 100644
--- a/lib/banzai/filter/commit_reference_filter.rb
+++ b/lib/banzai/filter/commit_reference_filter.rb
@@ -18,14 +18,35 @@ module Banzai
def find_object(project, id)
if project && project.valid_repo?
- project.commit(id)
+ # n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/43894
+ Gitlab::GitalyClient.allow_n_plus_1_calls { project.commit(id) }
+ end
+ end
+
+ def referenced_merge_request_commit_shas
+ return [] unless noteable.is_a?(MergeRequest)
+
+ @referenced_merge_request_commit_shas ||= begin
+ referenced_shas = references_per_parent.values.reduce(:|).to_a
+ noteable.all_commit_shas.select do |sha|
+ referenced_shas.any? { |ref| Gitlab::Git.shas_eql?(sha, ref) }
+ end
end
end
def url_for_object(commit, project)
h = Gitlab::Routing.url_helpers
- h.project_commit_url(project, commit,
- only_path: context[:only_path])
+
+ if referenced_merge_request_commit_shas.include?(commit.id)
+ h.diffs_project_merge_request_url(project,
+ noteable,
+ commit_id: commit.id,
+ only_path: only_path?)
+ else
+ h.project_commit_url(project,
+ commit,
+ only_path: only_path?)
+ end
end
def object_link_text_extras(object, matches)
@@ -38,6 +59,16 @@ module Banzai
extras
end
+
+ private
+
+ def noteable
+ context[:noteable]
+ end
+
+ def only_path?
+ context[:only_path]
+ end
end
end
end
diff --git a/lib/banzai/filter/emoji_filter.rb b/lib/banzai/filter/emoji_filter.rb
index 6255a611dbe..b82c6ca6393 100644
--- a/lib/banzai/filter/emoji_filter.rb
+++ b/lib/banzai/filter/emoji_filter.rb
@@ -54,9 +54,9 @@ module Banzai
# Build a regexp that matches all valid :emoji: names.
def self.emoji_pattern
@emoji_pattern ||=
- /(?<=[^[:alnum:]:]|\n|^)
+ %r{(?<=[^[:alnum:]:]|\n|^)
:(#{Gitlab::Emoji.emojis_names.map { |name| Regexp.escape(name) }.join('|')}):
- (?=[^[:alnum:]:]|$)/x
+ (?=[^[:alnum:]:]|$)}x
end
# Build a regexp that matches all valid unicode emojis names.
diff --git a/lib/banzai/filter/epic_reference_filter.rb b/lib/banzai/filter/epic_reference_filter.rb
new file mode 100644
index 00000000000..265924abe24
--- /dev/null
+++ b/lib/banzai/filter/epic_reference_filter.rb
@@ -0,0 +1,12 @@
+module Banzai
+ module Filter
+ # The actual filter is implemented in the EE mixin
+ class EpicReferenceFilter < IssuableReferenceFilter
+ self.reference_type = :epic
+
+ def self.object_class
+ Epic
+ end
+ end
+ end
+end
diff --git a/lib/banzai/filter/gollum_tags_filter.rb b/lib/banzai/filter/gollum_tags_filter.rb
index 2e259904673..c2b42673376 100644
--- a/lib/banzai/filter/gollum_tags_filter.rb
+++ b/lib/banzai/filter/gollum_tags_filter.rb
@@ -51,10 +51,10 @@ module Banzai
# See https://github.com/gollum/gollum/wiki
#
# Rubular: http://rubular.com/r/7dQnE5CUCH
- TAGS_PATTERN = %r{\[\[(.+?)\]\]}.freeze
+ TAGS_PATTERN = /\[\[(.+?)\]\]/.freeze
# Pattern to match allowed image extensions
- ALLOWED_IMAGE_EXTENSIONS = %r{.+(jpg|png|gif|svg|bmp)\z}i.freeze
+ ALLOWED_IMAGE_EXTENSIONS = /.+(jpg|png|gif|svg|bmp)\z/i.freeze
def call
search_text_nodes(doc).each do |node|
diff --git a/lib/banzai/filter/html_entity_filter.rb b/lib/banzai/filter/html_entity_filter.rb
index f3bd587c28b..e008fd428b0 100644
--- a/lib/banzai/filter/html_entity_filter.rb
+++ b/lib/banzai/filter/html_entity_filter.rb
@@ -5,7 +5,7 @@ module Banzai
# Text filter that escapes these HTML entities: & " < >
class HtmlEntityFilter < HTML::Pipeline::TextFilter
def call
- ERB::Util.html_escape_once(text)
+ ERB::Util.html_escape(text)
end
end
end
diff --git a/lib/banzai/filter/issuable_reference_filter.rb b/lib/banzai/filter/issuable_reference_filter.rb
new file mode 100644
index 00000000000..7addf09be73
--- /dev/null
+++ b/lib/banzai/filter/issuable_reference_filter.rb
@@ -0,0 +1,31 @@
+module Banzai
+ module Filter
+ class IssuableReferenceFilter < AbstractReferenceFilter
+ def records_per_parent
+ @records_per_project ||= {}
+
+ @records_per_project[object_class.to_s.underscore] ||= begin
+ hash = Hash.new { |h, k| h[k] = {} }
+
+ parent_per_reference.each do |path, parent|
+ record_ids = references_per_parent[path]
+
+ parent_records(parent, record_ids).each do |record|
+ hash[parent][record.iid.to_i] = record
+ end
+ end
+
+ hash
+ end
+ end
+
+ def find_object(parent, iid)
+ records_per_parent[parent][iid]
+ end
+
+ def parent_from_ref(ref)
+ parent_per_reference[ref || current_parent_path]
+ end
+ end
+ end
+end
diff --git a/lib/banzai/filter/issuable_state_filter.rb b/lib/banzai/filter/issuable_state_filter.rb
index 327ea9449a1..77299abe324 100644
--- a/lib/banzai/filter/issuable_state_filter.rb
+++ b/lib/banzai/filter/issuable_state_filter.rb
@@ -15,6 +15,8 @@ module Banzai
issuables = extractor.extract([doc])
issuables.each do |node, issuable|
+ next if !can_read_cross_project? && issuable.project != project
+
if VISIBLE_STATES.include?(issuable.state) && node.inner_html == issuable.reference_link_text(project)
node.content += " (#{issuable.state})"
end
@@ -25,6 +27,10 @@ module Banzai
private
+ def can_read_cross_project?
+ Ability.allowed?(current_user, :read_cross_project)
+ end
+
def current_user
context[:current_user]
end
diff --git a/lib/banzai/filter/issue_reference_filter.rb b/lib/banzai/filter/issue_reference_filter.rb
index ce1ab977d3b..6877cae8c55 100644
--- a/lib/banzai/filter/issue_reference_filter.rb
+++ b/lib/banzai/filter/issue_reference_filter.rb
@@ -8,46 +8,24 @@ module Banzai
# When external issues tracker like Jira is activated we should not
# use issue reference pattern, but we should still be able
# to reference issues from other GitLab projects.
- class IssueReferenceFilter < AbstractReferenceFilter
+ class IssueReferenceFilter < IssuableReferenceFilter
self.reference_type = :issue
def self.object_class
Issue
end
- def find_object(project, iid)
- issues_per_project[project][iid]
- end
-
def url_for_object(issue, project)
IssuesHelper.url_for_issue(issue.iid, project, only_path: context[:only_path], internal: true)
end
- def project_from_ref(ref)
- projects_per_reference[ref || current_project_path]
- end
-
- # Returns a Hash containing the issues per Project instance.
- def issues_per_project
- @issues_per_project ||= begin
- hash = Hash.new { |h, k| h[k] = {} }
-
- projects_per_reference.each do |path, project|
- issue_ids = references_per_project[path]
- issues = project.issues.where(iid: issue_ids.to_a)
-
- issues.each do |issue|
- hash[project][issue.iid.to_i] = issue
- end
- end
-
- hash
- end
- end
-
def projects_relation_for_paths(paths)
super(paths).includes(:gitlab_issue_tracker_service)
end
+
+ def parent_records(parent, ids)
+ parent.issues.where(iid: ids.to_a)
+ end
end
end
end
diff --git a/lib/banzai/filter/label_reference_filter.rb b/lib/banzai/filter/label_reference_filter.rb
index 5364984c9d3..d5360ad8f68 100644
--- a/lib/banzai/filter/label_reference_filter.rb
+++ b/lib/banzai/filter/label_reference_filter.rb
@@ -33,7 +33,7 @@ module Banzai
end
def find_label(project_ref, label_id, label_name)
- project = project_from_ref(project_ref)
+ project = parent_from_ref(project_ref)
return unless project
label_params = label_params(label_id, label_name)
@@ -66,7 +66,7 @@ module Banzai
def object_link_text(object, matches)
project_path = full_project_path(matches[:namespace], matches[:project])
- project_from_ref = project_from_ref_cached(project_path)
+ project_from_ref = from_ref_cached(project_path)
reference = project_from_ref.to_human_reference(project)
label_suffix = " <i>in #{reference}</i>" if reference.present?
diff --git a/lib/banzai/filter/markdown_filter.rb b/lib/banzai/filter/markdown_filter.rb
index ee73fa91589..9cac303e645 100644
--- a/lib/banzai/filter/markdown_filter.rb
+++ b/lib/banzai/filter/markdown_filter.rb
@@ -1,6 +1,18 @@
module Banzai
module Filter
class MarkdownFilter < HTML::Pipeline::TextFilter
+ # https://github.com/vmg/redcarpet#and-its-like-really-simple-to-use
+ REDCARPET_OPTIONS = {
+ fenced_code_blocks: true,
+ footnotes: true,
+ lax_spacing: true,
+ no_intra_emphasis: true,
+ space_after_headers: true,
+ strikethrough: true,
+ superscript: true,
+ tables: true
+ }.freeze
+
def initialize(text, context = nil, result = nil)
super text, context, result
@text = @text.delete "\r"
@@ -13,27 +25,11 @@ module Banzai
end
def self.renderer
- @renderer ||= begin
+ Thread.current[:banzai_markdown_renderer] ||= begin
renderer = Banzai::Renderer::HTML.new
- Redcarpet::Markdown.new(renderer, redcarpet_options)
+ Redcarpet::Markdown.new(renderer, REDCARPET_OPTIONS)
end
end
-
- def self.redcarpet_options
- # https://github.com/vmg/redcarpet#and-its-like-really-simple-to-use
- @redcarpet_options ||= {
- fenced_code_blocks: true,
- footnotes: true,
- lax_spacing: true,
- no_intra_emphasis: true,
- space_after_headers: true,
- strikethrough: true,
- superscript: true,
- tables: true
- }.freeze
- end
-
- private_class_method :redcarpet_options
end
end
end
diff --git a/lib/banzai/filter/merge_request_reference_filter.rb b/lib/banzai/filter/merge_request_reference_filter.rb
index 0eab865ac04..b3cfa97d0e0 100644
--- a/lib/banzai/filter/merge_request_reference_filter.rb
+++ b/lib/banzai/filter/merge_request_reference_filter.rb
@@ -4,48 +4,19 @@ module Banzai
# to merge requests that do not exist are ignored.
#
# This filter supports cross-project references.
- class MergeRequestReferenceFilter < AbstractReferenceFilter
+ class MergeRequestReferenceFilter < IssuableReferenceFilter
self.reference_type = :merge_request
def self.object_class
MergeRequest
end
- def find_object(project, iid)
- merge_requests_per_project[project][iid]
- end
-
def url_for_object(mr, project)
h = Gitlab::Routing.url_helpers
h.project_merge_request_url(project, mr,
only_path: context[:only_path])
end
- def project_from_ref(ref)
- projects_per_reference[ref || current_project_path]
- end
-
- # Returns a Hash containing the merge_requests per Project instance.
- def merge_requests_per_project
- @merge_requests_per_project ||= begin
- hash = Hash.new { |h, k| h[k] = {} }
-
- projects_per_reference.each do |path, project|
- merge_request_ids = references_per_project[path]
-
- merge_requests = project.merge_requests
- .where(iid: merge_request_ids.to_a)
- .includes(target_project: :namespace)
-
- merge_requests.each do |merge_request|
- hash[project][merge_request.iid.to_i] = merge_request
- end
- end
-
- hash
- end
- end
-
def object_link_text_extras(object, matches)
extras = super
@@ -61,6 +32,12 @@ module Banzai
extras
end
+
+ def parent_records(parent, ids)
+ parent.merge_requests
+ .where(iid: ids.to_a)
+ .includes(target_project: :namespace)
+ end
end
end
end
diff --git a/lib/banzai/filter/mermaid_filter.rb b/lib/banzai/filter/mermaid_filter.rb
new file mode 100644
index 00000000000..65c131e08d9
--- /dev/null
+++ b/lib/banzai/filter/mermaid_filter.rb
@@ -0,0 +1,11 @@
+module Banzai
+ module Filter
+ class MermaidFilter < HTML::Pipeline::Filter
+ def call
+ doc.css('pre[lang="mermaid"] > code').add_class('js-render-mermaid')
+
+ doc
+ end
+ end
+ end
+end
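
The new filter only tags mermaid code blocks so they can be rendered client side (presumably by a frontend hook keyed on the js-render-mermaid class). A small sketch of the transformation it performs:

    require 'nokogiri'

    doc = Nokogiri::HTML.fragment('<pre lang="mermaid"><code>graph TD</code></pre>')
    doc.css('pre[lang="mermaid"] > code').add_class('js-render-mermaid')
    doc.to_html
    # => '<pre lang="mermaid"><code class="js-render-mermaid">graph TD</code></pre>'
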
diff --git a/lib/banzai/filter/milestone_reference_filter.rb b/lib/banzai/filter/milestone_reference_filter.rb
index 4fc5f211e84..8ec696ce5fc 100644
--- a/lib/banzai/filter/milestone_reference_filter.rb
+++ b/lib/banzai/filter/milestone_reference_filter.rb
@@ -38,7 +38,7 @@ module Banzai
def find_milestone(project_ref, namespace_ref, milestone_id, milestone_name)
project_path = full_project_path(namespace_ref, project_ref)
- project = project_from_ref(project_path)
+ project = parent_from_ref(project_path)
return unless project
@@ -56,7 +56,7 @@ module Banzai
end
def find_milestone_with_finder(project, params)
- finder_params = { project_ids: [project.id], order: nil }
+ finder_params = { project_ids: [project.id], order: nil, state: 'all' }
# We don't support IID lookups for group milestones, because IIDs can
# clash between group and project milestones.
@@ -64,7 +64,7 @@ module Banzai
finder_params[:group_ids] = [project.group.id]
end
- MilestonesFinder.new(finder_params).execute.find_by(params)
+ MilestonesFinder.new(finder_params).find_by(params)
end
def url_for_object(milestone, project)
diff --git a/lib/banzai/filter/reference_filter.rb b/lib/banzai/filter/reference_filter.rb
index a6f8650ed3d..b9d5ecf70ec 100644
--- a/lib/banzai/filter/reference_filter.rb
+++ b/lib/banzai/filter/reference_filter.rb
@@ -8,6 +8,8 @@ module Banzai
# :project (required) - Current project, ignored if reference is cross-project.
# :only_path - Generate path-only links.
class ReferenceFilter < HTML::Pipeline::Filter
+ include RequestStoreReferenceCache
+
class << self
attr_accessor :reference_type
end
@@ -55,6 +57,10 @@ module Banzai
context[:project]
end
+ def group
+ context[:group]
+ end
+
def skip_project_check?
context[:skip_project_check]
end
diff --git a/lib/banzai/filter/relative_link_filter.rb b/lib/banzai/filter/relative_link_filter.rb
index 758f15c8a67..9bdedeb6615 100644
--- a/lib/banzai/filter/relative_link_filter.rb
+++ b/lib/banzai/filter/relative_link_filter.rb
@@ -2,19 +2,21 @@ require 'uri'
module Banzai
module Filter
- # HTML filter that "fixes" relative links to files in a repository.
+ # HTML filter that "fixes" relative links to uploads or files in a repository.
#
# Context options:
# :commit
+ # :group
# :project
# :project_wiki
# :ref
# :requested_path
class RelativeLinkFilter < HTML::Pipeline::Filter
- def call
- return doc unless linkable_files?
+ include Gitlab::Utils::StrongMemoize
+ def call
@uri_types = {}
+ clear_memoization(:linkable_files)
doc.search('a:not(.gfm)').each do |el|
process_link_attr el.attribute('href')
@@ -31,18 +33,47 @@ module Banzai
protected
def linkable_files?
- context[:project_wiki].nil? && repository.try(:exists?) && !repository.empty?
+ strong_memoize(:linkable_files) do
+ context[:project_wiki].nil? && repository.try(:exists?) && !repository.empty?
+ end
end
def process_link_attr(html_attr)
return if html_attr.blank?
return if html_attr.value.start_with?('//')
+ if html_attr.value.start_with?('/uploads/')
+ process_link_to_upload_attr(html_attr)
+ elsif linkable_files?
+ process_link_to_repository_attr(html_attr)
+ end
+ end
+
+ def process_link_to_upload_attr(html_attr)
+ path_parts = [Addressable::URI.unescape(html_attr.value)]
+
+ if group
+ path_parts.unshift(relative_url_root, 'groups', group.full_path, '-')
+ elsif project
+ path_parts.unshift(relative_url_root, project.full_path)
+ end
+
+ path = Addressable::URI.escape(File.join(*path_parts))
+
+ html_attr.value =
+ if context[:only_path]
+ path
+ else
+ Addressable::URI.join(Gitlab.config.gitlab.base_url, path).to_s
+ end
+ end
+
+ def process_link_to_repository_attr(html_attr)
uri = URI(html_attr.value)
if uri.relative? && uri.path.present?
html_attr.value = rebuild_relative_uri(uri).to_s
end
- rescue URI::Error
+ rescue URI::Error, Addressable::URI::InvalidURIError
# noop
end
@@ -51,7 +82,7 @@ module Banzai
uri.path = [
relative_url_root,
- context[:project].full_path,
+ project.full_path,
uri_type(file_path),
Addressable::URI.escape(ref),
Addressable::URI.escape(file_path)
@@ -123,11 +154,19 @@ module Banzai
end
def ref
- context[:ref] || context[:project].default_branch
+ context[:ref] || project.default_branch
+ end
+
+ def group
+ context[:group]
+ end
+
+ def project
+ context[:project]
end
def repository
- @repository ||= context[:project].try(:repository)
+ @repository ||= project&.repository
end
end
end
diff --git a/lib/banzai/filter/sanitization_filter.rb b/lib/banzai/filter/sanitization_filter.rb
index 9923ec4e870..6786b9d07b6 100644
--- a/lib/banzai/filter/sanitization_filter.rb
+++ b/lib/banzai/filter/sanitization_filter.rb
@@ -45,8 +45,9 @@ module Banzai
whitelist[:elements].push('abbr')
whitelist[:attributes]['abbr'] = %w(title)
- # Disallow `name` attribute globally
+ # Disallow `name` attribute globally, allow on `a`
whitelist[:attributes][:all].delete('name')
+ whitelist[:attributes]['a'].push('name')
# Allow any protocol in `a` elements...
whitelist[:protocols].delete('a')
@@ -72,10 +73,21 @@ module Banzai
return unless node.has_attribute?('href')
begin
+ node['href'] = node['href'].strip
uri = Addressable::URI.parse(node['href'])
- uri.scheme = uri.scheme.strip.downcase if uri.scheme
- node.remove_attribute('href') if UNSAFE_PROTOCOLS.include?(uri.scheme)
+ return unless uri.scheme
+
+ # Remove all invalid scheme characters before checking against the
+ # list of unsafe protocols.
+ #
+ # See https://tools.ietf.org/html/rfc3986#section-3.1
+ scheme = uri.scheme
+ .strip
+ .downcase
+ .gsub(/[^A-Za-z0-9\+\.\-]+/, '')
+
+ node.remove_attribute('href') if UNSAFE_PROTOCOLS.include?(scheme)
rescue Addressable::URI::InvalidURIError
node.remove_attribute('href')
end
diff --git a/lib/banzai/filter/syntax_highlight_filter.rb b/lib/banzai/filter/syntax_highlight_filter.rb
index 7da565043d1..0ac7e231b5b 100644
--- a/lib/banzai/filter/syntax_highlight_filter.rb
+++ b/lib/banzai/filter/syntax_highlight_filter.rb
@@ -14,23 +14,36 @@ module Banzai
end
def highlight_node(node)
- language = node.attr('lang')
- code = node.text
- css_classes = "code highlight"
- lexer = lexer_for(language)
- lang = lexer.tag
+ css_classes = 'code highlight js-syntax-highlight'
+ lang = node.attr('lang')
+ retried = false
- begin
- code = Rouge::Formatters::HTMLGitlab.format(lex(lexer, code), tag: lang)
+ if use_rouge?(lang)
+ lexer = lexer_for(lang)
+ language = lexer.tag
+ else
+ lexer = Rouge::Lexers::PlainText.new
+ language = lang
+ end
- css_classes << " js-syntax-highlight #{lang}"
+ begin
+ code = Rouge::Formatters::HTMLGitlab.format(lex(lexer, node.text), tag: language)
+ css_classes << " #{language}" if language
rescue
- lang = nil
- # Gracefully handle syntax highlighter bugs/errors to ensure
- # users can still access an issue/comment/etc.
+ # Gracefully handle syntax highlighter bugs/errors to ensure users can
+ # still access an issue/comment/etc. First, retry with the plain text
+ # filter. If that fails, then just skip this entirely, but that would
+ # be a pretty bad upstream bug.
+ return if retried
+
+ language = nil
+ lexer = Rouge::Lexers::PlainText.new
+ retried = true
+
+ retry
end
- highlighted = %(<pre class="#{css_classes}" lang="#{lang}" v-pre="true"><code>#{code}</code></pre>)
+ highlighted = %(<pre class="#{css_classes}" lang="#{language}" v-pre="true"><code>#{code}</code></pre>)
# Extracted to a method to measure it
replace_parent_pre_element(node, highlighted)
@@ -51,6 +64,10 @@ module Banzai
# Replace the parent `pre` element with the entire highlighted block
node.parent.replace(highlighted)
end
+
+ def use_rouge?(language)
+ %w(math mermaid plantuml).exclude?(language)
+ end
end
end
end
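
With use_rouge? in place, math, mermaid and plantuml blocks skip Rouge entirely and keep their lang attribute so later filters (for example MermaidFilter above) can pick them up; everything else is highlighted as before, with a single plain-text retry if Rouge raises:

    %w(math mermaid plantuml).exclude?('ruby')     # => true,  highlight with Rouge, "ruby" class added
    %w(math mermaid plantuml).exclude?('mermaid')  # => false, PlainText lexer, lang stays "mermaid"
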
diff --git a/lib/banzai/filter/table_of_contents_filter.rb b/lib/banzai/filter/table_of_contents_filter.rb
index 47151626208..97244159985 100644
--- a/lib/banzai/filter/table_of_contents_filter.rb
+++ b/lib/banzai/filter/table_of_contents_filter.rb
@@ -32,6 +32,7 @@ module Banzai
.gsub(PUNCTUATION_REGEXP, '') # remove punctuation
.tr(' ', '-') # replace spaces with dash
.squeeze('-') # replace multiple dashes with one
+ .gsub(/\A(\d+)\z/, 'anchor-\1') # digits-only hrefs conflict with issue refs
uniq = headers[id] > 0 ? "-#{headers[id]}" : ''
headers[id] += 1
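
The added gsub keeps digits-only headings from producing anchors that read like issue references:

    '2018'.gsub(/\A(\d+)\z/, 'anchor-\1')   # => "anchor-2018" (href "#anchor-2018")
    'usage'.gsub(/\A(\d+)\z/, 'anchor-\1')  # => "usage", unchanged
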
diff --git a/lib/banzai/filter/upload_link_filter.rb b/lib/banzai/filter/upload_link_filter.rb
deleted file mode 100644
index 09844931be5..00000000000
--- a/lib/banzai/filter/upload_link_filter.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-require 'uri'
-
-module Banzai
- module Filter
- # HTML filter that "fixes" relative upload links to files.
- # Context options:
- # :project (required) - Current project
- #
- class UploadLinkFilter < HTML::Pipeline::Filter
- def call
- return doc unless project
-
- doc.xpath('descendant-or-self::a[starts-with(@href, "/uploads/")]').each do |el|
- process_link_attr el.attribute('href')
- end
-
- doc.xpath('descendant-or-self::img[starts-with(@src, "/uploads/")]').each do |el|
- process_link_attr el.attribute('src')
- end
-
- doc
- end
-
- protected
-
- def process_link_attr(html_attr)
- html_attr.value = build_url(html_attr.value).to_s
- end
-
- def build_url(uri)
- File.join(Gitlab.config.gitlab.url, project.full_path, uri)
- end
-
- def project
- context[:project]
- end
-
- # Ensure that a :project key exists in context
- #
- # Note that while the key might exist, its value could be nil!
- def validate
- needs :project
- end
- end
- end
-end
diff --git a/lib/banzai/filter/user_reference_filter.rb b/lib/banzai/filter/user_reference_filter.rb
index f3356d6c51e..c7fa8a8119f 100644
--- a/lib/banzai/filter/user_reference_filter.rb
+++ b/lib/banzai/filter/user_reference_filter.rb
@@ -24,7 +24,7 @@ module Banzai
end
def call
- return doc if project.nil? && !skip_project_check?
+ return doc if project.nil? && group.nil? && !skip_project_check?
ref_pattern = User.reference_pattern
ref_pattern_start = /\A#{ref_pattern}\z/
@@ -60,10 +60,14 @@ module Banzai
self.class.references_in(text) do |match, username|
if username == 'all' && !skip_project_check?
link_to_all(link_content: link_content)
- elsif namespace = namespaces[username.downcase]
- link_to_namespace(namespace, link_content: link_content) || match
else
- match
+ cached_call(:banzai_url_for_object, match, path: [User, username.downcase]) do
+ if namespace = namespaces[username.downcase]
+ link_to_namespace(namespace, link_content: link_content) || match
+ else
+ match
+ end
+ end
end
end
end
@@ -74,7 +78,10 @@ module Banzai
# The keys of this Hash are the namespace paths, the values the
# corresponding Namespace objects.
def namespaces
- @namespaces ||= Namespace.where_full_path_in(usernames).index_by(&:full_path).transform_keys(&:downcase)
+ @namespaces ||= Namespace.eager_load(:owner, :route)
+ .where_full_path_in(usernames)
+ .index_by(&:full_path)
+ .transform_keys(&:downcase)
end
# Returns all usernames referenced in the current document.
@@ -101,19 +108,12 @@ module Banzai
end
def link_to_all(link_content: nil)
- project = context[:project]
author = context[:author]
- if author && !project.team.member?(author)
+ if author && !team_member?(author)
link_content
else
- url = urls.project_url(project,
- only_path: context[:only_path])
-
- data = data_attribute(project: project.id, author: author.try(:id))
- content = link_content || User.reference_prefix + 'all'
-
- link_tag(url, data, content, 'All Project and Group Members')
+ parent_url(link_content, author)
end
end
@@ -144,6 +144,35 @@ module Banzai
def link_tag(url, data, link_content, title)
%(<a href="#{url}" #{data} class="#{link_class}" title="#{escape_once(title)}">#{link_content}</a>)
end
+
+ def parent
+ context[:project] || context[:group]
+ end
+
+ def parent_group?
+ parent.is_a?(Group)
+ end
+
+ def team_member?(user)
+ if parent_group?
+ parent.member?(user)
+ else
+ parent.team.member?(user)
+ end
+ end
+
+ def parent_url(link_content, author)
+ if parent_group?
+ url = urls.group_url(parent, only_path: context[:only_path])
+ data = data_attribute(group: group.id, author: author.try(:id))
+ else
+ url = urls.project_url(parent, only_path: context[:only_path])
+ data = data_attribute(project: project.id, author: author.try(:id))
+ end
+
+ content = link_content || User.reference_prefix + 'all'
+ link_tag(url, data, content, 'All Project and Group Members')
+ end
end
end
end
diff --git a/lib/banzai/filter/wiki_link_filter/rewriter.rb b/lib/banzai/filter/wiki_link_filter/rewriter.rb
index e7a1ec8457d..072d24e5a11 100644
--- a/lib/banzai/filter/wiki_link_filter/rewriter.rb
+++ b/lib/banzai/filter/wiki_link_filter/rewriter.rb
@@ -9,6 +9,10 @@ module Banzai
end
def apply_rules
+ # Special case: relative URLs beginning with `/uploads/` refer to
+ # user-uploaded files and will be handled elsewhere.
+ return @uri.to_s if @uri.relative? && @uri.path.starts_with?('/uploads/')
+
apply_file_link_rules!
apply_hierarchical_link_rules!
apply_relative_link_rules!
diff --git a/lib/banzai/issuable_extractor.rb b/lib/banzai/issuable_extractor.rb
index cbabf9156de..49603d0b363 100644
--- a/lib/banzai/issuable_extractor.rb
+++ b/lib/banzai/issuable_extractor.rb
@@ -28,8 +28,8 @@ module Banzai
issue_parser = Banzai::ReferenceParser::IssueParser.new(project, user)
merge_request_parser = Banzai::ReferenceParser::MergeRequestParser.new(project, user)
- issuables_for_nodes = issue_parser.issues_for_nodes(nodes).merge(
- merge_request_parser.merge_requests_for_nodes(nodes)
+ issuables_for_nodes = issue_parser.records_for_nodes(nodes).merge(
+ merge_request_parser.records_for_nodes(nodes)
)
# The project for the issue/MR might be pending for deletion!
diff --git a/lib/banzai/note_renderer.rb b/lib/banzai/note_renderer.rb
deleted file mode 100644
index 2b7c10f1a0e..00000000000
--- a/lib/banzai/note_renderer.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-module Banzai
- module NoteRenderer
- # Renders a collection of Note instances.
- #
- # notes - The notes to render.
- # project - The project to use for redacting.
- # user - The user viewing the notes.
- # path - The request path.
- # wiki - The project's wiki.
- # git_ref - The current Git reference.
- def self.render(notes, project, user = nil, path = nil, wiki = nil, git_ref = nil)
- renderer = ObjectRenderer.new(project,
- user,
- requested_path: path,
- project_wiki: wiki,
- ref: git_ref)
-
- renderer.render(notes, :note)
- end
- end
-end
diff --git a/lib/banzai/object_renderer.rb b/lib/banzai/object_renderer.rb
index e40556e869c..2691be81623 100644
--- a/lib/banzai/object_renderer.rb
+++ b/lib/banzai/object_renderer.rb
@@ -17,11 +17,11 @@ module Banzai
# project - A Project to use for redacting Markdown.
# user - The user viewing the Markdown/HTML documents, if any.
- # context - A Hash containing extra attributes to use during redaction
+ # redaction_context - A Hash containing extra attributes to use during redaction
def initialize(project, user = nil, redaction_context = {})
@project = project
@user = user
- @redaction_context = redaction_context
+ @redaction_context = base_context.merge(redaction_context)
end
# Renders and redacts an Array of objects.
@@ -37,7 +37,7 @@ module Banzai
objects.each_with_index do |object, index|
redacted_data = redacted[index]
- object.__send__("redacted_#{attribute}_html=", redacted_data[:document].to_html.html_safe) # rubocop:disable GitlabSecurity/PublicSend
+ object.__send__("redacted_#{attribute}_html=", redacted_data[:document].to_html(save_options).html_safe) # rubocop:disable GitlabSecurity/PublicSend
object.user_visible_reference_count = redacted_data[:visible_reference_count] if object.respond_to?(:user_visible_reference_count)
end
end
@@ -73,15 +73,21 @@ module Banzai
# Returns a Banzai context for the given object and attribute.
def context_for(object, attribute)
- base_context.merge(object.banzai_render_context(attribute))
+ @redaction_context.merge(object.banzai_render_context(attribute))
end
def base_context
- @base_context ||= @redaction_context.merge(
+ {
current_user: user,
project: project,
skip_redaction: true
- )
+ }
+ end
+
+ def save_options
+ return {} unless @redaction_context[:xhtml]
+
+ { save_with: Nokogiri::XML::Node::SaveOptions::AS_XHTML }
end
end
end
diff --git a/lib/banzai/pipeline/broadcast_message_pipeline.rb b/lib/banzai/pipeline/broadcast_message_pipeline.rb
index adc09c8afbd..5dd572de3a1 100644
--- a/lib/banzai/pipeline/broadcast_message_pipeline.rb
+++ b/lib/banzai/pipeline/broadcast_message_pipeline.rb
@@ -7,6 +7,7 @@ module Banzai
Filter::SanitizationFilter,
Filter::EmojiFilter,
+ Filter::ColorFilter,
Filter::AutolinkFilter,
Filter::ExternalLinkFilter
]
diff --git a/lib/banzai/pipeline/gfm_pipeline.rb b/lib/banzai/pipeline/gfm_pipeline.rb
index 3208abfc538..4001b8a85e3 100644
--- a/lib/banzai/pipeline/gfm_pipeline.rb
+++ b/lib/banzai/pipeline/gfm_pipeline.rb
@@ -14,7 +14,8 @@ module Banzai
Filter::SyntaxHighlightFilter,
Filter::MathFilter,
- Filter::UploadLinkFilter,
+ Filter::ColorFilter,
+ Filter::MermaidFilter,
Filter::VideoLinkFilter,
Filter::ImageLazyLoadFilter,
Filter::ImageLinkFilter,
diff --git a/lib/banzai/pipeline/post_process_pipeline.rb b/lib/banzai/pipeline/post_process_pipeline.rb
index 131ac3b0eec..dcd52bc03c7 100644
--- a/lib/banzai/pipeline/post_process_pipeline.rb
+++ b/lib/banzai/pipeline/post_process_pipeline.rb
@@ -3,9 +3,10 @@ module Banzai
class PostProcessPipeline < BasePipeline
def self.filters
FilterArray[
+ Filter::RedactorFilter,
Filter::RelativeLinkFilter,
Filter::IssuableStateFilter,
- Filter::RedactorFilter
+ Filter::AbsoluteLinkFilter
]
end
diff --git a/lib/banzai/querying.rb b/lib/banzai/querying.rb
index fb2faae02bc..a19a05e8c0d 100644
--- a/lib/banzai/querying.rb
+++ b/lib/banzai/querying.rb
@@ -52,8 +52,10 @@ module Banzai
children.each do |child|
next if child.text.blank?
+
node = nodes.shift
break unless node == child
+
filtered_nodes << node
end
end
diff --git a/lib/banzai/redactor.rb b/lib/banzai/redactor.rb
index de3ebe72720..fd457bebf03 100644
--- a/lib/banzai/redactor.rb
+++ b/lib/banzai/redactor.rb
@@ -19,8 +19,9 @@ module Banzai
#
# Returns the documents passed as the first argument.
def redact(documents)
- all_document_nodes = document_nodes(documents)
+ redact_cross_project_references(documents) unless can_read_cross_project?
+ all_document_nodes = document_nodes(documents)
redact_document_nodes(all_document_nodes)
end
@@ -41,16 +42,45 @@ module Banzai
next if visible.include?(node)
doc_data[:visible_reference_count] -= 1
- # The reference should be replaced by the original link's content,
- # which is not always the same as the rendered one.
- content = node.attr('data-original') || node.inner_html
- node.replace(content)
+ redacted_content = redacted_node_content(node)
+ node.replace(redacted_content)
end
end
metadata
end
+ # Returns the redacted content of the given node: either the original link
+ # (an <a> tag), the original content (text), or the node's inner HTML.
+ #
+ def redacted_node_content(node)
+ original_content = node.attr('data-original')
+ link_reference = node.attr('data-link-reference')
+
+ # Build a raw <a> tag, using the link as both href and content, when the
+ # reference was originally a link pattern. We shouldn't return a plain-text href.
+ original_link =
+ if link_reference == 'true' && href = original_content
+ %(<a href="#{href}">#{href}</a>)
+ end
+
+ # The reference should be replaced by the original link's content,
+ # which is not always the same as the rendered one.
+ original_link || original_content || node.inner_html
+ end
+
+ def redact_cross_project_references(documents)
+ extractor = Banzai::IssuableExtractor.new(project, user)
+ issuables = extractor.extract(documents)
+
+ issuables.each do |node, issuable|
+ next if issuable.project == project
+
+ node['class'] = node['class'].gsub('has-tooltip', '')
+ node['title'] = nil
+ end
+ end
+
# Returns the nodes visible to the current user.
#
# nodes - The input nodes to check.
@@ -78,5 +108,11 @@ module Banzai
{ document: document, nodes: Querying.css(document, 'a.gfm[data-reference-type]') }
end
end
+
+ private
+
+ def can_read_cross_project?
+ Ability.allowed?(user, :read_cross_project)
+ end
end
end
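A stand-alone sketch of the replacement logic in redacted_node_content above, using Nokogiri directly; the node below is purely illustrative.

require 'nokogiri'

node = Nokogiri::HTML.fragment(
  '<a class="gfm" data-reference-type="issue" data-link-reference="true" ' \
  'data-original="https://example.com/foo/bar/issues/1" href="/foo/bar/issues/1">#1</a>'
).children.first

original_content = node.attr('data-original')
link_reference   = node.attr('data-link-reference')

# Link-style references are replaced by a bare <a> tag whose href and text are
# both the original URL; otherwise the original content or inner HTML is kept.
original_link =
  if link_reference == 'true' && (href = original_content)
    %(<a href="#{href}">#{href}</a>)
  end

puts original_link || original_content || node.inner_html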
diff --git a/lib/banzai/reference_parser/epic_parser.rb b/lib/banzai/reference_parser/epic_parser.rb
new file mode 100644
index 00000000000..08b8a4c9a0f
--- /dev/null
+++ b/lib/banzai/reference_parser/epic_parser.rb
@@ -0,0 +1,12 @@
+module Banzai
+ module ReferenceParser
+ # The actual parser is implemented in the EE mixin
+ class EpicParser < IssuableParser
+ self.reference_type = :epic
+
+ def records_for_nodes(_nodes)
+ {}
+ end
+ end
+ end
+end
diff --git a/lib/banzai/reference_parser/issuable_parser.rb b/lib/banzai/reference_parser/issuable_parser.rb
new file mode 100644
index 00000000000..fad127d7e5b
--- /dev/null
+++ b/lib/banzai/reference_parser/issuable_parser.rb
@@ -0,0 +1,25 @@
+module Banzai
+ module ReferenceParser
+ class IssuableParser < BaseParser
+ def nodes_visible_to_user(user, nodes)
+ records = records_for_nodes(nodes)
+
+ nodes.select do |node|
+ issuable = records[node]
+
+ issuable && can_read_reference?(user, issuable)
+ end
+ end
+
+ def referenced_by(nodes)
+ records = records_for_nodes(nodes)
+
+ nodes.map { |node| records[node] }.compact.uniq
+ end
+
+ def can_read_reference?(user, issuable)
+ can?(user, "read_#{issuable.class.to_s.underscore}_iid".to_sym, issuable)
+ end
+ end
+ end
+end
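The contract introduced above is deliberately small: a subclass only supplies records_for_nodes (a node-to-record hash) and inherits reference resolution and the permission check. A minimal stand-alone sketch of that shape in plain Ruby, with no GitLab classes involved:

class TinyIssuableParser
  def initialize(records_by_node)
    @records_by_node = records_by_node
  end

  # node => record mapping, mirroring records_for_nodes
  def records_for_nodes(nodes)
    nodes.each_with_object({}) { |node, hash| hash[node] = @records_by_node[node] }
  end

  def referenced_by(nodes)
    records = records_for_nodes(nodes)
    nodes.map { |node| records[node] }.compact.uniq
  end
end

parser = TinyIssuableParser.new('node-a' => :issue_1, 'node-b' => nil)
p parser.referenced_by(%w[node-a node-b]) # => [:issue_1]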
diff --git a/lib/banzai/reference_parser/issue_parser.rb b/lib/banzai/reference_parser/issue_parser.rb
index e0a8ca653cb..230827129b6 100644
--- a/lib/banzai/reference_parser/issue_parser.rb
+++ b/lib/banzai/reference_parser/issue_parser.rb
@@ -1,26 +1,39 @@
module Banzai
module ReferenceParser
- class IssueParser < BaseParser
+ class IssueParser < IssuableParser
self.reference_type = :issue
def nodes_visible_to_user(user, nodes)
- issues = issues_for_nodes(nodes)
+ issues = records_for_nodes(nodes)
+ issues_to_check = issues.values
- readable_issues = Ability
- .issues_readable_by_user(issues.values, user).to_set
-
- nodes.select do |node|
- readable_issues.include?(issues[node])
+ unless can?(user, :read_cross_project)
+ issues_to_check, cross_project_issues = issues_to_check.partition do |issue|
+ issue.project == project
+ end
end
- end
- def referenced_by(nodes)
- issues = issues_for_nodes(nodes)
+ readable_issues = Ability.issues_readable_by_user(issues_to_check, user).to_set
- nodes.map { |node| issues[node] }.compact.uniq
+ nodes.select do |node|
+ issue_in_node = issues[node]
+
+ # We check the inclusion of readable issues first because it's faster.
+ #
+ # But we need to fall back to `read_issue_iid` if the user cannot read
+ # cross-project, since the user might be able to see the IID but not
+ # the issue itself.
+ if readable_issues.include?(issue_in_node)
+ true
+ elsif cross_project_issues&.include?(issue_in_node)
+ can_read_reference?(user, issue_in_node)
+ else
+ false
+ end
+ end
end
- def issues_for_nodes(nodes)
+ def records_for_nodes(nodes)
@issues_for_nodes ||= grouped_objects_for_nodes(
nodes,
Issue.all.includes(
diff --git a/lib/banzai/reference_parser/merge_request_parser.rb b/lib/banzai/reference_parser/merge_request_parser.rb
index 75cbc7fdac4..a370ff5b5b3 100644
--- a/lib/banzai/reference_parser/merge_request_parser.rb
+++ b/lib/banzai/reference_parser/merge_request_parser.rb
@@ -1,25 +1,9 @@
module Banzai
module ReferenceParser
- class MergeRequestParser < BaseParser
+ class MergeRequestParser < IssuableParser
self.reference_type = :merge_request
- def nodes_visible_to_user(user, nodes)
- merge_requests = merge_requests_for_nodes(nodes)
-
- nodes.select do |node|
- merge_request = merge_requests[node]
-
- merge_request && can?(user, :read_merge_request, merge_request.project)
- end
- end
-
- def referenced_by(nodes)
- merge_requests = merge_requests_for_nodes(nodes)
-
- nodes.map { |node| merge_requests[node] }.compact.uniq
- end
-
- def merge_requests_for_nodes(nodes)
+ def records_for_nodes(nodes)
@merge_requests_for_nodes ||= grouped_objects_for_nodes(
nodes,
MergeRequest.includes(
@@ -40,10 +24,6 @@ module Banzai
self.class.data_attribute
)
end
-
- def can_read_reference?(user, ref_project, node)
- can?(user, :read_merge_request, ref_project)
- end
end
end
end
diff --git a/lib/banzai/reference_parser/user_parser.rb b/lib/banzai/reference_parser/user_parser.rb
index 4d336068861..8932d4f2905 100644
--- a/lib/banzai/reference_parser/user_parser.rb
+++ b/lib/banzai/reference_parser/user_parser.rb
@@ -31,6 +31,7 @@ module Banzai
nodes.each do |node|
if node.has_attribute?(group_attr)
next unless can_read_group_reference?(node, user, groups)
+
visible << node
elsif can_read_project_reference?(node)
visible << node
diff --git a/lib/banzai/renderer.rb b/lib/banzai/renderer.rb
index ceca9296851..0050295eeda 100644
--- a/lib/banzai/renderer.rb
+++ b/lib/banzai/renderer.rb
@@ -32,23 +32,20 @@ module Banzai
# Convert a Markdown-containing field on an object into an HTML-safe String
# of HTML. This method is analogous to calling render(object.field), but it
# can cache the rendered HTML in the object, rather than Redis.
- #
- # The context to use is managed by the object and cannot be changed.
- # Use #render, passing it the field text, if a custom rendering is needed.
- def self.render_field(object, field)
+ def self.render_field(object, field, context = {})
unless object.respond_to?(:cached_markdown_fields)
- return cacheless_render_field(object, field)
+ return cacheless_render_field(object, field, context)
end
- object.refresh_markdown_cache!(do_update: update_object?(object)) unless object.cached_html_up_to_date?(field)
+ object.refresh_markdown_cache! unless object.cached_html_up_to_date?(field)
object.cached_html_for(field)
end
# Same as +render_field+, but without consulting or updating the cache field
- def self.cacheless_render_field(object, field, options = {})
+ def self.cacheless_render_field(object, field, context = {})
text = object.__send__(field) # rubocop:disable GitlabSecurity/PublicSend
- context = object.banzai_render_context(field).merge(options)
+ context = context.reverse_merge(object.banzai_render_context(field)) if object.respond_to?(:banzai_render_context)
cacheless_render(text, context)
end
@@ -152,6 +149,7 @@ module Banzai
def self.full_cache_key(cache_key, pipeline_name)
return unless cache_key
+
["banzai", *cache_key, pipeline_name || :full]
end
@@ -160,12 +158,8 @@ module Banzai
# method.
def self.full_cache_multi_key(cache_key, pipeline_name)
return unless cache_key
- Rails.cache.__send__(:expanded_key, full_cache_key(cache_key, pipeline_name)) # rubocop:disable GitlabSecurity/PublicSend
- end
- # GitLab EE needs to disable updates on GET requests in Geo
- def self.update_object?(object)
- true
+ Rails.cache.__send__(:expanded_key, full_cache_key(cache_key, pipeline_name)) # rubocop:disable GitlabSecurity/PublicSend
end
end
end
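A hedged usage sketch of the new optional context above; the `issue` variable is an assumption standing for a record with cached Markdown fields, and `:single_line` is one of Banzai's existing pipelines.

# Cached path: uses (and refreshes) the object's cached HTML for the field.
html = Banzai::Renderer.render_field(issue, :description)

# Uncached path with an explicit per-call context override.
plain = Banzai::Renderer.cacheless_render_field(issue, :description, pipeline: :single_line)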
diff --git a/lib/banzai/request_store_reference_cache.rb b/lib/banzai/request_store_reference_cache.rb
new file mode 100644
index 00000000000..426131442a2
--- /dev/null
+++ b/lib/banzai/request_store_reference_cache.rb
@@ -0,0 +1,27 @@
+module Banzai
+ module RequestStoreReferenceCache
+ def cached_call(request_store_key, cache_key, path: [])
+ if RequestStore.active?
+ cache = RequestStore[request_store_key] ||= Hash.new do |hash, key|
+ hash[key] = Hash.new { |h, k| h[k] = {} }
+ end
+
+ cache = cache.dig(*path) if path.any?
+
+ get_or_set_cache(cache, cache_key) { yield }
+ else
+ yield
+ end
+ end
+
+ def get_or_set_cache(cache, key)
+ if cache.key?(key)
+ cache[key]
+ else
+ value = yield
+ cache[key] = value if key.present?
+ value
+ end
+ end
+ end
+end
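A stand-alone sketch of the get_or_set_cache behaviour above; a plain Hash stands in for the per-request store, and present? comes from ActiveSupport.

require 'active_support/core_ext/object/blank'

def get_or_set_cache(cache, key)
  if cache.key?(key)
    cache[key]
  else
    value = yield
    cache[key] = value if key.present? # blank keys are computed but never stored
    value
  end
end

cache = {}
get_or_set_cache(cache, 'gfm-reference') { 42 }
p cache                                                           # => {"gfm-reference"=>42}
p get_or_set_cache(cache, 'gfm-reference') { raise 'not called' } # => 42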
diff --git a/lib/bitbucket/connection.rb b/lib/bitbucket/connection.rb
index b9279c33f5b..ba5a9e2f04c 100644
--- a/lib/bitbucket/connection.rb
+++ b/lib/bitbucket/connection.rb
@@ -57,7 +57,7 @@ module Bitbucket
end
def provider
- Gitlab::OAuth::Provider.config_for('bitbucket')
+ Gitlab::Auth::OAuth::Provider.config_for('bitbucket')
end
def options
diff --git a/lib/carrier_wave_string_file.rb b/lib/carrier_wave_string_file.rb
new file mode 100644
index 00000000000..6c848902e4a
--- /dev/null
+++ b/lib/carrier_wave_string_file.rb
@@ -0,0 +1,5 @@
+class CarrierWaveStringFile < StringIO
+ def original_filename
+ ""
+ end
+end
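A small usage sketch, assuming the class above is loaded: it lets an in-memory string be handed to a CarrierWave uploader, which expects an IO that responds to #original_filename.

file = CarrierWaveStringFile.new("exported data\n")
file.read              # => "exported data\n"
file.original_filename # => ""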
diff --git a/lib/constraints/group_url_constrainer.rb b/lib/constraints/group_url_constrainer.rb
index 6fc1d56d7a0..fd2ac2db0a9 100644
--- a/lib/constraints/group_url_constrainer.rb
+++ b/lib/constraints/group_url_constrainer.rb
@@ -2,7 +2,7 @@ class GroupUrlConstrainer
def matches?(request)
full_path = request.params[:group_id] || request.params[:id]
- return false unless DynamicPathValidator.valid_group_path?(full_path)
+ return false unless NamespacePathValidator.valid_path?(full_path)
Group.find_by_full_path(full_path, follow_redirects: request.get?).present?
end
diff --git a/lib/constraints/project_url_constrainer.rb b/lib/constraints/project_url_constrainer.rb
index 5bef29eb1da..e90ecb5ec69 100644
--- a/lib/constraints/project_url_constrainer.rb
+++ b/lib/constraints/project_url_constrainer.rb
@@ -4,7 +4,7 @@ class ProjectUrlConstrainer
project_path = request.params[:project_id] || request.params[:id]
full_path = [namespace_path, project_path].join('/')
- return false unless DynamicPathValidator.valid_project_path?(full_path)
+ return false unless ProjectPathValidator.valid_path?(full_path)
# We intentionally allow SELECT(*) here so result of this query can be used
# as cache for further Project.find_by_full_path calls within request
diff --git a/lib/constraints/user_url_constrainer.rb b/lib/constraints/user_url_constrainer.rb
index d16ae7f3f40..3b3ed1c6ddb 100644
--- a/lib/constraints/user_url_constrainer.rb
+++ b/lib/constraints/user_url_constrainer.rb
@@ -2,7 +2,7 @@ class UserUrlConstrainer
def matches?(request)
full_path = request.params[:username]
- return false unless DynamicPathValidator.valid_user_path?(full_path)
+ return false unless NamespacePathValidator.valid_path?(full_path)
User.find_by_full_path(full_path, follow_redirects: request.get?).present?
end
diff --git a/lib/container_registry/registry.rb b/lib/container_registry/registry.rb
index 63bce655f57..f90d711474a 100644
--- a/lib/container_registry/registry.rb
+++ b/lib/container_registry/registry.rb
@@ -11,7 +11,7 @@ module ContainerRegistry
private
def default_path
- @uri.sub(/^https?:\/\//, '')
+ @uri.sub(%r{^https?://}, '')
end
end
end
diff --git a/lib/declarative_policy.rb b/lib/declarative_policy.rb
index ae65653645b..b1949d693ad 100644
--- a/lib/declarative_policy.rb
+++ b/lib/declarative_policy.rb
@@ -30,6 +30,7 @@ module DeclarativePolicy
policy_class = class_for_class(subject.class)
raise "no policy for #{subject.class.name}" if policy_class.nil?
+
policy_class
end
@@ -84,6 +85,7 @@ module DeclarativePolicy
while subject.respond_to?(:declarative_policy_delegate)
raise ArgumentError, "circular delegations" if seen.include?(subject.object_id)
+
seen << subject.object_id
subject = subject.declarative_policy_delegate
end
diff --git a/lib/declarative_policy/base.rb b/lib/declarative_policy/base.rb
index b028169f500..47542194497 100644
--- a/lib/declarative_policy/base.rb
+++ b/lib/declarative_policy/base.rb
@@ -276,6 +276,7 @@ module DeclarativePolicy
# boolean `false`
def cache(key, &b)
return @cache[key] if cached?(key)
+
@cache[key] = yield
end
@@ -291,6 +292,7 @@ module DeclarativePolicy
@_conditions[name] ||=
begin
raise "invalid condition #{name}" unless self.class.conditions.key?(name)
+
ManifestCondition.new(self.class.conditions[name], self)
end
end
diff --git a/lib/declarative_policy/cache.rb b/lib/declarative_policy/cache.rb
index 0804edba016..780d8f707bd 100644
--- a/lib/declarative_policy/cache.rb
+++ b/lib/declarative_policy/cache.rb
@@ -3,6 +3,7 @@ module DeclarativePolicy
class << self
def user_key(user)
return '<anonymous>' if user.nil?
+
id_for(user)
end
@@ -15,6 +16,7 @@ module DeclarativePolicy
def subject_key(subject)
return '<nil>' if subject.nil?
return subject.inspect if subject.is_a?(Symbol)
+
"#{subject.class.name}:#{id_for(subject)}"
end
diff --git a/lib/declarative_policy/rule.rb b/lib/declarative_policy/rule.rb
index bfcec241489..e309244a3b3 100644
--- a/lib/declarative_policy/rule.rb
+++ b/lib/declarative_policy/rule.rb
@@ -83,6 +83,7 @@ module DeclarativePolicy
def cached_pass?(context)
condition = context.condition(@name)
return nil unless condition.cached?
+
condition.pass?
end
@@ -109,6 +110,7 @@ module DeclarativePolicy
def delegated_context(context)
policy = context.delegated_policies[@delegate_name]
raise MissingDelegate if policy.nil?
+
policy
end
@@ -121,6 +123,7 @@ module DeclarativePolicy
def cached_pass?(context)
condition = delegated_context(context).condition(@name)
return nil unless condition.cached?
+
condition.pass?
rescue MissingDelegate
false
@@ -157,6 +160,7 @@ module DeclarativePolicy
def cached_pass?(context)
runner = context.runner(@ability)
return nil unless runner.cached?
+
runner.pass?
end
@@ -206,11 +210,13 @@ module DeclarativePolicy
end
def cached_pass?(context)
- passes = @rules.map { |r| r.cached_pass?(context) }
- return false if passes.any? { |p| p == false }
- return true if passes.all? { |p| p == true }
+ @rules.each do |rule|
+ pass = rule.cached_pass?(context)
+
+ return pass if pass.nil? || pass == false
+ end
- nil
+ true
end
def repr
@@ -245,15 +251,18 @@ module DeclarativePolicy
end
def cached_pass?(context)
- passes = @rules.map { |r| r.cached_pass?(context) }
- return true if passes.any? { |p| p == true }
- return false if passes.all? { |p| p == false }
+ @rules.each do |rule|
+ pass = rule.cached_pass?(context)
+
+ return pass if pass.nil? || pass == true
+ end
- nil
+ false
end
def score(context)
return 0 unless cached_pass?(context).nil?
+
@rules.map { |r| r.score(context) }.inject(0, :+)
end
diff --git a/lib/declarative_policy/runner.rb b/lib/declarative_policy/runner.rb
index 56afd1f1392..77c91817382 100644
--- a/lib/declarative_policy/runner.rb
+++ b/lib/declarative_policy/runner.rb
@@ -43,6 +43,7 @@ module DeclarativePolicy
# used by Rule::Ability. See #steps_by_score
def score
return 0 if cached?
+
steps.map(&:score).inject(0, :+)
end
@@ -107,7 +108,7 @@ module DeclarativePolicy
end
# This is the core spot where all those `#score` methods matter.
- # It is critcal for performance to run steps in the correct order,
+ # It is critical for performance to run steps in the correct order,
# so that we don't compute expensive conditions (potentially n times
# if we're called on, say, a large list of users).
#
@@ -139,30 +140,39 @@ module DeclarativePolicy
return
end
- steps = Set.new(@steps)
- remaining_enablers = steps.count { |s| s.enable? }
+ remaining_steps = Set.new(@steps)
+ remaining_enablers, remaining_preventers = remaining_steps.partition(&:enable?).map { |s| Set.new(s) }
loop do
- return if steps.empty?
+ if @state.enabled?
+ # Once we set this, we never need to unset it, because a single
+ # prevent will stop this from being enabled
+ remaining_steps = remaining_preventers
+ else
+ # if the permission hasn't yet been enabled and we only have
+ # prevent steps left, we short-circuit the state here
+ @state.prevent! if remaining_enablers.empty?
+ end
- # if the permission hasn't yet been enabled and we only have
- # prevent steps left, we short-circuit the state here
- @state.prevent! if !@state.enabled? && remaining_enablers == 0
+ return if remaining_steps.empty?
lowest_score = Float::INFINITY
next_step = nil
- steps.each do |step|
+ remaining_steps.each do |step|
score = step.score
+
if score < lowest_score
next_step = step
lowest_score = score
end
- end
- steps.delete(next_step)
+ break if lowest_score.zero?
+ end
- remaining_enablers -= 1 if next_step.enable?
+ [remaining_steps, remaining_enablers, remaining_preventers].each do |set|
+ set.delete(next_step)
+ end
yield next_step, lowest_score
end
diff --git a/lib/email_template_interceptor.rb b/lib/email_template_interceptor.rb
index f2bf3d0fb2b..3978a6d9fe4 100644
--- a/lib/email_template_interceptor.rb
+++ b/lib/email_template_interceptor.rb
@@ -1,10 +1,8 @@
# Read about interceptors in http://guides.rubyonrails.org/action_mailer_basics.html#intercepting-emails
class EmailTemplateInterceptor
- extend Gitlab::CurrentSettings
-
def self.delivering_email(message)
# Remove HTML part if HTML emails are disabled.
- unless current_application_settings.html_emails_enabled
+ unless Gitlab::CurrentSettings.html_emails_enabled
message.parts.delete_if do |part|
part.content_type.start_with?('text/html')
end
diff --git a/lib/extracts_path.rb b/lib/extracts_path.rb
index 721ed97bb6b..a9b04c183ad 100644
--- a/lib/extracts_path.rb
+++ b/lib/extracts_path.rb
@@ -40,7 +40,7 @@ module ExtractsPath
def extract_ref(id)
pair = ['', '']
- return pair unless @project
+ return pair unless @project # rubocop:disable Gitlab/ModuleWithInstanceVariables
if id =~ /^(\h{40})(.+)/
# If the ref appears to be a SHA, we're done, just split the string
@@ -56,7 +56,7 @@ module ExtractsPath
if valid_refs.length == 0
# No exact ref match, so just try our best
- pair = id.match(/([^\/]+)(.*)/).captures
+ pair = id.match(%r{([^/]+)(.*)}).captures
else
# There is a distinct possibility that multiple refs prefix the ID.
# Use the longest match to maximize the chance that we have the
@@ -68,7 +68,7 @@ module ExtractsPath
end
# Remove ending slashes from path
- pair[1].gsub!(/^\/|\/$/, '')
+ pair[1].gsub!(%r{^/|/$}, '')
pair
end
@@ -104,6 +104,7 @@ module ExtractsPath
#
# Automatically renders `not_found!` if a valid tree path could not be
# resolved (e.g., when a user inserts an invalid path or ref).
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
def assign_ref_vars
# assign allowed options
allowed_options = ["filter_ref"]
@@ -127,13 +128,18 @@ module ExtractsPath
@hex_path = Digest::SHA1.hexdigest(@path)
@logs_path = logs_file_project_ref_path(@project, @ref, @path)
-
rescue RuntimeError, NoMethodError, InvalidPathError
render_404
end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
def tree
- @tree ||= @repo.tree(@commit.id, @path)
+ @tree ||= @repo.tree(@commit.id, @path) # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ end
+
+ def lfs_blob_ids
+ blob_ids = tree.blobs.map(&:id)
+ @lfs_blob_ids = Gitlab::Git::Blob.batch_lfs_pointers(@project.repository, blob_ids).map(&:id) # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
private
@@ -146,8 +152,8 @@ module ExtractsPath
end
def ref_names
- return [] unless @project
+ return [] unless @project # rubocop:disable Gitlab/ModuleWithInstanceVariables
- @ref_names ||= @project.repository.ref_names
+ @ref_names ||= @project.repository.ref_names # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
end
diff --git a/lib/feature.rb b/lib/feature.rb
index 4bd29aed687..8e9ba5c530a 100644
--- a/lib/feature.rb
+++ b/lib/feature.rb
@@ -1,10 +1,12 @@
-require 'flipper/adapters/active_record'
-
class Feature
# Classes to override flipper table names
class FlipperFeature < Flipper::Adapters::ActiveRecord::Feature
# Using `self.table_name` won't work. ActiveRecord bug?
superclass.table_name = 'features'
+
+ def self.feature_names
+ pluck(:key)
+ end
end
class FlipperGate < Flipper::Adapters::ActiveRecord::Gate
@@ -22,11 +24,19 @@ class Feature
flipper.feature(key)
end
+ def persisted_names
+ if RequestStore.active?
+ RequestStore[:flipper_persisted_names] ||= FlipperFeature.feature_names
+ else
+ FlipperFeature.feature_names
+ end
+ end
+
def persisted?(feature)
# Flipper creates in-memory features when asked for a not-yet-created one.
# If we want to check whether a feature has actually been set, we look for it
# in the persisted features list.
- all.map(&:name).include?(feature.name)
+ persisted_names.include?(feature.name)
end
def enabled?(key, thing = nil)
@@ -50,12 +60,7 @@ class Feature
end
def flipper
- @flipper ||= begin
- adapter = Flipper::Adapters::ActiveRecord.new(
- feature_class: FlipperFeature, gate_class: FlipperGate)
-
- Flipper.new(adapter)
- end
+ @flipper ||= Flipper.instance
end
# This method is called from config/initializers/flipper.rb and can be used
diff --git a/lib/file_size_validator.rb b/lib/file_size_validator.rb
index de391de9059..69d981e8be9 100644
--- a/lib/file_size_validator.rb
+++ b/lib/file_size_validator.rb
@@ -8,6 +8,7 @@ class FileSizeValidator < ActiveModel::EachValidator
def initialize(options)
if range = (options.delete(:in) || options.delete(:within))
raise ArgumentError, ":in and :within must be a Range" unless range.is_a?(Range)
+
options[:minimum], options[:maximum] = range.begin, range.end
options[:maximum] -= 1 if range.exclude_end?
end
diff --git a/lib/gitaly/server.rb b/lib/gitaly/server.rb
new file mode 100644
index 00000000000..605e93022e7
--- /dev/null
+++ b/lib/gitaly/server.rb
@@ -0,0 +1,43 @@
+module Gitaly
+ class Server
+ def self.all
+ Gitlab.config.repositories.storages.keys.map { |s| Gitaly::Server.new(s) }
+ end
+
+ attr_reader :storage
+
+ def initialize(storage)
+ @storage = storage
+ end
+
+ def server_version
+ info.server_version
+ end
+
+ def git_binary_version
+ info.git_version
+ end
+
+ def up_to_date?
+ server_version == Gitlab::GitalyClient.expected_server_version
+ end
+
+ def address
+ Gitlab::GitalyClient.address(@storage)
+ rescue RuntimeError => e
+ "Error getting the address: #{e.message}"
+ end
+
+ private
+
+ def info
+ @info ||=
+ begin
+ Gitlab::GitalyClient::ServerService.new(@storage).info
+ rescue GRPC::Unavailable, GRPC::DeadlineExceeded
+ # This will show the server as being out of date
+ Gitaly::ServerInfoResponse.new(git_version: '', server_version: '')
+ end
+ end
+ end
+end
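A hedged console sketch using only the public API of the new class above; it assumes a GitLab installation with configured repository storages.

Gitaly::Server.all.each do |server|
  puts format(
    '%s (%s): gitaly %s, git %s, up to date: %s',
    server.storage,
    server.address,
    server.server_version,
    server.git_binary_version,
    server.up_to_date?
  )
end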
diff --git a/lib/github/client.rb b/lib/github/client.rb
deleted file mode 100644
index 9c476df7d46..00000000000
--- a/lib/github/client.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-module Github
- class Client
- TIMEOUT = 60
-
- attr_reader :connection, :rate_limit
-
- def initialize(options)
- @connection = Faraday.new(url: options.fetch(:url, root_endpoint)) do |faraday|
- faraday.options.open_timeout = options.fetch(:timeout, TIMEOUT)
- faraday.options.timeout = options.fetch(:timeout, TIMEOUT)
- faraday.authorization 'token', options.fetch(:token)
- faraday.adapter :net_http
- faraday.ssl.verify = verify_ssl
- end
-
- @rate_limit = RateLimit.new(connection)
- end
-
- def get(url, query = {})
- exceed, reset_in = rate_limit.get
- sleep reset_in if exceed
-
- Github::Response.new(connection.get(url, query))
- end
-
- private
-
- def root_endpoint
- custom_endpoint || github_endpoint
- end
-
- def custom_endpoint
- github_omniauth_provider.dig('args', 'client_options', 'site')
- end
-
- def verify_ssl
- # If there is no config, we're connecting to github.com
- # and we should verify ssl.
- github_omniauth_provider.fetch('verify_ssl', true)
- end
-
- def github_endpoint
- OmniAuth::Strategies::GitHub.default_options[:client_options][:site]
- end
-
- def github_omniauth_provider
- @github_omniauth_provider ||=
- Gitlab.config.omniauth.providers
- .find { |provider| provider.name == 'github' }
- .to_h
- end
- end
-end
diff --git a/lib/github/collection.rb b/lib/github/collection.rb
deleted file mode 100644
index 014b2038c4b..00000000000
--- a/lib/github/collection.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-module Github
- class Collection
- attr_reader :options
-
- def initialize(options)
- @options = options
- end
-
- def fetch(url, query = {})
- return [] if url.blank?
-
- Enumerator.new do |yielder|
- loop do
- response = client.get(url, query)
- response.body.each { |item| yielder << item }
-
- raise StopIteration unless response.rels.key?(:next)
- url = response.rels[:next]
- end
- end.lazy
- end
-
- private
-
- def client
- @client ||= Github::Client.new(options)
- end
- end
-end
diff --git a/lib/github/error.rb b/lib/github/error.rb
deleted file mode 100644
index 66d7afaa787..00000000000
--- a/lib/github/error.rb
+++ /dev/null
@@ -1,3 +0,0 @@
-module Github
- RepositoryFetchError = Class.new(StandardError)
-end
diff --git a/lib/github/import.rb b/lib/github/import.rb
deleted file mode 100644
index 9354e142d3d..00000000000
--- a/lib/github/import.rb
+++ /dev/null
@@ -1,392 +0,0 @@
-require_relative 'error'
-
-module Github
- class Import
- include Gitlab::ShellAdapter
-
- class MergeRequest < ::MergeRequest
- self.table_name = 'merge_requests'
-
- self.reset_callbacks :create
- self.reset_callbacks :save
- self.reset_callbacks :commit
- self.reset_callbacks :update
- self.reset_callbacks :validate
- end
-
- class Issue < ::Issue
- self.table_name = 'issues'
-
- self.reset_callbacks :save
- self.reset_callbacks :create
- self.reset_callbacks :commit
- self.reset_callbacks :update
- self.reset_callbacks :validate
- end
-
- class Note < ::Note
- self.table_name = 'notes'
-
- self.reset_callbacks :save
- self.reset_callbacks :commit
- self.reset_callbacks :update
- self.reset_callbacks :validate
- end
-
- class LegacyDiffNote < ::LegacyDiffNote
- self.table_name = 'notes'
-
- self.reset_callbacks :commit
- self.reset_callbacks :update
- self.reset_callbacks :validate
- end
-
- attr_reader :project, :repository, :repo, :repo_url, :wiki_url,
- :options, :errors, :cached, :verbose
-
- def initialize(project, options = {})
- @project = project
- @repository = project.repository
- @repo = project.import_source
- @repo_url = project.import_url
- @wiki_url = project.import_url.sub(/\.git\z/, '.wiki.git')
- @options = options.reverse_merge(token: project.import_data&.credentials&.fetch(:user))
- @verbose = options.fetch(:verbose, false)
- @cached = Hash.new { |hash, key| hash[key] = Hash.new }
- @errors = []
- end
-
- # rubocop: disable Rails/Output
- def execute
- puts 'Fetching repository...'.color(:aqua) if verbose
- fetch_repository
- puts 'Fetching labels...'.color(:aqua) if verbose
- fetch_labels
- puts 'Fetching milestones...'.color(:aqua) if verbose
- fetch_milestones
- puts 'Fetching pull requests...'.color(:aqua) if verbose
- fetch_pull_requests
- puts 'Fetching issues...'.color(:aqua) if verbose
- fetch_issues
- puts 'Fetching releases...'.color(:aqua) if verbose
- fetch_releases
- puts 'Cloning wiki repository...'.color(:aqua) if verbose
- fetch_wiki_repository
- puts 'Expiring repository cache...'.color(:aqua) if verbose
- expire_repository_cache
-
- true
- rescue Github::RepositoryFetchError
- expire_repository_cache
- false
- ensure
- keep_track_of_errors
- end
-
- private
-
- def fetch_repository
- begin
- project.ensure_repository
- project.repository.add_remote('github', repo_url)
- project.repository.set_remote_as_mirror('github')
- project.repository.fetch_remote('github', forced: true)
- rescue Gitlab::Git::Repository::NoRepository, Gitlab::Shell::Error => e
- error(:project, repo_url, e.message)
- raise Github::RepositoryFetchError
- end
- end
-
- def fetch_wiki_repository
- return if project.wiki.repository_exists?
-
- wiki_path = "#{project.disk_path}.wiki"
- gitlab_shell.import_repository(project.repository_storage_path, wiki_path, wiki_url)
- rescue Gitlab::Shell::Error => e
- # GitHub error message when the wiki repo has not been created,
- # this means that repo has wiki enabled, but have no pages. So,
- # we can skip the import.
- if e.message !~ /repository not exported/
- error(:wiki, wiki_url, e.message)
- end
- end
-
- def fetch_labels
- url = "/repos/#{repo}/labels"
-
- while url
- response = Github::Client.new(options).get(url)
-
- response.body.each do |raw|
- begin
- representation = Github::Representation::Label.new(raw)
-
- label = project.labels.find_or_create_by!(title: representation.title) do |label|
- label.color = representation.color
- end
-
- cached[:label_ids][label.title] = label.id
- rescue => e
- error(:label, representation.url, e.message)
- end
- end
-
- url = response.rels[:next]
- end
- end
-
- def fetch_milestones
- url = "/repos/#{repo}/milestones"
-
- while url
- response = Github::Client.new(options).get(url, state: :all)
-
- response.body.each do |raw|
- begin
- milestone = Github::Representation::Milestone.new(raw)
- next if project.milestones.where(iid: milestone.iid).exists?
-
- project.milestones.create!(
- iid: milestone.iid,
- title: milestone.title,
- description: milestone.description,
- due_date: milestone.due_date,
- state: milestone.state,
- created_at: milestone.created_at,
- updated_at: milestone.updated_at
- )
- rescue => e
- error(:milestone, milestone.url, e.message)
- end
- end
-
- url = response.rels[:next]
- end
- end
-
- def fetch_pull_requests
- url = "/repos/#{repo}/pulls"
-
- while url
- response = Github::Client.new(options).get(url, state: :all, sort: :created, direction: :asc)
-
- response.body.each do |raw|
- pull_request = Github::Representation::PullRequest.new(raw, options.merge(project: project))
- merge_request = MergeRequest.find_or_initialize_by(iid: pull_request.iid, source_project_id: project.id)
- next unless merge_request.new_record? && pull_request.valid?
-
- begin
- pull_request.restore_branches!
-
- author_id = user_id(pull_request.author, project.creator_id)
- description = format_description(pull_request.description, pull_request.author)
-
- merge_request.attributes = {
- iid: pull_request.iid,
- title: pull_request.title,
- description: description,
- source_project: pull_request.source_project,
- source_branch: pull_request.source_branch_name,
- source_branch_sha: pull_request.source_branch_sha,
- target_project: pull_request.target_project,
- target_branch: pull_request.target_branch_name,
- target_branch_sha: pull_request.target_branch_sha,
- state: pull_request.state,
- milestone_id: milestone_id(pull_request.milestone),
- author_id: author_id,
- assignee_id: user_id(pull_request.assignee),
- created_at: pull_request.created_at,
- updated_at: pull_request.updated_at
- }
-
- merge_request.save!(validate: false)
- merge_request.merge_request_diffs.create
-
- # Fetch review comments
- review_comments_url = "/repos/#{repo}/pulls/#{pull_request.iid}/comments"
- fetch_comments(merge_request, :review_comment, review_comments_url, LegacyDiffNote)
-
- # Fetch comments
- comments_url = "/repos/#{repo}/issues/#{pull_request.iid}/comments"
- fetch_comments(merge_request, :comment, comments_url)
- rescue => e
- error(:pull_request, pull_request.url, e.message)
- ensure
- pull_request.remove_restored_branches!
- end
- end
-
- url = response.rels[:next]
- end
- end
-
- def fetch_issues
- url = "/repos/#{repo}/issues"
-
- while url
- response = Github::Client.new(options).get(url, state: :all, sort: :created, direction: :asc)
-
- response.body.each { |raw| populate_issue(raw) }
-
- url = response.rels[:next]
- end
- end
-
- def populate_issue(raw)
- representation = Github::Representation::Issue.new(raw, options)
-
- begin
- # Every pull request is an issue, but not every issue
- # is a pull request. For this reason, "shared" actions
- # for both features, like manipulating assignees, labels
- # and milestones, are provided within the Issues API.
- if representation.pull_request?
- return unless representation.has_labels?
-
- merge_request = MergeRequest.find_by!(target_project_id: project.id, iid: representation.iid)
- merge_request.update_attribute(:label_ids, label_ids(representation.labels))
- else
- return if Issue.where(iid: representation.iid, project_id: project.id).exists?
-
- author_id = user_id(representation.author, project.creator_id)
- issue = Issue.new
- issue.iid = representation.iid
- issue.project_id = project.id
- issue.title = representation.title
- issue.description = format_description(representation.description, representation.author)
- issue.state = representation.state
- issue.label_ids = label_ids(representation.labels)
- issue.milestone_id = milestone_id(representation.milestone)
- issue.author_id = author_id
- issue.assignee_ids = [user_id(representation.assignee)]
- issue.created_at = representation.created_at
- issue.updated_at = representation.updated_at
- issue.save!(validate: false)
-
- # Fetch comments
- if representation.has_comments?
- comments_url = "/repos/#{repo}/issues/#{issue.iid}/comments"
- fetch_comments(issue, :comment, comments_url)
- end
- end
- rescue => e
- error(:issue, representation.url, e.message)
- end
- end
-
- def fetch_comments(noteable, type, url, klass = Note)
- while url
- comments = Github::Client.new(options).get(url)
-
- ActiveRecord::Base.no_touching do
- comments.body.each do |raw|
- begin
- representation = Github::Representation::Comment.new(raw, options)
- author_id = user_id(representation.author, project.creator_id)
-
- note = klass.new
- note.project_id = project.id
- note.noteable = noteable
- note.note = format_description(representation.note, representation.author)
- note.commit_id = representation.commit_id
- note.line_code = representation.line_code
- note.author_id = author_id
- note.created_at = representation.created_at
- note.updated_at = representation.updated_at
- note.save!(validate: false)
- rescue => e
- error(type, representation.url, e.message)
- end
- end
- end
-
- url = comments.rels[:next]
- end
- end
-
- def fetch_releases
- url = "/repos/#{repo}/releases"
-
- while url
- response = Github::Client.new(options).get(url)
-
- response.body.each do |raw|
- representation = Github::Representation::Release.new(raw)
- next unless representation.valid?
-
- release = ::Release.find_or_initialize_by(project_id: project.id, tag: representation.tag)
- next unless release.new_record?
-
- begin
- release.description = representation.description
- release.created_at = representation.created_at
- release.updated_at = representation.updated_at
- release.save!(validate: false)
- rescue => e
- error(:release, representation.url, e.message)
- end
- end
-
- url = response.rels[:next]
- end
- end
-
- def label_ids(labels)
- labels.map { |attrs| cached[:label_ids][attrs.fetch('name')] }.compact
- end
-
- def milestone_id(milestone)
- return unless milestone.present?
-
- project.milestones.select(:id).find_by(iid: milestone.iid)&.id
- end
-
- def user_id(user, fallback_id = nil)
- return unless user.present?
- return cached[:user_ids][user.id] if cached[:user_ids][user.id].present?
-
- gitlab_user_id = user_id_by_external_uid(user.id) || user_id_by_email(user.email)
-
- cached[:gitlab_user_ids][user.id] = gitlab_user_id.present?
- cached[:user_ids][user.id] = gitlab_user_id || fallback_id
- end
-
- def user_id_by_email(email)
- return nil unless email
-
- ::User.find_by_any_email(email)&.id
- end
-
- def user_id_by_external_uid(id)
- return nil unless id
-
- ::User.select(:id)
- .joins(:identities)
- .merge(::Identity.where(provider: :github, extern_uid: id))
- .first&.id
- end
-
- def format_description(body, author)
- return body if cached[:gitlab_user_ids][author.id]
-
- "*Created by: #{author.username}*\n\n#{body}"
- end
-
- def expire_repository_cache
- repository.expire_content_cache if project.repository_exists?
- end
-
- def keep_track_of_errors
- return unless errors.any?
-
- project.update_column(:import_error, {
- message: 'The remote data could not be fully imported.',
- errors: errors
- }.to_json)
- end
-
- def error(type, url, message)
- errors << { type: type, url: Gitlab::UrlSanitizer.sanitize(url), error: message }
- end
- end
-end
diff --git a/lib/github/rate_limit.rb b/lib/github/rate_limit.rb
deleted file mode 100644
index 884693d093c..00000000000
--- a/lib/github/rate_limit.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-module Github
- class RateLimit
- SAFE_REMAINING_REQUESTS = 100
- SAFE_RESET_TIME = 500
- RATE_LIMIT_URL = '/rate_limit'.freeze
-
- attr_reader :connection
-
- def initialize(connection)
- @connection = connection
- end
-
- def get
- response = connection.get(RATE_LIMIT_URL)
-
- # GitHub Rate Limit API returns 404 when the rate limit is disabled
- return false unless response.status != 404
-
- body = Oj.load(response.body, class_cache: false, mode: :compat)
- remaining = body.dig('rate', 'remaining').to_i
- reset_in = body.dig('rate', 'reset').to_i
- exceed = remaining <= SAFE_REMAINING_REQUESTS
-
- [exceed, reset_in]
- end
- end
-end
diff --git a/lib/github/repositories.rb b/lib/github/repositories.rb
deleted file mode 100644
index c1c9448f305..00000000000
--- a/lib/github/repositories.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-module Github
- class Repositories
- attr_reader :options
-
- def initialize(options)
- @options = options
- end
-
- def fetch
- Collection.new(options).fetch(repos_url)
- end
-
- private
-
- def repos_url
- '/user/repos'
- end
- end
-end
diff --git a/lib/github/representation/base.rb b/lib/github/representation/base.rb
deleted file mode 100644
index f26bdbdd546..00000000000
--- a/lib/github/representation/base.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-module Github
- module Representation
- class Base
- def initialize(raw, options = {})
- @raw = raw
- @options = options
- end
-
- def id
- raw['id']
- end
-
- def url
- raw['url']
- end
-
- def created_at
- raw['created_at']
- end
-
- def updated_at
- raw['updated_at']
- end
-
- private
-
- attr_reader :raw, :options
- end
- end
-end
diff --git a/lib/github/representation/branch.rb b/lib/github/representation/branch.rb
deleted file mode 100644
index 823e8e9a9c4..00000000000
--- a/lib/github/representation/branch.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-module Github
- module Representation
- class Branch < Representation::Base
- attr_reader :repository
-
- def user
- raw.dig('user', 'login') || 'unknown'
- end
-
- def repo
- return @repo if defined?(@repo)
-
- @repo = Github::Representation::Repo.new(raw['repo']) if raw['repo'].present?
- end
-
- def ref
- raw['ref']
- end
-
- def sha
- raw['sha']
- end
-
- def short_sha
- Commit.truncate_sha(sha)
- end
-
- def exists?
- @exists ||= branch_exists? && commit_exists?
- end
-
- def valid?
- sha.present? && ref.present?
- end
-
- def restore!(name)
- repository.create_branch(name, sha)
- rescue Gitlab::Git::Repository::InvalidRef => e
- Rails.logger.error("#{self.class.name}: Could not restore branch #{name}: #{e}")
- end
-
- def remove!(name)
- repository.delete_branch(name)
- rescue Gitlab::Git::Repository::DeleteBranchError => e
- Rails.logger.error("#{self.class.name}: Could not remove branch #{name}: #{e}")
- end
-
- private
-
- def branch_exists?
- repository.branch_exists?(ref)
- end
-
- def commit_exists?
- repository.branch_names_contains(sha).include?(ref)
- end
-
- def repository
- @repository ||= options.fetch(:repository)
- end
- end
- end
-end
diff --git a/lib/github/representation/comment.rb b/lib/github/representation/comment.rb
deleted file mode 100644
index 1b5be91461b..00000000000
--- a/lib/github/representation/comment.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-module Github
- module Representation
- class Comment < Representation::Base
- def note
- raw['body'] || ''
- end
-
- def author
- @author ||= Github::Representation::User.new(raw['user'], options)
- end
-
- def commit_id
- raw['commit_id']
- end
-
- def line_code
- return unless on_diff?
-
- parsed_lines = Gitlab::Diff::Parser.new.parse(diff_hunk.lines)
- generate_line_code(parsed_lines.to_a.last)
- end
-
- private
-
- def generate_line_code(line)
- Gitlab::Diff::LineCode.generate(file_path, line.new_pos, line.old_pos)
- end
-
- def on_diff?
- diff_hunk.present?
- end
-
- def diff_hunk
- raw['diff_hunk']
- end
-
- def file_path
- raw['path']
- end
- end
- end
-end
diff --git a/lib/github/representation/issuable.rb b/lib/github/representation/issuable.rb
deleted file mode 100644
index 9713b82615d..00000000000
--- a/lib/github/representation/issuable.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-module Github
- module Representation
- class Issuable < Representation::Base
- def iid
- raw['number']
- end
-
- def title
- raw['title']
- end
-
- def description
- raw['body'] || ''
- end
-
- def milestone
- return unless raw['milestone'].present?
-
- @milestone ||= Github::Representation::Milestone.new(raw['milestone'])
- end
-
- def author
- @author ||= Github::Representation::User.new(raw['user'], options)
- end
-
- def assignee
- return unless assigned?
-
- @assignee ||= Github::Representation::User.new(raw['assignee'], options)
- end
-
- def assigned?
- raw['assignee'].present?
- end
- end
- end
-end
diff --git a/lib/github/representation/issue.rb b/lib/github/representation/issue.rb
deleted file mode 100644
index df3540a6e6c..00000000000
--- a/lib/github/representation/issue.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-module Github
- module Representation
- class Issue < Representation::Issuable
- def labels
- raw['labels']
- end
-
- def state
- raw['state'] == 'closed' ? 'closed' : 'opened'
- end
-
- def has_comments?
- raw['comments'] > 0
- end
-
- def has_labels?
- labels.count > 0
- end
-
- def pull_request?
- raw['pull_request'].present?
- end
- end
- end
-end
diff --git a/lib/github/representation/label.rb b/lib/github/representation/label.rb
deleted file mode 100644
index 60aa51f9569..00000000000
--- a/lib/github/representation/label.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-module Github
- module Representation
- class Label < Representation::Base
- def color
- "##{raw['color']}"
- end
-
- def title
- raw['name']
- end
- end
- end
-end
diff --git a/lib/github/representation/milestone.rb b/lib/github/representation/milestone.rb
deleted file mode 100644
index 917e6394ad4..00000000000
--- a/lib/github/representation/milestone.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-module Github
- module Representation
- class Milestone < Representation::Base
- def iid
- raw['number']
- end
-
- def title
- raw['title']
- end
-
- def description
- raw['description']
- end
-
- def due_date
- raw['due_on']
- end
-
- def state
- raw['state'] == 'closed' ? 'closed' : 'active'
- end
- end
- end
-end
diff --git a/lib/github/representation/pull_request.rb b/lib/github/representation/pull_request.rb
deleted file mode 100644
index 55461097e8a..00000000000
--- a/lib/github/representation/pull_request.rb
+++ /dev/null
@@ -1,120 +0,0 @@
-module Github
- module Representation
- class PullRequest < Representation::Issuable
- delegate :user, :repo, :ref, :sha, to: :source_branch, prefix: true
- delegate :user, :exists?, :repo, :ref, :sha, :short_sha, to: :target_branch, prefix: true
-
- def source_project
- project
- end
-
- def source_branch_name
- @source_branch_name ||=
- if cross_project? || !source_branch_exists?
- source_branch_name_prefixed
- else
- source_branch_ref
- end
- end
-
- def source_branch_exists?
- return @source_branch_exists if defined?(@source_branch_exists)
-
- @source_branch_exists = !cross_project? && source_branch.exists?
- end
-
- def target_project
- project
- end
-
- def target_branch_name
- @target_branch_name ||= target_branch_exists? ? target_branch_ref : target_branch_name_prefixed
- end
-
- def target_branch_exists?
- @target_branch_exists ||= target_branch.exists?
- end
-
- def state
- return 'merged' if raw['state'] == 'closed' && raw['merged_at'].present?
- return 'closed' if raw['state'] == 'closed'
-
- 'opened'
- end
-
- def opened?
- state == 'opened'
- end
-
- def valid?
- source_branch.valid? && target_branch.valid?
- end
-
- def restore_branches!
- restore_source_branch!
- restore_target_branch!
- end
-
- def remove_restored_branches!
- return if opened?
-
- remove_source_branch!
- remove_target_branch!
- end
-
- private
-
- def project
- @project ||= options.fetch(:project)
- end
-
- def source_branch
- @source_branch ||= Representation::Branch.new(raw['head'], repository: project.repository)
- end
-
- def source_branch_name_prefixed
- "gh-#{target_branch_short_sha}/#{iid}/#{source_branch_user}/#{source_branch_ref}"
- end
-
- def target_branch
- @target_branch ||= Representation::Branch.new(raw['base'], repository: project.repository)
- end
-
- def target_branch_name_prefixed
- "gl-#{target_branch_short_sha}/#{iid}/#{target_branch_user}/#{target_branch_ref}"
- end
-
- def cross_project?
- return true if source_branch_repo.nil?
-
- source_branch_repo.id != target_branch_repo.id
- end
-
- def restore_source_branch!
- return if source_branch_exists?
-
- source_branch.restore!(source_branch_name)
- end
-
- def restore_target_branch!
- return if target_branch_exists?
-
- target_branch.restore!(target_branch_name)
- end
-
- def remove_source_branch!
- # We should remove the source/target branches only if they were
- # restored. Otherwise, we'll remove branches like 'master' that
- # target_branch_exists? returns true. In other words, we need
- # to clean up only the restored branches that (source|target)_branch_exists?
- # returns false for the first time it has been called, because of
- # this that is important to memoize these values.
- source_branch.remove!(source_branch_name) unless source_branch_exists?
- end
-
- def remove_target_branch!
- target_branch.remove!(target_branch_name) unless target_branch_exists?
- end
- end
- end
-end
diff --git a/lib/github/representation/release.rb b/lib/github/representation/release.rb
deleted file mode 100644
index e7e4b428c1a..00000000000
--- a/lib/github/representation/release.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-module Github
- module Representation
- class Release < Representation::Base
- def description
- raw['body']
- end
-
- def tag
- raw['tag_name']
- end
-
- def valid?
- !raw['draft']
- end
- end
- end
-end
diff --git a/lib/github/representation/repo.rb b/lib/github/representation/repo.rb
deleted file mode 100644
index 6938aa7db05..00000000000
--- a/lib/github/representation/repo.rb
+++ /dev/null
@@ -1,6 +0,0 @@
-module Github
- module Representation
- class Repo < Representation::Base
- end
- end
-end
diff --git a/lib/github/representation/user.rb b/lib/github/representation/user.rb
deleted file mode 100644
index 18591380e25..00000000000
--- a/lib/github/representation/user.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-module Github
- module Representation
- class User < Representation::Base
- def email
- return @email if defined?(@email)
-
- @email = Github::User.new(username, options).get.fetch('email', nil)
- end
-
- def username
- raw['login']
- end
- end
- end
-end
diff --git a/lib/github/response.rb b/lib/github/response.rb
deleted file mode 100644
index 761c524b553..00000000000
--- a/lib/github/response.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-module Github
- class Response
- attr_reader :raw, :headers, :status
-
- def initialize(response)
- @raw = response
- @headers = response.headers
- @status = response.status
- end
-
- def body
- Oj.load(raw.body, class_cache: false, mode: :compat)
- end
-
- def rels
- links = headers['Link'].to_s.split(', ').map do |link|
- href, name = link.match(/<(.*?)>; rel="(\w+)"/).captures
-
- [name.to_sym, href]
- end
-
- Hash[*links.flatten]
- end
- end
-end
diff --git a/lib/github/user.rb b/lib/github/user.rb
deleted file mode 100644
index f88a29e590b..00000000000
--- a/lib/github/user.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-module Github
- class User
- attr_reader :username, :options
-
- def initialize(username, options)
- @username = username
- @options = options
- end
-
- def get
- client.get(user_url).body
- end
-
- private
-
- def client
- @client ||= Github::Client.new(options)
- end
-
- def user_url
- "/users/#{username}"
- end
- end
-end
diff --git a/lib/gitlab/access.rb b/lib/gitlab/access.rb
index b4012ebbb99..7127948cf00 100644
--- a/lib/gitlab/access.rb
+++ b/lib/gitlab/access.rb
@@ -58,9 +58,9 @@ module Gitlab
def protection_options
{
"Not protected: Both developers and masters can push new commits, force push, or delete the branch." => PROTECTION_NONE,
- "Protected against pushes: Developers cannot push new commits, but are allowed to accept merge requests to the branch." => PROTECTION_DEV_CAN_MERGE,
- "Partially protected: Developers can push new commits, but cannot force push or delete the branch. Masters can do all of those." => PROTECTION_DEV_CAN_PUSH,
- "Fully protected: Developers cannot push new commits, force push, or delete the branch. Only masters can do any of those." => PROTECTION_FULL
+ "Protected against pushes: Developers cannot push new commits, but are allowed to accept merge requests to the branch. Masters can push to the branch." => PROTECTION_DEV_CAN_MERGE,
+ "Partially protected: Both developers and masters can push new commits, but cannot force push or delete the branch." => PROTECTION_DEV_CAN_PUSH,
+ "Fully protected: Developers cannot push new commits, but masters can. No-one can force push or delete the branch." => PROTECTION_FULL
}
end
diff --git a/lib/gitlab/action_rate_limiter.rb b/lib/gitlab/action_rate_limiter.rb
new file mode 100644
index 00000000000..4cd3bdefda3
--- /dev/null
+++ b/lib/gitlab/action_rate_limiter.rb
@@ -0,0 +1,47 @@
+module Gitlab
+ # This class implements a simple rate limiter that can be used to throttle
+ # certain actions. Unlike Rack Attack and Rack::Throttle, which operate at
+ # the middleware level, this can be used at the controller level.
+ class ActionRateLimiter
+ TIME_TO_EXPIRE = 60 # 1 min
+
+ attr_accessor :action, :expiry_time
+
+ def initialize(action:, expiry_time: TIME_TO_EXPIRE)
+ @action = action
+ @expiry_time = expiry_time
+ end
+
+ # Increments the value stored at the given cache key by 1, setting the given
+ # expiration time when the key is first created. Returns the incremented value.
+ #
+ # key - An array of ActiveRecord instances
+ def increment(key)
+ value = 0
+
+ Gitlab::Redis::Cache.with do |redis|
+ cache_key = action_key(key)
+ value = redis.incr(cache_key)
+ redis.expire(cache_key, expiry_time) if value == 1
+ end
+
+ value
+ end
+
+ # Increments the given key and returns true if the action should
+ # be throttled.
+ #
+ # key - An array of ActiveRecord instances
+ # threshold_value - The maximum number of times this action should occur in the given time interval
+ def throttled?(key, threshold_value)
+ self.increment(key) > threshold_value
+ end
+
+ private
+
+ def action_key(key)
+ serialized = key.map { |obj| "#{obj.class.model_name.to_s.underscore}:#{obj.id}" }.join(":")
+ "action_rate_limiter:#{action}:#{serialized}"
+ end
+ end
+end
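A hedged controller-level sketch of the class above; `current_user` and `@project` stand for ActiveRecord instances, and the threshold of 10 actions per 60 seconds is illustrative.

limiter = Gitlab::ActionRateLimiter.new(action: :create_issue, expiry_time: 60)

if limiter.throttled?([current_user, @project], 10)
  head :too_many_requests # 429
else
  # proceed with the action
end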
diff --git a/lib/gitlab/asciidoc.rb b/lib/gitlab/asciidoc.rb
index cead1c7eacd..62c41801d75 100644
--- a/lib/gitlab/asciidoc.rb
+++ b/lib/gitlab/asciidoc.rb
@@ -6,11 +6,10 @@ module Gitlab
# Parser/renderer for the AsciiDoc format that uses Asciidoctor and filters
# the resulting HTML through HTML pipeline filters.
module Asciidoc
- extend Gitlab::CurrentSettings
-
DEFAULT_ADOC_ATTRS = [
'showtitle', 'idprefix=user-content-', 'idseparator=-', 'env=gitlab',
- 'env-gitlab', 'source-highlighter=html-pipeline', 'icons=font'
+ 'env-gitlab', 'source-highlighter=html-pipeline', 'icons=font',
+ 'outfilesuffix=.adoc'
].freeze
# Public: Converts the provided Asciidoc markup into HTML.
@@ -33,9 +32,9 @@ module Gitlab
def self.plantuml_setup
Asciidoctor::PlantUml.configure do |conf|
- conf.url = current_application_settings.plantuml_url
- conf.svg_enable = current_application_settings.plantuml_enabled
- conf.png_enable = current_application_settings.plantuml_enabled
+ conf.url = Gitlab::CurrentSettings.plantuml_url
+ conf.svg_enable = Gitlab::CurrentSettings.plantuml_enabled
+ conf.png_enable = Gitlab::CurrentSettings.plantuml_enabled
conf.txt_enable = false
end
end
diff --git a/lib/gitlab/auth.rb b/lib/gitlab/auth.rb
index 87aeb76b66a..f5ccf952cf9 100644
--- a/lib/gitlab/auth.rb
+++ b/lib/gitlab/auth.rb
@@ -1,11 +1,11 @@
module Gitlab
module Auth
- MissingPersonalTokenError = Class.new(StandardError)
+ MissingPersonalAccessTokenError = Class.new(StandardError)
REGISTRY_SCOPES = [:read_registry].freeze
# Scopes used for GitLab API access
- API_SCOPES = [:api, :read_user].freeze
+ API_SCOPES = [:api, :read_user, :sudo].freeze
# Scopes used for OpenID Connect
OPENID_SCOPES = [:openid].freeze
@@ -14,8 +14,6 @@ module Gitlab
DEFAULT_SCOPES = [:api].freeze
class << self
- include Gitlab::CurrentSettings
-
def find_for_git_client(login, password, project:, ip:)
raise "Must provide an IP for rate limiting" if ip.nil?
@@ -25,7 +23,7 @@ module Gitlab
result =
service_request_check(login, password, project) ||
build_access_token_check(login, password) ||
- lfs_token_check(login, password) ||
+ lfs_token_check(login, password, project) ||
oauth_access_token_check(login, password) ||
personal_access_token_check(password) ||
user_with_password_for_git(login, password) ||
@@ -34,30 +32,44 @@ module Gitlab
rate_limit!(ip, success: result.success?, login: login)
Gitlab::Auth::UniqueIpsLimiter.limit_user!(result.actor)
- return result if result.success? || current_application_settings.password_authentication_enabled? || Gitlab::LDAP::Config.enabled?
+ return result if result.success? || authenticate_using_internal_or_ldap_password?
# If sign-in is disabled and LDAP is not configured, recommend a
# personal access token on failed auth attempts
- raise Gitlab::Auth::MissingPersonalTokenError
+ raise Gitlab::Auth::MissingPersonalAccessTokenError
end
def find_with_user_password(login, password)
- # Avoid resource intensive login checks if password is not provided
- return unless password.present?
+ # Avoid resource intensive checks if login credentials are not provided
+ return unless login.present? && password.present?
+
+ # Nothing to do here if internal auth is disabled and LDAP is
+ # not configured
+ return unless authenticate_using_internal_or_ldap_password?
Gitlab::Auth::UniqueIpsLimiter.limit_user! do
user = User.by_login(login)
- # If no user is found, or it's an LDAP server, try LDAP.
- # LDAP users are only authenticated via LDAP
- if user.nil? || user.ldap_user?
- # Second chance - try LDAP authentication
-        return unless Gitlab::LDAP::Config.enabled?
-        Gitlab::LDAP::Authentication.login(login, password)
+      return if user && !user.active?
+
+      authenticators = []
+
+      if user
+        authenticators << Gitlab::Auth::OAuth::Provider.authentication(user, 'database')
+        # Add authenticators for all identities if user is not nil
+        user&.identities&.each do |identity|
+          authenticators << Gitlab::Auth::OAuth::Provider.authentication(user, identity.provider)
+        end
else
- user if user.active? && user.valid_password?(password)
+ # If no user is provided, try LDAP.
+ # LDAP users are only authenticated via LDAP
+ authenticators << Gitlab::Auth::LDAP::Authentication
end
+
+ authenticators.compact!
+
+ user if authenticators.find { |auth| auth.login(login, password) }
end
end
@@ -84,6 +96,10 @@ module Gitlab
private
+ def authenticate_using_internal_or_ldap_password?
+ Gitlab::CurrentSettings.password_authentication_enabled_for_git? || Gitlab::Auth::LDAP::Config.enabled?
+ end
+
def service_request_check(login, password, project)
matched_login = /(?<service>^[a-zA-Z]*-ci)-token$/.match(login)
@@ -106,7 +122,7 @@ module Gitlab
user = find_with_user_password(login, password)
return unless user
- raise Gitlab::Auth::MissingPersonalTokenError if user.two_factor_enabled?
+ raise Gitlab::Auth::MissingPersonalAccessTokenError if user.two_factor_enabled?
Gitlab::Auth::Result.new(user, nil, :gitlab_or_ldap, full_authentication_abilities)
end
@@ -128,7 +144,7 @@ module Gitlab
token = PersonalAccessTokensFinder.new(state: 'active').find_by(token: password)
if token && valid_scoped_token?(token, available_scopes)
- Gitlab::Auth::Result.new(token.user, nil, :personal_token, abilities_for_scope(token.scopes))
+ Gitlab::Auth::Result.new(token.user, nil, :personal_access_token, abilities_for_scopes(token.scopes))
end
end
@@ -140,13 +156,18 @@ module Gitlab
AccessTokenValidationService.new(token).include_any_scope?(scopes)
end
- def abilities_for_scope(scopes)
- scopes.map do |scope|
- self.public_send(:"#{scope}_scope_authentication_abilities") # rubocop:disable GitlabSecurity/PublicSend
- end.flatten.uniq
+ def abilities_for_scopes(scopes)
+ abilities_by_scope = {
+ api: full_authentication_abilities,
+ read_registry: [:read_container_image]
+ }
+
+ scopes.flat_map do |scope|
+ abilities_by_scope.fetch(scope.to_sym, [])
+ end.uniq
end
- def lfs_token_check(login, password)
+ def lfs_token_check(login, password, project)
deploy_key_matches = login.match(/\Alfs\+deploy-key-(\d+)\z/)
actor =
@@ -163,6 +184,8 @@ module Gitlab
authentication_abilities =
if token_handler.user?
full_authentication_abilities
+ elsif token_handler.deploy_key_pushable?(project)
+ read_write_authentication_abilities
else
read_authentication_abilities
end
@@ -208,26 +231,23 @@ module Gitlab
]
end
- def full_authentication_abilities
+ def read_write_authentication_abilities
read_authentication_abilities + [
:push_code,
- :create_container_image,
- :admin_container_image
+ :create_container_image
]
end
- alias_method :api_scope_authentication_abilities, :full_authentication_abilities
-
- def read_registry_scope_authentication_abilities
- [:read_container_image]
- end
- # The currently used auth method doesn't allow any actions for this scope
- def read_user_scope_authentication_abilities
- []
+ def full_authentication_abilities
+ read_write_authentication_abilities + [
+ :admin_container_image
+ ]
end
- def available_scopes
- API_SCOPES + registry_scopes
+ def available_scopes(current_user = nil)
+ scopes = API_SCOPES + registry_scopes
+ scopes.delete(:sudo) if current_user && !current_user.admin?
+ scopes
end
# Other available scopes
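As a rough illustration of the reworked scope handling (a sketch only, e.g. from a Rails console; `admin_user` and `regular_user` stand for any existing User records, and `send` is used in case these helpers sit in the private section of the class):

    # Scopes map to ability lists; unknown scopes contribute nothing.
    Gitlab::Auth.send(:abilities_for_scopes, [:api, :read_registry])
    # => full_authentication_abilities plus :read_container_image, de-duplicated

    # :sudo is advertised only when no user is given or the user is an admin.
    Gitlab::Auth.send(:available_scopes, admin_user).include?(:sudo)   # => true
    Gitlab::Auth.send(:available_scopes, regular_user).include?(:sudo) # => false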
diff --git a/lib/gitlab/auth/blocked_user_tracker.rb b/lib/gitlab/auth/blocked_user_tracker.rb
new file mode 100644
index 00000000000..dae03a179e4
--- /dev/null
+++ b/lib/gitlab/auth/blocked_user_tracker.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+module Gitlab
+ module Auth
+ class BlockedUserTracker
+ ACTIVE_RECORD_REQUEST_PARAMS = 'action_dispatch.request.request_parameters'
+
+ def self.log_if_user_blocked(env)
+ message = env.dig('warden.options', :message)
+
+ # Devise calls User#active_for_authentication? on the User model and then
+ # throws an exception to Warden with User#inactive_message:
+ # https://github.com/plataformatec/devise/blob/v4.2.1/lib/devise/hooks/activatable.rb#L8
+ #
+ # Since Warden doesn't pass the user record to the failure handler, we
+ # need to do a database lookup with the username. We can limit the
+ # lookups to happen when the user was blocked by checking the inactive
+ # message passed along by Warden.
+ return unless message == User::BLOCKED_MESSAGE
+
+ login = env.dig(ACTIVE_RECORD_REQUEST_PARAMS, 'user', 'login')
+
+ return unless login.present?
+
+ user = User.by_login(login)
+
+ return unless user&.blocked?
+
+ Gitlab::AppLogger.info("Failed login for blocked user: user=#{user.username} ip=#{env['REMOTE_ADDR']}")
+ SystemHooksService.new.execute_hooks_for(user, :failed_login)
+
+ true
+ rescue TypeError
+ end
+ end
+ end
+end
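A minimal sketch of the Rack env this hook expects (the values are made up; in production the env comes from Warden's failure app):

    env = {
      'warden.options' => { message: User::BLOCKED_MESSAGE },
      'action_dispatch.request.request_parameters' => { 'user' => { 'login' => 'jdoe' } },
      'REMOTE_ADDR' => '203.0.113.7'
    }

    # Logs the failed login and fires the :failed_login system hook when
    # 'jdoe' exists and is blocked; returns nil in every other case.
    Gitlab::Auth::BlockedUserTracker.log_if_user_blocked(env)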
diff --git a/lib/gitlab/auth/database/authentication.rb b/lib/gitlab/auth/database/authentication.rb
new file mode 100644
index 00000000000..260a77058a4
--- /dev/null
+++ b/lib/gitlab/auth/database/authentication.rb
@@ -0,0 +1,16 @@
+# These calls help to authenticate against the internal database by providing a username and password
+#
+
+module Gitlab
+ module Auth
+ module Database
+ class Authentication < Gitlab::Auth::OAuth::Authentication
+ def login(login, password)
+ return false unless Gitlab::CurrentSettings.password_authentication_enabled_for_git?
+
+ user&.valid_password?(password)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/auth/ldap/access.rb b/lib/gitlab/auth/ldap/access.rb
new file mode 100644
index 00000000000..77c0ddc2d48
--- /dev/null
+++ b/lib/gitlab/auth/ldap/access.rb
@@ -0,0 +1,89 @@
+# LDAP authorization model
+#
+# * Check if we are allowed access (not blocked)
+#
+module Gitlab
+ module Auth
+ module LDAP
+ class Access
+ attr_reader :provider, :user
+
+ def self.open(user, &block)
+ Gitlab::Auth::LDAP::Adapter.open(user.ldap_identity.provider) do |adapter|
+ block.call(self.new(user, adapter))
+ end
+ end
+
+ def self.allowed?(user)
+ self.open(user) do |access|
+ if access.allowed?
+ Users::UpdateService.new(user, user: user, last_credential_check_at: Time.now).execute
+
+ true
+ else
+ false
+ end
+ end
+ end
+
+ def initialize(user, adapter = nil)
+ @adapter = adapter
+ @user = user
+ @provider = user.ldap_identity.provider
+ end
+
+ def allowed?
+ if ldap_user
+ unless ldap_config.active_directory
+ unblock_user(user, 'is available again') if user.ldap_blocked?
+ return true
+ end
+
+          # Block the user in GitLab if they were blocked in AD
+ if Gitlab::Auth::LDAP::Person.disabled_via_active_directory?(user.ldap_identity.extern_uid, adapter)
+ block_user(user, 'is disabled in Active Directory')
+ false
+ else
+ unblock_user(user, 'is not disabled anymore') if user.ldap_blocked?
+ true
+ end
+ else
+ # Block the user if they no longer exist in LDAP/AD
+ block_user(user, 'does not exist anymore')
+ false
+ end
+ end
+
+ def adapter
+ @adapter ||= Gitlab::Auth::LDAP::Adapter.new(provider)
+ end
+
+ def ldap_config
+ Gitlab::Auth::LDAP::Config.new(provider)
+ end
+
+ def ldap_user
+ @ldap_user ||= Gitlab::Auth::LDAP::Person.find_by_dn(user.ldap_identity.extern_uid, adapter)
+ end
+
+ def block_user(user, reason)
+ user.ldap_block
+
+ Gitlab::AppLogger.info(
+ "LDAP account \"#{user.ldap_identity.extern_uid}\" #{reason}, " \
+ "blocking Gitlab user \"#{user.name}\" (#{user.email})"
+ )
+ end
+
+ def unblock_user(user, reason)
+ user.activate
+
+ Gitlab::AppLogger.info(
+ "LDAP account \"#{user.ldap_identity.extern_uid}\" #{reason}, " \
+ "unblocking Gitlab user \"#{user.name}\" (#{user.email})"
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/auth/ldap/adapter.rb b/lib/gitlab/auth/ldap/adapter.rb
new file mode 100644
index 00000000000..caf2d18c668
--- /dev/null
+++ b/lib/gitlab/auth/ldap/adapter.rb
@@ -0,0 +1,110 @@
+module Gitlab
+ module Auth
+ module LDAP
+ class Adapter
+ attr_reader :provider, :ldap
+
+ def self.open(provider, &block)
+ Net::LDAP.open(config(provider).adapter_options) do |ldap|
+ block.call(self.new(provider, ldap))
+ end
+ end
+
+ def self.config(provider)
+ Gitlab::Auth::LDAP::Config.new(provider)
+ end
+
+ def initialize(provider, ldap = nil)
+ @provider = provider
+ @ldap = ldap || Net::LDAP.new(config.adapter_options)
+ end
+
+ def config
+ Gitlab::Auth::LDAP::Config.new(provider)
+ end
+
+ def users(fields, value, limit = nil)
+ options = user_options(Array(fields), value, limit)
+
+ entries = ldap_search(options).select do |entry|
+ entry.respond_to? config.uid
+ end
+
+ entries.map do |entry|
+ Gitlab::Auth::LDAP::Person.new(entry, provider)
+ end
+ end
+
+ def user(*args)
+ users(*args).first
+ end
+
+ def dn_matches_filter?(dn, filter)
+ ldap_search(base: dn,
+ filter: filter,
+ scope: Net::LDAP::SearchScope_BaseObject,
+ attributes: %w{dn}).any?
+ end
+
+ def ldap_search(*args)
+ # Net::LDAP's `time` argument doesn't work. Use Ruby `Timeout` instead.
+ Timeout.timeout(config.timeout) do
+ results = ldap.search(*args)
+
+ if results.nil?
+ response = ldap.get_operation_result
+
+ unless response.code.zero?
+ Rails.logger.warn("LDAP search error: #{response.message}")
+ end
+
+ []
+ else
+ results
+ end
+ end
+ rescue Net::LDAP::Error => error
+ Rails.logger.warn("LDAP search raised exception #{error.class}: #{error.message}")
+ []
+ rescue Timeout::Error
+ Rails.logger.warn("LDAP search timed out after #{config.timeout} seconds")
+ []
+ end
+
+ private
+
+ def user_options(fields, value, limit)
+ options = {
+ attributes: Gitlab::Auth::LDAP::Person.ldap_attributes(config),
+ base: config.base
+ }
+
+ options[:size] = limit if limit
+
+ if fields.include?('dn')
+ raise ArgumentError, 'It is not currently possible to search the DN and other fields at the same time.' if fields.size > 1
+
+ options[:base] = value
+ options[:scope] = Net::LDAP::SearchScope_BaseObject
+ else
+ filter = fields.map { |field| Net::LDAP::Filter.eq(field, value) }.inject(:|)
+ end
+
+ options.merge(filter: user_filter(filter))
+ end
+
+ def user_filter(filter = nil)
+ user_filter = config.constructed_user_filter if config.user_filter.present?
+
+ if user_filter && filter
+ Net::LDAP::Filter.join(filter, user_filter)
+ elsif user_filter
+ user_filter
+ else
+ filter
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/auth/ldap/auth_hash.rb b/lib/gitlab/auth/ldap/auth_hash.rb
new file mode 100644
index 00000000000..ac5c14d374d
--- /dev/null
+++ b/lib/gitlab/auth/ldap/auth_hash.rb
@@ -0,0 +1,48 @@
+# Class to parse and transform the info provided by omniauth
+#
+module Gitlab
+ module Auth
+ module LDAP
+ class AuthHash < Gitlab::Auth::OAuth::AuthHash
+ def uid
+ @uid ||= Gitlab::Auth::LDAP::Person.normalize_dn(super)
+ end
+
+ def username
+ super.tap do |username|
+ username.downcase! if ldap_config.lowercase_usernames
+ end
+ end
+
+ private
+
+ def get_info(key)
+ attributes = ldap_config.attributes[key.to_s]
+ return super unless attributes
+
+ attributes = Array(attributes)
+
+ value = nil
+ attributes.each do |attribute|
+ value = get_raw(attribute)
+ value = value.first if value
+ break if value.present?
+ end
+
+ return super unless value
+
+ Gitlab::Utils.force_utf8(value)
+ value
+ end
+
+ def get_raw(key)
+ auth_hash.extra[:raw_info][key] if auth_hash.extra
+ end
+
+ def ldap_config
+ @ldap_config ||= Gitlab::Auth::LDAP::Config.new(self.provider)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/auth/ldap/authentication.rb b/lib/gitlab/auth/ldap/authentication.rb
new file mode 100644
index 00000000000..e70c3ab6b46
--- /dev/null
+++ b/lib/gitlab/auth/ldap/authentication.rb
@@ -0,0 +1,68 @@
+# These calls help to authenticate to LDAP by providing username and password
+#
+# Since multiple LDAP servers are supported, it will loop through all of them
+# until a valid bind is found
+#
+
+module Gitlab
+ module Auth
+ module LDAP
+ class Authentication < Gitlab::Auth::OAuth::Authentication
+ def self.login(login, password)
+ return unless Gitlab::Auth::LDAP::Config.enabled?
+ return unless login.present? && password.present?
+
+ auth = nil
+ # loop through providers until valid bind
+ providers.find do |provider|
+ auth = new(provider)
+ auth.login(login, password) # true will exit the loop
+ end
+
+ # If (login, password) was invalid for all providers, the value of auth is now the last
+ # Gitlab::Auth::LDAP::Authentication instance we tried.
+ auth.user
+ end
+
+ def self.providers
+ Gitlab::Auth::LDAP::Config.providers
+ end
+
+ attr_accessor :ldap_user
+
+ def login(login, password)
+ @ldap_user = adapter.bind_as(
+ filter: user_filter(login),
+ size: 1,
+ password: password
+ )
+ end
+
+ def adapter
+ OmniAuth::LDAP::Adaptor.new(config.omniauth_options)
+ end
+
+ def config
+ Gitlab::Auth::LDAP::Config.new(provider)
+ end
+
+ def user_filter(login)
+ filter = Net::LDAP::Filter.equals(config.uid, login)
+
+ # Apply LDAP user filter if present
+ if config.user_filter.present?
+ filter = Net::LDAP::Filter.join(filter, config.constructed_user_filter)
+ end
+
+ filter
+ end
+
+ def user
+ return unless ldap_user
+
+ Gitlab::Auth::LDAP::User.find_by_uid_and_provider(ldap_user.dn, provider)
+ end
+ end
+ end
+ end
+end
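Usage sketch (assumes LDAP is enabled and the login 'jdoe' exists on one of the configured servers):

    # Each provider is tried in turn; the first successful bind wins and the
    # matching GitLab user (looked up by normalized DN) is returned, or nil.
    user = Gitlab::Auth::LDAP::Authentication.login('jdoe', 'a-password')
    user&.username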
diff --git a/lib/gitlab/auth/ldap/config.rb b/lib/gitlab/auth/ldap/config.rb
new file mode 100644
index 00000000000..77185f52ced
--- /dev/null
+++ b/lib/gitlab/auth/ldap/config.rb
@@ -0,0 +1,237 @@
+# Load a specific server configuration
+module Gitlab
+ module Auth
+ module LDAP
+ class Config
+ NET_LDAP_ENCRYPTION_METHOD = {
+ simple_tls: :simple_tls,
+ start_tls: :start_tls,
+ plain: nil
+ }.freeze
+
+ attr_accessor :provider, :options
+
+ def self.enabled?
+ Gitlab.config.ldap.enabled
+ end
+
+ def self.servers
+ Gitlab.config.ldap['servers']&.values || []
+ end
+
+ def self.available_servers
+ return [] unless enabled?
+
+ Array.wrap(servers.first)
+ end
+
+ def self.providers
+ servers.map { |server| server['provider_name'] }
+ end
+
+ def self.valid_provider?(provider)
+ providers.include?(provider)
+ end
+
+ def self.invalid_provider(provider)
+ raise "Unknown provider (#{provider}). Available providers: #{providers}"
+ end
+
+ def initialize(provider)
+ if self.class.valid_provider?(provider)
+ @provider = provider
+ else
+ self.class.invalid_provider(provider)
+ end
+
+ @options = config_for(@provider) # Use @provider, not provider
+ end
+
+ def enabled?
+ base_config.enabled
+ end
+
+ def adapter_options
+ opts = base_options.merge(
+ encryption: encryption_options
+ )
+
+ opts.merge!(auth_options) if has_auth?
+
+ opts
+ end
+
+ def omniauth_options
+ opts = base_options.merge(
+ base: base,
+ encryption: options['encryption'],
+ filter: omniauth_user_filter,
+ name_proc: name_proc,
+ disable_verify_certificates: !options['verify_certificates']
+ )
+
+ if has_auth?
+ opts.merge!(
+ bind_dn: options['bind_dn'],
+ password: options['password']
+ )
+ end
+
+ opts[:ca_file] = options['ca_file'] if options['ca_file'].present?
+ opts[:ssl_version] = options['ssl_version'] if options['ssl_version'].present?
+
+ opts
+ end
+
+ def base
+ options['base']
+ end
+
+ def uid
+ options['uid']
+ end
+
+ def sync_ssh_keys?
+ sync_ssh_keys.present?
+ end
+
+ # The LDAP attribute in which the ssh keys are stored
+ def sync_ssh_keys
+ options['sync_ssh_keys']
+ end
+
+ def user_filter
+ options['user_filter']
+ end
+
+ def constructed_user_filter
+ @constructed_user_filter ||= Net::LDAP::Filter.construct(user_filter)
+ end
+
+ def group_base
+ options['group_base']
+ end
+
+ def admin_group
+ options['admin_group']
+ end
+
+ def active_directory
+ options['active_directory']
+ end
+
+ def block_auto_created_users
+ options['block_auto_created_users']
+ end
+
+ def attributes
+ default_attributes.merge(options['attributes'])
+ end
+
+ def timeout
+ options['timeout'].to_i
+ end
+
+ def has_auth?
+ options['password'] || options['bind_dn']
+ end
+
+ def allow_username_or_email_login
+ options['allow_username_or_email_login']
+ end
+
+ def lowercase_usernames
+ options['lowercase_usernames']
+ end
+
+ def name_proc
+ if allow_username_or_email_login
+ proc { |name| name.gsub(/@.*\z/, '') }
+ else
+ proc { |name| name }
+ end
+ end
+
+ def default_attributes
+ {
+ 'username' => %w(uid sAMAccountName userid),
+ 'email' => %w(mail email userPrincipalName),
+ 'name' => 'cn',
+ 'first_name' => 'givenName',
+ 'last_name' => 'sn'
+ }
+ end
+
+ protected
+
+ def base_options
+ {
+ host: options['host'],
+ port: options['port']
+ }
+ end
+
+ def base_config
+ Gitlab.config.ldap
+ end
+
+ def config_for(provider)
+ base_config.servers.values.find { |server| server['provider_name'] == provider }
+ end
+
+ def encryption_options
+ method = translate_method(options['encryption'])
+ return nil unless method
+
+ {
+ method: method,
+ tls_options: tls_options(method)
+ }
+ end
+
+ def translate_method(method_from_config)
+ NET_LDAP_ENCRYPTION_METHOD[method_from_config.to_sym]
+ end
+
+ def tls_options(method)
+ return { verify_mode: OpenSSL::SSL::VERIFY_NONE } unless method
+
+ opts = if options['verify_certificates']
+ OpenSSL::SSL::SSLContext::DEFAULT_PARAMS
+ else
+ # It is important to explicitly set verify_mode for two reasons:
+ # 1. The behavior of OpenSSL is undefined when verify_mode is not set.
+ # 2. The net-ldap gem implementation verifies the certificate hostname
+ # unless verify_mode is set to VERIFY_NONE.
+ { verify_mode: OpenSSL::SSL::VERIFY_NONE }
+ end
+
+ opts[:ca_file] = options['ca_file'] if options['ca_file'].present?
+ opts[:ssl_version] = options['ssl_version'] if options['ssl_version'].present?
+
+ opts
+ end
+
+ def auth_options
+ {
+ auth: {
+ method: :simple,
+ username: options['bind_dn'],
+ password: options['password']
+ }
+ }
+ end
+
+ def omniauth_user_filter
+ uid_filter = Net::LDAP::Filter.eq(uid, '%{username}')
+
+ if user_filter.present?
+ Net::LDAP::Filter.join(uid_filter, constructed_user_filter).to_s
+ else
+ uid_filter.to_s
+ end
+ end
+ end
+ end
+ end
+end
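A short sketch of reading one server's options (the provider name 'ldapmain' is an assumption; `Config.new` raises for unknown providers):

    config = Gitlab::Auth::LDAP::Config.new('ldapmain')

    config.uid              # the login attribute, e.g. 'sAMAccountName' on Active Directory
    config.adapter_options  # host/port plus the :encryption hash built above
    config.omniauth_options # options handed to the omniauth-ldap strategy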
diff --git a/lib/gitlab/auth/ldap/dn.rb b/lib/gitlab/auth/ldap/dn.rb
new file mode 100644
index 00000000000..1fa5338f5a6
--- /dev/null
+++ b/lib/gitlab/auth/ldap/dn.rb
@@ -0,0 +1,303 @@
+# -*- ruby encoding: utf-8 -*-
+
+# Based on the `ruby-net-ldap` gem's `Net::LDAP::DN`
+#
+# For our purposes, this class is used to normalize DNs in order to allow proper
+# comparison.
+#
+# E.g. DNs should be compared case-insensitively (in basically all LDAP
+# implementations or setups), therefore we downcase every DN.
+
+##
+# Objects of this class represent an LDAP DN ("Distinguished Name"). A DN
+# ("Distinguished Name") is a unique identifier for an entry within an LDAP
+# directory. It is made up of a number of other attributes strung together,
+# to identify the entry in the tree.
+#
+# Each attribute that makes up a DN needs to have its value escaped so that
+# the DN is valid. This class helps take care of that.
+#
+# A fully escaped DN needs to be unescaped when analysing its contents. This
+# class also helps take care of that.
+module Gitlab
+ module Auth
+ module LDAP
+ class DN
+ FormatError = Class.new(StandardError)
+ MalformedError = Class.new(FormatError)
+ UnsupportedError = Class.new(FormatError)
+
+ def self.normalize_value(given_value)
+ dummy_dn = "placeholder=#{given_value}"
+ normalized_dn = new(*dummy_dn).to_normalized_s
+ normalized_dn.sub(/\Aplaceholder=/, '')
+ end
+
+ ##
+ # Initialize a DN, escaping as required. Pass in attributes in name/value
+ # pairs. If there is a left over argument, it will be appended to the dn
+ # without escaping (useful for a base string).
+ #
+ # Most uses of this class will be to escape a DN, rather than to parse it,
+ # so storing the dn as an escaped String and parsing parts as required
+ # with a state machine seems sensible.
+ def initialize(*args)
+ if args.length > 1
+ initialize_array(args)
+ else
+ initialize_string(args[0])
+ end
+ end
+
+ ##
+ # Parse a DN into key value pairs using ASN from
+ # http://tools.ietf.org/html/rfc2253 section 3.
+ # rubocop:disable Metrics/AbcSize
+ # rubocop:disable Metrics/CyclomaticComplexity
+ # rubocop:disable Metrics/PerceivedComplexity
+ def each_pair
+ state = :key
+ key = StringIO.new
+ value = StringIO.new
+ hex_buffer = ""
+
+ @dn.each_char.with_index do |char, dn_index|
+ case state
+ when :key then
+ case char
+ when 'a'..'z', 'A'..'Z' then
+ state = :key_normal
+ key << char
+ when '0'..'9' then
+ state = :key_oid
+ key << char
+ when ' ' then state = :key
+ else raise(MalformedError, "Unrecognized first character of an RDN attribute type name \"#{char}\"")
+ end
+ when :key_normal then
+ case char
+ when '=' then state = :value
+ when 'a'..'z', 'A'..'Z', '0'..'9', '-', ' ' then key << char
+ else raise(MalformedError, "Unrecognized RDN attribute type name character \"#{char}\"")
+ end
+ when :key_oid then
+ case char
+ when '=' then state = :value
+ when '0'..'9', '.', ' ' then key << char
+ else raise(MalformedError, "Unrecognized RDN OID attribute type name character \"#{char}\"")
+ end
+ when :value then
+ case char
+ when '\\' then state = :value_normal_escape
+ when '"' then state = :value_quoted
+ when ' ' then state = :value
+ when '#' then
+ state = :value_hexstring
+ value << char
+ when ',' then
+ state = :key
+ yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
+ key = StringIO.new
+ value = StringIO.new
+ else
+ state = :value_normal
+ value << char
+ end
+ when :value_normal then
+ case char
+ when '\\' then state = :value_normal_escape
+ when ',' then
+ state = :key
+ yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
+ key = StringIO.new
+ value = StringIO.new
+ when '+' then raise(UnsupportedError, "Multivalued RDNs are not supported")
+ else value << char
+ end
+ when :value_normal_escape then
+ case char
+ when '0'..'9', 'a'..'f', 'A'..'F' then
+ state = :value_normal_escape_hex
+ hex_buffer = char
+ else
+ state = :value_normal
+ value << char
+ end
+ when :value_normal_escape_hex then
+ case char
+ when '0'..'9', 'a'..'f', 'A'..'F' then
+ state = :value_normal
+ value << "#{hex_buffer}#{char}".to_i(16).chr
+ else raise(MalformedError, "Invalid escaped hex code \"\\#{hex_buffer}#{char}\"")
+ end
+ when :value_quoted then
+ case char
+ when '\\' then state = :value_quoted_escape
+ when '"' then state = :value_end
+ else value << char
+ end
+ when :value_quoted_escape then
+ case char
+ when '0'..'9', 'a'..'f', 'A'..'F' then
+ state = :value_quoted_escape_hex
+ hex_buffer = char
+ else
+ state = :value_quoted
+ value << char
+ end
+ when :value_quoted_escape_hex then
+ case char
+ when '0'..'9', 'a'..'f', 'A'..'F' then
+ state = :value_quoted
+ value << "#{hex_buffer}#{char}".to_i(16).chr
+ else raise(MalformedError, "Expected the second character of a hex pair inside a double quoted value, but got \"#{char}\"")
+ end
+ when :value_hexstring then
+ case char
+ when '0'..'9', 'a'..'f', 'A'..'F' then
+ state = :value_hexstring_hex
+ value << char
+ when ' ' then state = :value_end
+ when ',' then
+ state = :key
+ yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
+ key = StringIO.new
+ value = StringIO.new
+ else raise(MalformedError, "Expected the first character of a hex pair, but got \"#{char}\"")
+ end
+ when :value_hexstring_hex then
+ case char
+ when '0'..'9', 'a'..'f', 'A'..'F' then
+ state = :value_hexstring
+ value << char
+ else raise(MalformedError, "Expected the second character of a hex pair, but got \"#{char}\"")
+ end
+ when :value_end then
+ case char
+ when ' ' then state = :value_end
+ when ',' then
+ state = :key
+ yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
+ key = StringIO.new
+ value = StringIO.new
+ else raise(MalformedError, "Expected the end of an attribute value, but got \"#{char}\"")
+ end
+ else raise "Fell out of state machine"
+ end
+ end
+
+ # Last pair
+ raise(MalformedError, 'DN string ended unexpectedly') unless
+ [:value, :value_normal, :value_hexstring, :value_end].include? state
+
+ yield key.string.strip, rstrip_except_escaped(value.string, @dn.length)
+ end
+
+ def rstrip_except_escaped(str, dn_index)
+ str_ends_with_whitespace = str.match(/\s\z/)
+
+ if str_ends_with_whitespace
+ dn_part_ends_with_escaped_whitespace = @dn[0, dn_index].match(/\\(\s+)\z/)
+
+ if dn_part_ends_with_escaped_whitespace
+ dn_part_rwhitespace = dn_part_ends_with_escaped_whitespace[1]
+ num_chars_to_remove = dn_part_rwhitespace.length - 1
+ str = str[0, str.length - num_chars_to_remove]
+ else
+ str.rstrip!
+ end
+ end
+
+ str
+ end
+
+ ##
+ # Returns the DN as an array in the form expected by the constructor.
+ def to_a
+ a = []
+ self.each_pair { |key, value| a << key << value } unless @dn.empty?
+ a
+ end
+
+ ##
+ # Return the DN as an escaped string.
+ def to_s
+ @dn
+ end
+
+ ##
+ # Return the DN as an escaped and normalized string.
+ def to_normalized_s
+ self.class.new(*to_a).to_s.downcase
+ end
+
+ # https://tools.ietf.org/html/rfc4514 section 2.4 lists these exceptions
+ # for DN values. All of the following must be escaped in any normal string
+ # using a single backslash ('\') as escape. The space character is left
+ # out here because in a "normalized" string, spaces should only be escaped
+ # if necessary (i.e. leading or trailing space).
+ NORMAL_ESCAPES = [',', '+', '"', '\\', '<', '>', ';', '='].freeze
+
+ # The following must be represented as escaped hex
+ HEX_ESCAPES = {
+ "\n" => '\0a',
+ "\r" => '\0d'
+ }.freeze
+
+ # Compiled character class regexp using the keys from the above hash, and
+ # checking for a space or # at the start, or space at the end, of the
+ # string.
+ ESCAPE_RE = Regexp.new("(^ |^#| $|[" +
+ NORMAL_ESCAPES.map { |e| Regexp.escape(e) }.join +
+ "])")
+
+ HEX_ESCAPE_RE = Regexp.new("([" +
+ HEX_ESCAPES.keys.map { |e| Regexp.escape(e) }.join +
+ "])")
+
+ ##
+ # Escape a string for use in a DN value
+ def self.escape(string)
+ escaped = string.gsub(ESCAPE_RE) { |char| "\\" + char }
+ escaped.gsub(HEX_ESCAPE_RE) { |char| HEX_ESCAPES[char] }
+ end
+
+ private
+
+ def initialize_array(args)
+ buffer = StringIO.new
+
+ args.each_with_index do |arg, index|
+ if index.even? # key
+ buffer << "," if index > 0
+ buffer << arg
+ else # value
+ buffer << "="
+ buffer << self.class.escape(arg)
+ end
+ end
+
+ @dn = buffer.string
+ end
+
+ def initialize_string(arg)
+ @dn = arg.to_s
+ end
+
+ ##
+ # Proxy all other requests to the string object, because a DN is mainly
+ # used within the library as a string
+ # rubocop:disable GitlabSecurity/PublicSend
+ def method_missing(method, *args, &block)
+ @dn.send(method, *args, &block)
+ end
+
+ ##
+ # Redefined to be consistent with redefined `method_missing` behavior
+ def respond_to?(sym, include_private = false)
+ @dn.respond_to?(sym, include_private)
+ end
+ end
+ end
+ end
+end
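Two typical uses of the class, sketched with made-up values: escaping a single attribute value, and normalizing a whole DN so that syntactically different but equivalent DNs compare equal as strings.

    # RFC 4514 special characters are escaped with a backslash.
    Gitlab::Auth::LDAP::DN.escape('Smith, John')
    # => "Smith\\, John"

    # Normalization downcases and drops insignificant whitespace.
    Gitlab::Auth::LDAP::DN.new('uid=John Smith, ou=People, dc=example, dc=com').to_normalized_s
    # => "uid=john smith,ou=people,dc=example,dc=com"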
diff --git a/lib/gitlab/auth/ldap/person.rb b/lib/gitlab/auth/ldap/person.rb
new file mode 100644
index 00000000000..8dfae3ee541
--- /dev/null
+++ b/lib/gitlab/auth/ldap/person.rb
@@ -0,0 +1,122 @@
+module Gitlab
+ module Auth
+ module LDAP
+ class Person
+ # Active Directory-specific LDAP filter that checks if bit 2 of the
+ # userAccountControl attribute is set.
+ # Source: http://ctogonewild.com/2009/09/03/bitmask-searches-in-ldap/
+ AD_USER_DISABLED = Net::LDAP::Filter.ex("userAccountControl:1.2.840.113556.1.4.803", "2")
+
+ InvalidEntryError = Class.new(StandardError)
+
+ attr_accessor :entry, :provider
+
+ def self.find_by_uid(uid, adapter)
+ uid = Net::LDAP::Filter.escape(uid)
+ adapter.user(adapter.config.uid, uid)
+ end
+
+ def self.find_by_dn(dn, adapter)
+ adapter.user('dn', dn)
+ end
+
+ def self.find_by_email(email, adapter)
+ email_fields = adapter.config.attributes['email']
+
+ adapter.user(email_fields, email)
+ end
+
+ def self.disabled_via_active_directory?(dn, adapter)
+ adapter.dn_matches_filter?(dn, AD_USER_DISABLED)
+ end
+
+ def self.ldap_attributes(config)
+ [
+ 'dn',
+ config.uid,
+ *config.attributes['name'],
+ *config.attributes['email'],
+ *config.attributes['username']
+ ].compact.uniq
+ end
+
+ def self.normalize_dn(dn)
+ ::Gitlab::Auth::LDAP::DN.new(dn).to_normalized_s
+ rescue ::Gitlab::Auth::LDAP::DN::FormatError => e
+ Rails.logger.info("Returning original DN \"#{dn}\" due to error during normalization attempt: #{e.message}")
+
+ dn
+ end
+
+ # Returns the UID in a normalized form.
+ #
+ # 1. Excess spaces are stripped
+ # 2. The string is downcased (for case-insensitivity)
+ def self.normalize_uid(uid)
+ ::Gitlab::Auth::LDAP::DN.normalize_value(uid)
+ rescue ::Gitlab::Auth::LDAP::DN::FormatError => e
+ Rails.logger.info("Returning original UID \"#{uid}\" due to error during normalization attempt: #{e.message}")
+
+ uid
+ end
+
+ def initialize(entry, provider)
+ Rails.logger.debug { "Instantiating #{self.class.name} with LDIF:\n#{entry.to_ldif}" }
+ @entry = entry
+ @provider = provider
+ end
+
+ def name
+ attribute_value(:name).first
+ end
+
+ def uid
+ entry.public_send(config.uid).first # rubocop:disable GitlabSecurity/PublicSend
+ end
+
+ def username
+ username = attribute_value(:username)
+
+ # Depending on the attribute, multiple values may
+ # be returned. We need only one for username.
+ # Ex. `uid` returns only one value but `mail` may
+ # return an array of multiple email addresses.
+ [username].flatten.first.tap do |username|
+ username.downcase! if config.lowercase_usernames
+ end
+ end
+
+ def email
+ attribute_value(:email)
+ end
+
+ def dn
+ self.class.normalize_dn(entry.dn)
+ end
+
+ private
+
+ def entry
+ @entry
+ end
+
+ def config
+ @config ||= Gitlab::Auth::LDAP::Config.new(provider)
+ end
+
+ # Using the LDAP attributes configuration, find and return the first
+ # attribute with a value. For example, by default, when given 'email',
+ # this method looks for 'mail', 'email' and 'userPrincipalName' and
+ # returns the first with a value.
+ def attribute_value(attribute)
+ attributes = Array(config.attributes[attribute.to_s])
+ selected_attr = attributes.find { |attr| entry.respond_to?(attr) }
+
+ return nil unless selected_attr
+
+ entry.public_send(selected_attr) # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+ end
+ end
+end
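Illustrative calls (the provider name 'ldapmain' and the uid 'JDoe' are made up, and the lookup needs a reachable LDAP server):

    # UIDs and DNs are normalized before they are compared or stored.
    Gitlab::Auth::LDAP::Person.normalize_uid('  JDoe ')
    # => "jdoe"
    Gitlab::Auth::LDAP::Person.normalize_dn('UID=JDoe, DC=Example, DC=Com')
    # => "uid=jdoe,dc=example,dc=com"

    # Lookups go through an Adapter; the attribute mapping from Config decides
    # which LDAP attributes back #name, #username and #email.
    adapter = Gitlab::Auth::LDAP::Adapter.new('ldapmain')
    person  = Gitlab::Auth::LDAP::Person.find_by_uid('jdoe', adapter)
    person&.email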
diff --git a/lib/gitlab/auth/ldap/user.rb b/lib/gitlab/auth/ldap/user.rb
new file mode 100644
index 00000000000..068212d9a21
--- /dev/null
+++ b/lib/gitlab/auth/ldap/user.rb
@@ -0,0 +1,54 @@
+# LDAP extension for User model
+#
+# * Find or create user from omniauth.auth data
+# * Links LDAP account with existing user
+# * Auth LDAP user with login and password
+#
+module Gitlab
+ module Auth
+ module LDAP
+ class User < Gitlab::Auth::OAuth::User
+ class << self
+ def find_by_uid_and_provider(uid, provider)
+ identity = ::Identity.with_extern_uid(provider, uid).take
+
+ identity && identity.user
+ end
+ end
+
+ def save
+ super('LDAP')
+ end
+
+ # instance methods
+ def find_user
+ find_by_uid_and_provider || find_by_email || build_new_user
+ end
+
+ def find_by_uid_and_provider
+ self.class.find_by_uid_and_provider(auth_hash.uid, auth_hash.provider)
+ end
+
+ def changed?
+ gl_user.changed? || gl_user.identities.any?(&:changed?)
+ end
+
+ def block_after_signup?
+ ldap_config.block_auto_created_users
+ end
+
+ def allowed?
+ Gitlab::Auth::LDAP::Access.allowed?(gl_user)
+ end
+
+ def ldap_config
+ Gitlab::Auth::LDAP::Config.new(auth_hash.provider)
+ end
+
+ def auth_hash=(auth_hash)
+ @auth_hash = Gitlab::Auth::LDAP::AuthHash.new(auth_hash)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/auth/o_auth/auth_hash.rb b/lib/gitlab/auth/o_auth/auth_hash.rb
new file mode 100644
index 00000000000..ed8fba94305
--- /dev/null
+++ b/lib/gitlab/auth/o_auth/auth_hash.rb
@@ -0,0 +1,92 @@
+# Class to parse and transform the info provided by omniauth
+#
+module Gitlab
+ module Auth
+ module OAuth
+ class AuthHash
+ attr_reader :auth_hash
+ def initialize(auth_hash)
+ @auth_hash = auth_hash
+ end
+
+ def uid
+ @uid ||= Gitlab::Utils.force_utf8(auth_hash.uid.to_s)
+ end
+
+ def provider
+ @provider ||= auth_hash.provider.to_s
+ end
+
+ def name
+ @name ||= get_info(:name) || "#{get_info(:first_name)} #{get_info(:last_name)}"
+ end
+
+ def username
+ @username ||= username_and_email[:username].to_s
+ end
+
+ def email
+ @email ||= username_and_email[:email].to_s
+ end
+
+ def password
+ @password ||= Gitlab::Utils.force_utf8(Devise.friendly_token[0, 8].downcase)
+ end
+
+ def location
+ location = get_info(:address)
+ if location.is_a?(Hash)
+ [location.locality.presence, location.country.presence].compact.join(', ')
+ else
+ location
+ end
+ end
+
+ def has_attribute?(attribute)
+ if attribute == :location
+ get_info(:address).present?
+ else
+ get_info(attribute).present?
+ end
+ end
+
+ private
+
+ def info
+ auth_hash.info
+ end
+
+ def get_info(key)
+ value = info[key]
+ Gitlab::Utils.force_utf8(value) if value
+ value
+ end
+
+ def username_and_email
+ @username_and_email ||= begin
+ username = get_info(:username).presence || get_info(:nickname).presence
+ email = get_info(:email).presence
+
+ username ||= generate_username(email) if email
+ email ||= generate_temporarily_email(username) if username
+
+ {
+ username: username,
+ email: email
+ }
+ end
+ end
+
+ # Get the first part of the email address (before @)
+      # In addition it removes illegal characters
+ def generate_username(email)
+ email.match(/^[^@]*/)[0].mb_chars.normalize(:kd).gsub(/[^\x00-\x7F]/, '').to_s
+ end
+
+ def generate_temporarily_email(username)
+ "temp-email-for-oauth-#{username}@gitlab.localhost"
+ end
+ end
+ end
+ end
+end
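A sketch of the username/email fallbacks (`omniauth_auth` stands for whatever hash OmniAuth hands to the callback):

    hash = Gitlab::Auth::OAuth::AuthHash.new(omniauth_auth)

    # Provider gave only an email: the username is derived from the local part
    # with non-ASCII characters stripped, e.g. "josé@example.com" => "jose".
    hash.username

    # Provider gave only a username: a placeholder email is generated instead,
    # e.g. "temp-email-for-oauth-jdoe@gitlab.localhost".
    hash.email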
diff --git a/lib/gitlab/auth/o_auth/authentication.rb b/lib/gitlab/auth/o_auth/authentication.rb
new file mode 100644
index 00000000000..ed03b9f8b40
--- /dev/null
+++ b/lib/gitlab/auth/o_auth/authentication.rb
@@ -0,0 +1,21 @@
+# These calls help to authenticate to an OAuth provider by providing a username and password
+#
+
+module Gitlab
+ module Auth
+ module OAuth
+ class Authentication
+ attr_reader :provider, :user
+
+ def initialize(provider, user = nil)
+ @provider = provider
+ @user = user
+ end
+
+ def login(login, password)
+ raise NotImplementedError
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/auth/o_auth/provider.rb b/lib/gitlab/auth/o_auth/provider.rb
new file mode 100644
index 00000000000..5fb61ffe00d
--- /dev/null
+++ b/lib/gitlab/auth/o_auth/provider.rb
@@ -0,0 +1,73 @@
+module Gitlab
+ module Auth
+ module OAuth
+ class Provider
+ LABELS = {
+ "github" => "GitHub",
+ "gitlab" => "GitLab.com",
+ "google_oauth2" => "Google"
+ }.freeze
+
+ def self.authentication(user, provider)
+ return unless user
+ return unless enabled?(provider)
+
+ authenticator =
+ case provider
+ when /^ldap/
+ Gitlab::Auth::LDAP::Authentication
+ when 'database'
+ Gitlab::Auth::Database::Authentication
+ end
+
+ authenticator&.new(provider, user)
+ end
+
+ def self.providers
+ Devise.omniauth_providers
+ end
+
+ def self.enabled?(name)
+ return true if name == 'database'
+
+ providers.include?(name.to_sym)
+ end
+
+ def self.ldap_provider?(name)
+ name.to_s.start_with?('ldap')
+ end
+
+ def self.sync_profile_from_provider?(provider)
+ return true if ldap_provider?(provider)
+
+ providers = Gitlab.config.omniauth.sync_profile_from_provider
+
+ if providers.is_a?(Array)
+ providers.include?(provider)
+ else
+ providers
+ end
+ end
+
+ def self.config_for(name)
+ name = name.to_s
+ if ldap_provider?(name)
+ if Gitlab::Auth::LDAP::Config.valid_provider?(name)
+ Gitlab::Auth::LDAP::Config.new(name).options
+ else
+ nil
+ end
+ else
+ Gitlab.config.omniauth.providers.find { |provider| provider.name == name }
+ end
+ end
+
+ def self.label_for(name)
+ name = name.to_s
+ config = config_for(name)
+ (config && config['label']) || LABELS[name] || name.titleize
+ end
+ end
+ end
+ end
+end
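How the dispatcher is used by `Gitlab::Auth.find_with_user_password` above, sketched with an assumed `user` record:

    # 'database' selects the internal-password authenticator, ldap* providers
    # the LDAP one; anything else (or a disabled provider) returns nil, which
    # is later dropped by `authenticators.compact!`.
    Gitlab::Auth::OAuth::Provider.authentication(user, 'database') # Database::Authentication
    Gitlab::Auth::OAuth::Provider.authentication(user, 'ldapmain') # LDAP::Authentication (if LDAP is enabled)
    Gitlab::Auth::OAuth::Provider.authentication(user, 'twitter')  # nil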
diff --git a/lib/gitlab/auth/o_auth/session.rb b/lib/gitlab/auth/o_auth/session.rb
new file mode 100644
index 00000000000..8f2b4d58552
--- /dev/null
+++ b/lib/gitlab/auth/o_auth/session.rb
@@ -0,0 +1,21 @@
+# :nocov:
+module Gitlab
+ module Auth
+ module OAuth
+ module Session
+ def self.create(provider, ticket)
+ Rails.cache.write("gitlab:#{provider}:#{ticket}", ticket, expires_in: Gitlab.config.omniauth.cas3.session_duration)
+ end
+
+ def self.destroy(provider, ticket)
+ Rails.cache.delete("gitlab:#{provider}:#{ticket}")
+ end
+
+ def self.valid?(provider, ticket)
+ Rails.cache.read("gitlab:#{provider}:#{ticket}").present?
+ end
+ end
+ end
+ end
+end
+# :nocov:
diff --git a/lib/gitlab/auth/o_auth/user.rb b/lib/gitlab/auth/o_auth/user.rb
new file mode 100644
index 00000000000..b6a96081278
--- /dev/null
+++ b/lib/gitlab/auth/o_auth/user.rb
@@ -0,0 +1,246 @@
+# OAuth extension for User model
+#
+# * Find GitLab user based on omniauth uid and provider
+# * Create new user from omniauth data
+#
+module Gitlab
+ module Auth
+ module OAuth
+ class User
+ SignupDisabledError = Class.new(StandardError)
+ SigninDisabledForProviderError = Class.new(StandardError)
+
+ attr_accessor :auth_hash, :gl_user
+
+ def initialize(auth_hash)
+ self.auth_hash = auth_hash
+ update_profile
+ add_or_update_user_identities
+ end
+
+ def persisted?
+ gl_user.try(:persisted?)
+ end
+
+ def new?
+ !persisted?
+ end
+
+ def valid?
+ gl_user.try(:valid?)
+ end
+
+ def save(provider = 'OAuth')
+ raise SigninDisabledForProviderError if oauth_provider_disabled?
+ raise SignupDisabledError unless gl_user
+
+ block_after_save = needs_blocking?
+
+ Users::UpdateService.new(gl_user, user: gl_user).execute!
+
+ gl_user.block if block_after_save
+
+ log.info "(#{provider}) saving user #{auth_hash.email} from login with extern_uid => #{auth_hash.uid}"
+ gl_user
+ rescue ActiveRecord::RecordInvalid => e
+ log.info "(#{provider}) Error saving user #{auth_hash.uid} (#{auth_hash.email}): #{gl_user.errors.full_messages}"
+ return self, e.record.errors
+ end
+
+ def gl_user
+ return @gl_user if defined?(@gl_user)
+
+ @gl_user = find_user
+ end
+
+ def find_user
+ user = find_by_uid_and_provider
+
+ user ||= find_or_build_ldap_user if auto_link_ldap_user?
+ user ||= build_new_user if signup_enabled?
+
+ user.external = true if external_provider? && user&.new_record?
+
+ user
+ end
+
+ protected
+
+ def add_or_update_user_identities
+ return unless gl_user
+
+ # find_or_initialize_by doesn't update `gl_user.identities`, and isn't autosaved.
+ identity = gl_user.identities.find { |identity| identity.provider == auth_hash.provider }
+
+ identity ||= gl_user.identities.build(provider: auth_hash.provider)
+ identity.extern_uid = auth_hash.uid
+
+ if auto_link_ldap_user? && !gl_user.ldap_user? && ldap_person
+ log.info "Correct LDAP account has been found. identity to user: #{gl_user.username}."
+ gl_user.identities.build(provider: ldap_person.provider, extern_uid: ldap_person.dn)
+ end
+ end
+
+ def find_or_build_ldap_user
+ return unless ldap_person
+
+ user = Gitlab::Auth::LDAP::User.find_by_uid_and_provider(ldap_person.dn, ldap_person.provider)
+ if user
+ log.info "LDAP account found for user #{user.username}. Building new #{auth_hash.provider} identity."
+ return user
+ end
+
+ log.info "No user found using #{auth_hash.provider} provider. Creating a new one."
+ build_new_user
+ end
+
+ def find_by_email
+ return unless auth_hash.has_attribute?(:email)
+
+ ::User.find_by(email: auth_hash.email.downcase)
+ end
+
+ def auto_link_ldap_user?
+ Gitlab.config.omniauth.auto_link_ldap_user
+ end
+
+ def creating_linked_ldap_user?
+ auto_link_ldap_user? && ldap_person
+ end
+
+ def ldap_person
+ return @ldap_person if defined?(@ldap_person)
+
+        # Look for a corresponding person with the same uid in any of the configured LDAP providers
+ Gitlab::Auth::LDAP::Config.providers.each do |provider|
+ adapter = Gitlab::Auth::LDAP::Adapter.new(provider)
+ @ldap_person = find_ldap_person(auth_hash, adapter)
+ break if @ldap_person
+ end
+ @ldap_person
+ end
+
+ def find_ldap_person(auth_hash, adapter)
+ Gitlab::Auth::LDAP::Person.find_by_uid(auth_hash.uid, adapter) ||
+ Gitlab::Auth::LDAP::Person.find_by_email(auth_hash.uid, adapter) ||
+ Gitlab::Auth::LDAP::Person.find_by_dn(auth_hash.uid, adapter)
+ end
+
+ def ldap_config
+ Gitlab::Auth::LDAP::Config.new(ldap_person.provider) if ldap_person
+ end
+
+ def needs_blocking?
+ new? && block_after_signup?
+ end
+
+ def signup_enabled?
+ providers = Gitlab.config.omniauth.allow_single_sign_on
+ if providers.is_a?(Array)
+ providers.include?(auth_hash.provider)
+ else
+ providers
+ end
+ end
+
+ def external_provider?
+ Gitlab.config.omniauth.external_providers.include?(auth_hash.provider)
+ end
+
+ def block_after_signup?
+ if creating_linked_ldap_user?
+ ldap_config.block_auto_created_users
+ else
+ Gitlab.config.omniauth.block_auto_created_users
+ end
+ end
+
+ def auth_hash=(auth_hash)
+ @auth_hash = AuthHash.new(auth_hash)
+ end
+
+ def find_by_uid_and_provider
+ identity = Identity.with_extern_uid(auth_hash.provider, auth_hash.uid).take
+ identity&.user
+ end
+
+ def build_new_user
+ user_params = user_attributes.merge(skip_confirmation: true)
+ Users::BuildService.new(nil, user_params).execute(skip_authorization: true)
+ end
+
+ def user_attributes
+ # Give preference to LDAP for sensitive information when creating a linked account
+ if creating_linked_ldap_user?
+ username = ldap_person.username.presence
+ email = ldap_person.email.first.presence
+ end
+
+ username ||= auth_hash.username
+ email ||= auth_hash.email
+
+ valid_username = ::Namespace.clean_path(username)
+
+ uniquify = Uniquify.new
+ valid_username = uniquify.string(valid_username) { |s| !NamespacePathValidator.valid_path?(s) }
+
+ name = auth_hash.name
+ name = valid_username if name.strip.empty?
+
+ {
+ name: name,
+ username: valid_username,
+ email: email,
+ password: auth_hash.password,
+ password_confirmation: auth_hash.password,
+ password_automatically_set: true
+ }
+ end
+
+ def sync_profile_from_provider?
+ Gitlab::Auth::OAuth::Provider.sync_profile_from_provider?(auth_hash.provider)
+ end
+
+ def update_profile
+ clear_user_synced_attributes_metadata
+
+ return unless sync_profile_from_provider? || creating_linked_ldap_user?
+
+ metadata = gl_user.build_user_synced_attributes_metadata
+
+ if sync_profile_from_provider?
+ UserSyncedAttributesMetadata::SYNCABLE_ATTRIBUTES.each do |key|
+ if auth_hash.has_attribute?(key) && gl_user.sync_attribute?(key)
+ gl_user[key] = auth_hash.public_send(key) # rubocop:disable GitlabSecurity/PublicSend
+ metadata.set_attribute_synced(key, true)
+ else
+ metadata.set_attribute_synced(key, false)
+ end
+ end
+
+ metadata.provider = auth_hash.provider
+ end
+
+ if creating_linked_ldap_user? && gl_user.email == ldap_person.email.first
+ metadata.set_attribute_synced(:email, true)
+ metadata.provider = ldap_person.provider
+ end
+ end
+
+ def clear_user_synced_attributes_metadata
+ gl_user&.user_synced_attributes_metadata&.destroy
+ end
+
+ def log
+ Gitlab::AppLogger
+ end
+
+ def oauth_provider_disabled?
+ Gitlab::CurrentSettings.current_application_settings
+ .disabled_oauth_sign_in_sources
+ .include?(auth_hash.provider)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/auth/request_authenticator.rb b/lib/gitlab/auth/request_authenticator.rb
new file mode 100644
index 00000000000..a0b5cd868c3
--- /dev/null
+++ b/lib/gitlab/auth/request_authenticator.rb
@@ -0,0 +1,33 @@
+# Use for authentication only, in particular for Rack::Attack.
+# Does not perform authorization of scopes, etc.
+module Gitlab
+ module Auth
+ class RequestAuthenticator
+ include UserAuthFinders
+
+ attr_reader :request
+
+ def initialize(request)
+ @request = request
+ end
+
+ def user
+ find_sessionless_user || find_user_from_warden
+ end
+
+ def find_sessionless_user
+ find_user_from_access_token || find_user_from_rss_token
+ rescue Gitlab::Auth::AuthenticationError
+ nil
+ end
+
+ def valid_access_token?(scopes: [])
+ validate_access_token!(scopes: scopes)
+
+ true
+ rescue Gitlab::Auth::AuthenticationError
+ false
+ end
+ end
+ end
+end
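A hypothetical Rack::Attack throttle built on top of it (the name, limit and key format are made up):

    Rack::Attack.throttle('api requests', limit: 300, period: 1.minute) do |req|
      # Prefer a stable per-user key when the request carries valid
      # credentials; fall back to the client IP otherwise.
      user = Gitlab::Auth::RequestAuthenticator.new(req).user
      user ? "user:#{user.id}" : "ip:#{req.ip}"
    end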
diff --git a/lib/gitlab/auth/saml/auth_hash.rb b/lib/gitlab/auth/saml/auth_hash.rb
new file mode 100644
index 00000000000..c345a7e3f6c
--- /dev/null
+++ b/lib/gitlab/auth/saml/auth_hash.rb
@@ -0,0 +1,19 @@
+module Gitlab
+ module Auth
+ module Saml
+ class AuthHash < Gitlab::Auth::OAuth::AuthHash
+ def groups
+ Array.wrap(get_raw(Gitlab::Auth::Saml::Config.groups))
+ end
+
+ private
+
+ def get_raw(key)
+ # Needs to call `all` because of https://git.io/vVo4u
+ # otherwise just the first value is returned
+ auth_hash.extra[:raw_info].all[key]
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/auth/saml/config.rb b/lib/gitlab/auth/saml/config.rb
new file mode 100644
index 00000000000..e654e7fe438
--- /dev/null
+++ b/lib/gitlab/auth/saml/config.rb
@@ -0,0 +1,21 @@
+module Gitlab
+ module Auth
+ module Saml
+ class Config
+ class << self
+ def options
+ Gitlab.config.omniauth.providers.find { |provider| provider.name == 'saml' }
+ end
+
+ def groups
+ options[:groups_attribute]
+ end
+
+ def external_groups
+ options[:external_groups]
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/auth/saml/user.rb b/lib/gitlab/auth/saml/user.rb
new file mode 100644
index 00000000000..d4024e9ec39
--- /dev/null
+++ b/lib/gitlab/auth/saml/user.rb
@@ -0,0 +1,52 @@
+# SAML extension for User model
+#
+# * Find GitLab user based on SAML uid and provider
+# * Create new user from SAML data
+#
+module Gitlab
+ module Auth
+ module Saml
+ class User < Gitlab::Auth::OAuth::User
+ def save
+ super('SAML')
+ end
+
+ def find_user
+ user = find_by_uid_and_provider
+
+ user ||= find_by_email if auto_link_saml_user?
+ user ||= find_or_build_ldap_user if auto_link_ldap_user?
+ user ||= build_new_user if signup_enabled?
+
+ if external_users_enabled? && user
+ # Check if there is overlap between the user's groups and the external groups
+ # setting then set user as external or internal.
+ user.external = !(auth_hash.groups & Gitlab::Auth::Saml::Config.external_groups).empty?
+ end
+
+ user
+ end
+
+ def changed?
+ return true unless gl_user
+
+ gl_user.changed? || gl_user.identities.any?(&:changed?)
+ end
+
+ protected
+
+ def auto_link_saml_user?
+ Gitlab.config.omniauth.auto_link_saml_user
+ end
+
+ def external_users_enabled?
+ !Gitlab::Auth::Saml::Config.external_groups.nil?
+ end
+
+ def auth_hash=(auth_hash)
+ @auth_hash = Gitlab::Auth::Saml::AuthHash.new(auth_hash)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/auth/user_auth_finders.rb b/lib/gitlab/auth/user_auth_finders.rb
new file mode 100644
index 00000000000..cf02030c577
--- /dev/null
+++ b/lib/gitlab/auth/user_auth_finders.rb
@@ -0,0 +1,107 @@
+module Gitlab
+ module Auth
+ #
+ # Exceptions
+ #
+
+ AuthenticationError = Class.new(StandardError)
+ MissingTokenError = Class.new(AuthenticationError)
+ TokenNotFoundError = Class.new(AuthenticationError)
+ ExpiredError = Class.new(AuthenticationError)
+ RevokedError = Class.new(AuthenticationError)
+ UnauthorizedError = Class.new(AuthenticationError)
+
+ class InsufficientScopeError < AuthenticationError
+ attr_reader :scopes
+ def initialize(scopes)
+ @scopes = scopes.map { |s| s.try(:name) || s }
+ end
+ end
+
+ module UserAuthFinders
+ include Gitlab::Utils::StrongMemoize
+
+ PRIVATE_TOKEN_HEADER = 'HTTP_PRIVATE_TOKEN'.freeze
+ PRIVATE_TOKEN_PARAM = :private_token
+
+ # Check the Rails session for valid authentication details
+ def find_user_from_warden
+ current_request.env['warden']&.authenticate if verified_request?
+ end
+
+ def find_user_from_rss_token
+ return unless current_request.path.ends_with?('.atom') || current_request.format.atom?
+
+ token = current_request.params[:rss_token].presence
+ return unless token
+
+ User.find_by_rss_token(token) || raise(UnauthorizedError)
+ end
+
+ def find_user_from_access_token
+ return unless access_token
+
+ validate_access_token!
+
+ access_token.user || raise(UnauthorizedError)
+ end
+
+ def validate_access_token!(scopes: [])
+ return unless access_token
+
+ case AccessTokenValidationService.new(access_token, request: request).validate(scopes: scopes)
+ when AccessTokenValidationService::INSUFFICIENT_SCOPE
+ raise InsufficientScopeError.new(scopes)
+ when AccessTokenValidationService::EXPIRED
+ raise ExpiredError
+ when AccessTokenValidationService::REVOKED
+ raise RevokedError
+ end
+ end
+
+ private
+
+ def access_token
+ strong_memoize(:access_token) do
+ find_oauth_access_token || find_personal_access_token
+ end
+ end
+
+ def find_personal_access_token
+ token =
+ current_request.params[PRIVATE_TOKEN_PARAM].presence ||
+ current_request.env[PRIVATE_TOKEN_HEADER].presence
+
+ return unless token
+
+ # Expiration, revocation and scopes are verified in `validate_access_token!`
+ PersonalAccessToken.find_by(token: token) || raise(UnauthorizedError)
+ end
+
+ def find_oauth_access_token
+ token = Doorkeeper::OAuth::Token.from_request(current_request, *Doorkeeper.configuration.access_token_methods)
+ return unless token
+
+ # Expiration, revocation and scopes are verified in `validate_access_token!`
+ oauth_token = OauthAccessToken.by_token(token)
+ raise UnauthorizedError unless oauth_token
+
+ oauth_token.revoke_previous_refresh_token!
+ oauth_token
+ end
+
+      # Check if the request is GET/HEAD, or if the CSRF token is valid.
+ def verified_request?
+ Gitlab::RequestForgeryProtection.verified?(current_request.env)
+ end
+
+ def ensure_action_dispatch_request(request)
+ ActionDispatch::Request.new(request.env.dup)
+ end
+
+ def current_request
+ @current_request ||= ensure_action_dispatch_request(request)
+ end
+ end
+ end
+end
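Any object that includes the module and exposes a `request` gets these finders; a hypothetical consumer:

    class TokenOnlyAuthenticator
      include Gitlab::Auth::UserAuthFinders

      attr_reader :request  # an ActionDispatch::Request or Rack::Request

      def initialize(request)
        @request = request
      end

      # Returns the token's user, nil when no token is present, or raises one
      # of the errors defined above (ExpiredError, RevokedError, ...).
      def api_user
        validate_access_token!(scopes: [:api])
        find_user_from_access_token
      end
    end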
diff --git a/lib/gitlab/background_migration/.rubocop.yml b/lib/gitlab/background_migration/.rubocop.yml
new file mode 100644
index 00000000000..8242821cedc
--- /dev/null
+++ b/lib/gitlab/background_migration/.rubocop.yml
@@ -0,0 +1,52 @@
+# For background migrations we define a custom set of rules to make it less
+# difficult to review these migrations. To reduce the complexity of these
+# migrations some rules may be stricter than the defaults set in the root
+# .rubocop.yml file.
+---
+inherit_from: ../../../.rubocop.yml
+
+Metrics/AbcSize:
+ Enabled: true
+ Max: 30
+ Details: >
+ Code that involves a lot of branches can be very hard to wrap your head
+ around.
+
+Metrics/PerceivedComplexity:
+ Enabled: true
+
+Metrics/LineLength:
+ Enabled: true
+ Details: >
+ Long lines are very hard to read and make it more difficult to review
+ changes.
+
+Metrics/MethodLength:
+ Enabled: true
+ Max: 30
+ Details: >
+ Long methods can be very hard to review. Consider splitting this method up
+ into separate methods.
+
+Metrics/ClassLength:
+ Enabled: true
+ Details: >
+ Long classes can be very hard to review. Consider splitting this class up
+ into multiple classes.
+
+Metrics/BlockLength:
+ Enabled: true
+ Details: >
+ Long blocks can be hard to read. Consider splitting the code into separate
+ methods.
+
+Style/Documentation:
+ Enabled: true
+ Details: >
+ Adding documentation makes it easier to figure out what a migration is
+ supposed to do.
+
+Style/FrozenStringLiteralComment:
+ Enabled: true
+ Details: >-
+ This removes the need for calling "freeze", reducing noise in the code.
diff --git a/lib/gitlab/background_migration/add_merge_request_diff_commits_count.rb b/lib/gitlab/background_migration/add_merge_request_diff_commits_count.rb
new file mode 100644
index 00000000000..7bffffec94d
--- /dev/null
+++ b/lib/gitlab/background_migration/add_merge_request_diff_commits_count.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+# rubocop:disable Metrics/LineLength
+
+module Gitlab
+ module BackgroundMigration
+ class AddMergeRequestDiffCommitsCount
+ class MergeRequestDiff < ActiveRecord::Base
+ self.table_name = 'merge_request_diffs'
+ end
+
+ def perform(start_id, stop_id)
+ Rails.logger.info("Setting commits_count for merge request diffs: #{start_id} - #{stop_id}")
+
+ update = '
+ commits_count = (
+ SELECT count(*)
+ FROM merge_request_diff_commits
+ WHERE merge_request_diffs.id = merge_request_diff_commits.merge_request_diff_id
+ )'.squish
+
+ MergeRequestDiff.where(id: start_id..stop_id).update_all(update)
+ end
+ end
+ end
+end
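Jobs like this are enqueued in ID ranges from a post-deployment migration; roughly as follows (the batch size, delay and the EachBatch-enabled model are assumptions):

    # Assumes a MergeRequestDiff model that includes EachBatch.
    MergeRequestDiff.each_batch(of: 5_000) do |relation, index|
      range = relation.pluck('MIN(id)', 'MAX(id)').first

      BackgroundMigrationWorker.perform_in(
        index * 5.minutes, 'AddMergeRequestDiffCommitsCount', range)
    end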
diff --git a/lib/gitlab/background_migration/cleanup_concurrent_type_change.rb b/lib/gitlab/background_migration/cleanup_concurrent_type_change.rb
new file mode 100644
index 00000000000..de622f657b2
--- /dev/null
+++ b/lib/gitlab/background_migration/cleanup_concurrent_type_change.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+    # Background migration for cleaning up a concurrent column type change.
+ class CleanupConcurrentTypeChange
+ include Database::MigrationHelpers
+
+ RESCHEDULE_DELAY = 10.minutes
+
+ # table - The name of the table the migration is performed for.
+ # old_column - The name of the old (to drop) column.
+ # new_column - The name of the new column.
+ def perform(table, old_column, new_column)
+ return unless column_exists?(:issues, new_column)
+
+ rows_to_migrate = define_model_for(table)
+ .where(new_column => nil)
+ .where
+ .not(old_column => nil)
+
+ if rows_to_migrate.any?
+ BackgroundMigrationWorker.perform_in(
+ RESCHEDULE_DELAY,
+ 'CleanupConcurrentTypeChange',
+ [table, old_column, new_column]
+ )
+ else
+ cleanup_concurrent_column_type_change(table, old_column)
+ end
+ end
+
+ # These methods are necessary so we can re-use the migration helpers in
+ # this class.
+ def connection
+ ActiveRecord::Base.connection
+ end
+
+ def method_missing(name, *args, &block)
+ connection.__send__(name, *args, &block) # rubocop: disable GitlabSecurity/PublicSend
+ end
+
+ def respond_to_missing?(*args)
+ connection.respond_to?(*args) || super
+ end
+
+ def define_model_for(table)
+ Class.new(ActiveRecord::Base) do
+ self.table_name = table
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/copy_column.rb b/lib/gitlab/background_migration/copy_column.rb
new file mode 100644
index 00000000000..ef70f37d5eb
--- /dev/null
+++ b/lib/gitlab/background_migration/copy_column.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # CopyColumn is a simple (reusable) background migration that can be used to
+ # update the value of a column based on the value of another column in the
+ # same table.
+ #
+ # For this background migration to work the table that is migrated _has_ to
+ # have an `id` column as the primary key.
+ class CopyColumn
+ # table - The name of the table that contains the columns.
+ # copy_from - The column containing the data to copy.
+ # copy_to - The column to copy the data to.
+ # start_id - The start ID of the range of rows to update.
+ # end_id - The end ID of the range of rows to update.
+ def perform(table, copy_from, copy_to, start_id, end_id)
+ return unless connection.column_exists?(table, copy_to)
+
+ quoted_table = connection.quote_table_name(table)
+ quoted_copy_from = connection.quote_column_name(copy_from)
+ quoted_copy_to = connection.quote_column_name(copy_to)
+
+ # We're using raw SQL here since this job may be frequently executed. As
+ # a result dynamically defining models would lead to many unnecessary
+ # schema information queries.
+ connection.execute <<-SQL.strip_heredoc
+ UPDATE #{quoted_table}
+ SET #{quoted_copy_to} = #{quoted_copy_from}
+ WHERE id BETWEEN #{start_id} AND #{end_id}
+ AND #{quoted_copy_from} IS NOT NULL
+ AND #{quoted_copy_to} IS NULL
+ SQL
+ end
+
+ def connection
+ ActiveRecord::Base.connection
+ end
+ end
+ end
+end
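CopyColumn and CleanupConcurrentTypeChange are the two halves of an online column type change; a rough sketch of how the migration helpers chain them (the table, column names, ID range and timing are illustrative):

    # Backfill one ID range now...
    BackgroundMigrationWorker.perform_async(
      'CopyColumn', ['issues', 'closed_at', 'closed_at_for_type_change', 1, 10_000])

    # ...and schedule the cleanup that finalizes the change once all ranges are done.
    BackgroundMigrationWorker.perform_in(
      30.minutes, 'CleanupConcurrentTypeChange', ['issues', 'closed_at', 'closed_at_for_type_change'])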
diff --git a/lib/gitlab/background_migration/create_fork_network_memberships_range.rb b/lib/gitlab/background_migration/create_fork_network_memberships_range.rb
new file mode 100644
index 00000000000..1b4a9e8a194
--- /dev/null
+++ b/lib/gitlab/background_migration/create_fork_network_memberships_range.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/LineLength
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class CreateForkNetworkMembershipsRange
+ RESCHEDULE_DELAY = 15
+
+ class ForkedProjectLink < ActiveRecord::Base
+ self.table_name = 'forked_project_links'
+ end
+
+ def perform(start_id, end_id)
+ log("Creating memberships for forks: #{start_id} - #{end_id}")
+
+ insert_members(start_id, end_id)
+
+ if missing_members?(start_id, end_id)
+ BackgroundMigrationWorker.perform_in(RESCHEDULE_DELAY, "CreateForkNetworkMembershipsRange", [start_id, end_id])
+ end
+ end
+
+ def insert_members(start_id, end_id)
+ ActiveRecord::Base.connection.execute <<~INSERT_MEMBERS
+ INSERT INTO fork_network_members (fork_network_id, project_id, forked_from_project_id)
+
+ SELECT fork_network_members.fork_network_id,
+ forked_project_links.forked_to_project_id,
+ forked_project_links.forked_from_project_id
+
+ FROM forked_project_links
+
+ INNER JOIN fork_network_members
+ ON forked_project_links.forked_from_project_id = fork_network_members.project_id
+
+ WHERE forked_project_links.id BETWEEN #{start_id} AND #{end_id}
+ AND NOT EXISTS (
+ SELECT true
+ FROM fork_network_members existing_members
+ WHERE existing_members.project_id = forked_project_links.forked_to_project_id
+ )
+ INSERT_MEMBERS
+ rescue ActiveRecord::RecordNotUnique => e
+ # `fork_network_member` was created concurrently in another migration
+ log(e.message)
+ end
+
+ def missing_members?(start_id, end_id)
+ count_sql = <<~MISSING_MEMBERS
+ SELECT COUNT(*)
+
+ FROM forked_project_links
+
+ WHERE NOT EXISTS (
+ SELECT true
+ FROM fork_network_members
+ WHERE fork_network_members.project_id = forked_project_links.forked_to_project_id
+ )
+ AND EXISTS (
+ SELECT true
+ FROM projects
+ WHERE forked_project_links.forked_from_project_id = projects.id
+ )
+ AND NOT EXISTS (
+ SELECT true
+ FROM forked_project_links AS parent_links
+ WHERE parent_links.forked_to_project_id = forked_project_links.forked_from_project_id
+ AND NOT EXISTS (
+ SELECT true
+ FROM projects
+ WHERE parent_links.forked_from_project_id = projects.id
+ )
+ )
+ AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
+ MISSING_MEMBERS
+
+ ForkedProjectLink.count_by_sql(count_sql) > 0
+ end
+
+ def log(message)
+ Rails.logger.info("#{self.class.name} - #{message}")
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys.rb b/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys.rb
new file mode 100644
index 00000000000..c2bf42f846d
--- /dev/null
+++ b/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/LineLength
+# rubocop:disable Style/Documentation
+
+class Gitlab::BackgroundMigration::CreateGpgKeySubkeysFromGpgKeys
+ class GpgKey < ActiveRecord::Base
+ self.table_name = 'gpg_keys'
+
+ include EachBatch
+ include ShaAttribute
+
+ sha_attribute :primary_keyid
+ sha_attribute :fingerprint
+
+ has_many :subkeys, class_name: 'GpgKeySubkey'
+ end
+
+ class GpgKeySubkey < ActiveRecord::Base
+ self.table_name = 'gpg_key_subkeys'
+
+ include ShaAttribute
+
+ sha_attribute :keyid
+ sha_attribute :fingerprint
+ end
+
+ def perform(gpg_key_id)
+ gpg_key = GpgKey.find_by(id: gpg_key_id)
+
+ return if gpg_key.nil?
+ return if gpg_key.subkeys.any?
+
+ create_subkeys(gpg_key)
+ update_signatures(gpg_key)
+ end
+
+ private
+
+ def create_subkeys(gpg_key)
+ gpg_subkeys = Gitlab::Gpg.subkeys_from_key(gpg_key.key)
+
+ gpg_subkeys[gpg_key.primary_keyid.upcase]&.each do |subkey_data|
+ gpg_key.subkeys.build(keyid: subkey_data[:keyid], fingerprint: subkey_data[:fingerprint])
+ end
+
+ # Improve latency by doing all INSERTs in a single call
+ GpgKey.transaction do
+ gpg_key.save!
+ end
+ end
+
+ def update_signatures(gpg_key)
+ return unless gpg_key.subkeys.exists?
+
+ InvalidGpgSignatureUpdateWorker.perform_async(gpg_key.id)
+ end
+end
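Since this job operates on a single GPG key at a time, a scheduling migration would typically enqueue one background job per key. A minimal sketch; the model used for batching is an assumption, not shown in this diff:

    # Enqueue one CreateGpgKeySubkeysFromGpgKeys job per existing GPG key.
    GpgKey.select(:id).find_each do |key|
      BackgroundMigrationWorker.perform_async('CreateGpgKeySubkeysFromGpgKeys', [key.id])
    end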
diff --git a/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range.rb b/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range.rb
index b1411be3016..21b626dde56 100644
--- a/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range.rb
+++ b/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range.rb
@@ -1,40 +1,12 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
module Gitlab
module BackgroundMigration
class DeleteConflictingRedirectRoutesRange
- class Route < ActiveRecord::Base
- self.table_name = 'routes'
- end
-
- class RedirectRoute < ActiveRecord::Base
- self.table_name = 'redirect_routes'
- end
-
- # start_id - The start ID of the range of events to process
- # end_id - The end ID of the range to process.
def perform(start_id, end_id)
- return unless migrate?
-
- conflicts = RedirectRoute.where(routes_match_redirects_clause(start_id, end_id))
- num_rows = conflicts.delete_all
-
- Rails.logger.info("Gitlab::BackgroundMigration::DeleteConflictingRedirectRoutesRange [#{start_id}, #{end_id}] - Deleted #{num_rows} redirect routes that were conflicting with routes.")
- end
-
- def migrate?
- Route.table_exists? && RedirectRoute.table_exists?
- end
-
- def routes_match_redirects_clause(start_id, end_id)
- <<~ROUTES_MATCH_REDIRECTS
- EXISTS (
- SELECT 1 FROM routes
- WHERE (
- LOWER(redirect_routes.path) = LOWER(routes.path)
- OR LOWER(redirect_routes.path) LIKE LOWER(CONCAT(routes.path, '/%'))
- )
- AND routes.id BETWEEN #{start_id} AND #{end_id}
- )
- ROUTES_MATCH_REDIRECTS
+ # No-op.
+ # See https://gitlab.com/gitlab-com/infrastructure/issues/3460#note_53223252
end
end
end
diff --git a/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits.rb b/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits.rb
index 3fde1b09efb..fd5cbf76e47 100644
--- a/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits.rb
+++ b/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits.rb
@@ -1,13 +1,26 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/MethodLength
+# rubocop:disable Metrics/LineLength
+# rubocop:disable Metrics/AbcSize
+# rubocop:disable Style/Documentation
+
module Gitlab
module BackgroundMigration
class DeserializeMergeRequestDiffsAndCommits
attr_reader :diff_ids, :commit_rows, :file_rows
+ class Error < StandardError
+ def backtrace
+ cause.backtrace
+ end
+ end
+
class MergeRequestDiff < ActiveRecord::Base
self.table_name = 'merge_request_diffs'
end
BUFFER_ROWS = 1000
+ DIFF_FILE_BUFFER_ROWS = 100
def perform(start_id, stop_id)
merge_request_diffs = MergeRequestDiff
@@ -26,13 +39,17 @@ module Gitlab
if diff_ids.length > BUFFER_ROWS ||
commit_rows.length > BUFFER_ROWS ||
- file_rows.length > BUFFER_ROWS
+ file_rows.length > DIFF_FILE_BUFFER_ROWS
flush_buffers!
end
end
flush_buffers!
+ rescue => e
+ Rails.logger.info("#{self.class.name}: failed for IDs #{merge_request_diffs.map(&:id)} with #{e.class.name}")
+
+ raise Error.new(e.inspect)
end
private
@@ -45,20 +62,32 @@ module Gitlab
def flush_buffers!
if diff_ids.any?
- MergeRequestDiff.transaction do
- Gitlab::Database.bulk_insert('merge_request_diff_commits', commit_rows)
- Gitlab::Database.bulk_insert('merge_request_diff_files', file_rows)
+ commit_rows.each_slice(BUFFER_ROWS).each do |commit_rows_slice|
+ bulk_insert('merge_request_diff_commits', commit_rows_slice)
+ end
- MergeRequestDiff.where(id: diff_ids).update_all(st_commits: nil, st_diffs: nil)
+ file_rows.each_slice(DIFF_FILE_BUFFER_ROWS).each do |file_rows_slice|
+ bulk_insert('merge_request_diff_files', file_rows_slice)
end
+
+ MergeRequestDiff.where(id: diff_ids).update_all(st_commits: nil, st_diffs: nil)
end
reset_buffers!
end
+ def bulk_insert(table, rows)
+ Gitlab::Database.bulk_insert(table, rows)
+ rescue ActiveRecord::RecordNotUnique
+ ids = rows.map { |row| row[:merge_request_diff_id] }.uniq.sort
+
+ Rails.logger.info("#{self.class.name}: rows inserted twice for IDs #{ids}")
+ end
+
def single_diff_rows(merge_request_diff)
sha_attribute = Gitlab::Database::ShaAttribute.new
commits = YAML.load(merge_request_diff.st_commits) rescue []
+ commits ||= []
commit_rows = commits.map.with_index do |commit, index|
commit_hash = commit.to_hash.with_indifferent_access.except(:parent_ids)
diff --git a/lib/gitlab/background_migration/migrate_build_stage.rb b/lib/gitlab/background_migration/migrate_build_stage.rb
new file mode 100644
index 00000000000..8fe4f1a2289
--- /dev/null
+++ b/lib/gitlab/background_migration/migrate_build_stage.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/AbcSize
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class MigrateBuildStage
+ module Migratable
+ class Stage < ActiveRecord::Base
+ self.table_name = 'ci_stages'
+ end
+
+ class Build < ActiveRecord::Base
+ self.table_name = 'ci_builds'
+
+ def ensure_stage!(attempts: 2)
+ find_stage || create_stage!
+ rescue ActiveRecord::RecordNotUnique
+ retry if (attempts -= 1) > 0
+ raise
+ end
+
+ def find_stage
+ Stage.find_by(name: self.stage || 'test',
+ pipeline_id: self.commit_id,
+ project_id: self.project_id)
+ end
+
+ def create_stage!
+ Stage.create!(name: self.stage || 'test',
+ pipeline_id: self.commit_id,
+ project_id: self.project_id)
+ end
+ end
+ end
+
+ def perform(start_id, stop_id)
+ stages = Migratable::Build.where('stage_id IS NULL')
+ .where('id BETWEEN ? AND ?', start_id, stop_id)
+ .map { |build| build.ensure_stage! }
+ .compact.map(&:id)
+
+ MigrateBuildStageIdReference.new.perform(start_id, stop_id)
+ MigrateStageStatus.new.perform(stages.min, stages.max)
+ end
+ end
+ end
+end
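The `ensure_stage!` method above is a find-or-create guarded by a bounded retry on `ActiveRecord::RecordNotUnique`, which covers the race where two jobs insert the same stage concurrently. The same pattern in isolation, using a hypothetical model with a unique index on `name`:

    # Retry once so the row inserted by a concurrent winner can be found
    # on the second attempt; re-raise if it still cannot be created.
    def find_or_create_label(name, attempts: 2)
      Label.find_by(name: name) || Label.create!(name: name)
    rescue ActiveRecord::RecordNotUnique
      retry if (attempts -= 1) > 0
      raise
    end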
diff --git a/lib/gitlab/background_migration/migrate_build_stage_id_reference.rb b/lib/gitlab/background_migration/migrate_build_stage_id_reference.rb
index 91540127ea9..0a8a4313cd5 100644
--- a/lib/gitlab/background_migration/migrate_build_stage_id_reference.rb
+++ b/lib/gitlab/background_migration/migrate_build_stage_id_reference.rb
@@ -1,3 +1,6 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
module Gitlab
module BackgroundMigration
class MigrateBuildStageIdReference
diff --git a/lib/gitlab/background_migration/migrate_events_to_push_event_payloads.rb b/lib/gitlab/background_migration/migrate_events_to_push_event_payloads.rb
index 432f7c3e706..7088aa0860a 100644
--- a/lib/gitlab/background_migration/migrate_events_to_push_event_payloads.rb
+++ b/lib/gitlab/background_migration/migrate_events_to_push_event_payloads.rb
@@ -1,3 +1,7 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/LineLength
+# rubocop:disable Style/Documentation
+
module Gitlab
module BackgroundMigration
# Class that migrates events for the new push event payloads setup. All
@@ -124,8 +128,14 @@ module Gitlab
end
def process_event(event)
- replicate_event(event)
- create_push_event_payload(event) if event.push_event?
+ ActiveRecord::Base.transaction do
+ replicate_event(event)
+ create_push_event_payload(event) if event.push_event?
+ end
+ rescue ActiveRecord::InvalidForeignKey => e
+ # A foreign key error means the associated event was removed. In this
+ # case we'll just skip migrating the event.
+ Rails.logger.error("Unable to migrate event #{event.id}: #{e}")
end
def replicate_event(event)
@@ -133,9 +143,6 @@ module Gitlab
.with_indifferent_access.except(:title, :data)
EventForMigration.create!(new_attributes)
- rescue ActiveRecord::InvalidForeignKey
- # A foreign key error means the associated event was removed. In this
- # case we'll just skip migrating the event.
end
def create_push_event_payload(event)
@@ -152,9 +159,6 @@ module Gitlab
ref: event.trimmed_ref_name,
commit_title: event.commit_title
)
- rescue ActiveRecord::InvalidForeignKey
- # A foreign key error means the associated event was removed. In this
- # case we'll just skip migrating the event.
end
def find_events(start_id, end_id)
diff --git a/lib/gitlab/background_migration/migrate_stage_status.rb b/lib/gitlab/background_migration/migrate_stage_status.rb
index b1ff0900709..0e5c7f092f2 100644
--- a/lib/gitlab/background_migration/migrate_stage_status.rb
+++ b/lib/gitlab/background_migration/migrate_stage_status.rb
@@ -1,3 +1,7 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/AbcSize
+# rubocop:disable Style/Documentation
+
module Gitlab
module BackgroundMigration
class MigrateStageStatus
diff --git a/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder.rb b/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder.rb
index 0881244ed49..7f243073fd0 100644
--- a/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder.rb
+++ b/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder.rb
@@ -1,3 +1,7 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/LineLength
+# rubocop:disable Style/Documentation
+
module Gitlab
module BackgroundMigration
class MigrateSystemUploadsToNewFolder
diff --git a/lib/gitlab/background_migration/move_personal_snippet_files.rb b/lib/gitlab/background_migration/move_personal_snippet_files.rb
index 07cec96bcc3..a4ef51fd0e8 100644
--- a/lib/gitlab/background_migration/move_personal_snippet_files.rb
+++ b/lib/gitlab/background_migration/move_personal_snippet_files.rb
@@ -1,3 +1,7 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/LineLength
+# rubocop:disable Style/Documentation
+
module Gitlab
module BackgroundMigration
class MovePersonalSnippetFiles
diff --git a/lib/gitlab/background_migration/normalize_ldap_extern_uids_range.rb b/lib/gitlab/background_migration/normalize_ldap_extern_uids_range.rb
new file mode 100644
index 00000000000..d9d3d2e667b
--- /dev/null
+++ b/lib/gitlab/background_migration/normalize_ldap_extern_uids_range.rb
@@ -0,0 +1,322 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/MethodLength
+# rubocop:disable Metrics/LineLength
+# rubocop:disable Metrics/ClassLength
+# rubocop:disable Metrics/BlockLength
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class NormalizeLdapExternUidsRange
+ class Identity < ActiveRecord::Base
+ self.table_name = 'identities'
+ end
+
+      # This class is copied here to make the migration resilient to future code changes.
+      # If the normalization behavior changes in the future, the change must be
+      # accompanied by another migration.
+ module Gitlab
+ module Auth
+ module LDAP
+ class DN
+ FormatError = Class.new(StandardError)
+ MalformedError = Class.new(FormatError)
+ UnsupportedError = Class.new(FormatError)
+
+ def self.normalize_value(given_value)
+ dummy_dn = "placeholder=#{given_value}"
+ normalized_dn = new(*dummy_dn).to_normalized_s
+ normalized_dn.sub(/\Aplaceholder=/, '')
+ end
+
+ ##
+              # Initialize a DN, escaping as required. Pass in attributes in name/value
+              # pairs. If there is a leftover argument, it will be appended to the dn
+              # without escaping (useful for a base string).
+ #
+ # Most uses of this class will be to escape a DN, rather than to parse it,
+ # so storing the dn as an escaped String and parsing parts as required
+ # with a state machine seems sensible.
+ def initialize(*args)
+ if args.length > 1
+ initialize_array(args)
+ else
+ initialize_string(args[0])
+ end
+ end
+
+ ##
+ # Parse a DN into key value pairs using ASN from
+ # http://tools.ietf.org/html/rfc2253 section 3.
+ # rubocop:disable Metrics/AbcSize
+ # rubocop:disable Metrics/CyclomaticComplexity
+ # rubocop:disable Metrics/PerceivedComplexity
+ def each_pair
+ state = :key
+ key = StringIO.new
+ value = StringIO.new
+ hex_buffer = ""
+
+ @dn.each_char.with_index do |char, dn_index|
+ case state
+ when :key then
+ case char
+ when 'a'..'z', 'A'..'Z' then
+ state = :key_normal
+ key << char
+ when '0'..'9' then
+ state = :key_oid
+ key << char
+ when ' ' then state = :key
+ else raise(MalformedError, "Unrecognized first character of an RDN attribute type name \"#{char}\"")
+ end
+ when :key_normal then
+ case char
+ when '=' then state = :value
+ when 'a'..'z', 'A'..'Z', '0'..'9', '-', ' ' then key << char
+ else raise(MalformedError, "Unrecognized RDN attribute type name character \"#{char}\"")
+ end
+ when :key_oid then
+ case char
+ when '=' then state = :value
+ when '0'..'9', '.', ' ' then key << char
+ else raise(MalformedError, "Unrecognized RDN OID attribute type name character \"#{char}\"")
+ end
+ when :value then
+ case char
+ when '\\' then state = :value_normal_escape
+ when '"' then state = :value_quoted
+ when ' ' then state = :value
+ when '#' then
+ state = :value_hexstring
+ value << char
+ when ',' then
+ state = :key
+ yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
+ key = StringIO.new
+ value = StringIO.new
+ else
+ state = :value_normal
+ value << char
+ end
+ when :value_normal then
+ case char
+ when '\\' then state = :value_normal_escape
+ when ',' then
+ state = :key
+ yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
+ key = StringIO.new
+ value = StringIO.new
+ when '+' then raise(UnsupportedError, "Multivalued RDNs are not supported")
+ else value << char
+ end
+ when :value_normal_escape then
+ case char
+ when '0'..'9', 'a'..'f', 'A'..'F' then
+ state = :value_normal_escape_hex
+ hex_buffer = char
+ else
+ state = :value_normal
+ value << char
+ end
+ when :value_normal_escape_hex then
+ case char
+ when '0'..'9', 'a'..'f', 'A'..'F' then
+ state = :value_normal
+ value << "#{hex_buffer}#{char}".to_i(16).chr
+ else raise(MalformedError, "Invalid escaped hex code \"\\#{hex_buffer}#{char}\"")
+ end
+ when :value_quoted then
+ case char
+ when '\\' then state = :value_quoted_escape
+ when '"' then state = :value_end
+ else value << char
+ end
+ when :value_quoted_escape then
+ case char
+ when '0'..'9', 'a'..'f', 'A'..'F' then
+ state = :value_quoted_escape_hex
+ hex_buffer = char
+ else
+ state = :value_quoted
+ value << char
+ end
+ when :value_quoted_escape_hex then
+ case char
+ when '0'..'9', 'a'..'f', 'A'..'F' then
+ state = :value_quoted
+ value << "#{hex_buffer}#{char}".to_i(16).chr
+ else raise(MalformedError, "Expected the second character of a hex pair inside a double quoted value, but got \"#{char}\"")
+ end
+ when :value_hexstring then
+ case char
+ when '0'..'9', 'a'..'f', 'A'..'F' then
+ state = :value_hexstring_hex
+ value << char
+ when ' ' then state = :value_end
+ when ',' then
+ state = :key
+ yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
+ key = StringIO.new
+ value = StringIO.new
+ else raise(MalformedError, "Expected the first character of a hex pair, but got \"#{char}\"")
+ end
+ when :value_hexstring_hex then
+ case char
+ when '0'..'9', 'a'..'f', 'A'..'F' then
+ state = :value_hexstring
+ value << char
+ else raise(MalformedError, "Expected the second character of a hex pair, but got \"#{char}\"")
+ end
+ when :value_end then
+ case char
+ when ' ' then state = :value_end
+ when ',' then
+ state = :key
+ yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
+ key = StringIO.new
+ value = StringIO.new
+ else raise(MalformedError, "Expected the end of an attribute value, but got \"#{char}\"")
+ end
+ else raise "Fell out of state machine"
+ end
+ end
+
+ # Last pair
+ raise(MalformedError, 'DN string ended unexpectedly') unless
+ [:value, :value_normal, :value_hexstring, :value_end].include? state
+
+ yield key.string.strip, rstrip_except_escaped(value.string, @dn.length)
+ end
+
+ def rstrip_except_escaped(str, dn_index)
+ str_ends_with_whitespace = str.match(/\s\z/)
+
+ if str_ends_with_whitespace
+ dn_part_ends_with_escaped_whitespace = @dn[0, dn_index].match(/\\(\s+)\z/)
+
+ if dn_part_ends_with_escaped_whitespace
+ dn_part_rwhitespace = dn_part_ends_with_escaped_whitespace[1]
+ num_chars_to_remove = dn_part_rwhitespace.length - 1
+ str = str[0, str.length - num_chars_to_remove]
+ else
+ str.rstrip!
+ end
+ end
+
+ str
+ end
+
+ ##
+ # Returns the DN as an array in the form expected by the constructor.
+ def to_a
+ a = []
+ self.each_pair { |key, value| a << key << value } unless @dn.empty?
+ a
+ end
+
+ ##
+ # Return the DN as an escaped string.
+ def to_s
+ @dn
+ end
+
+ ##
+ # Return the DN as an escaped and normalized string.
+ def to_normalized_s
+ self.class.new(*to_a).to_s.downcase
+ end
+
+ # https://tools.ietf.org/html/rfc4514 section 2.4 lists these exceptions
+ # for DN values. All of the following must be escaped in any normal string
+ # using a single backslash ('\') as escape. The space character is left
+ # out here because in a "normalized" string, spaces should only be escaped
+ # if necessary (i.e. leading or trailing space).
+ NORMAL_ESCAPES = [',', '+', '"', '\\', '<', '>', ';', '='].freeze
+
+ # The following must be represented as escaped hex
+ HEX_ESCAPES = {
+ "\n" => '\0a',
+ "\r" => '\0d'
+ }.freeze
+
+ # Compiled character class regexp using the keys from the above hash, and
+ # checking for a space or # at the start, or space at the end, of the
+ # string.
+ ESCAPE_RE = Regexp.new("(^ |^#| $|[" +
+ NORMAL_ESCAPES.map { |e| Regexp.escape(e) }.join +
+ "])")
+
+ HEX_ESCAPE_RE = Regexp.new("([" +
+ HEX_ESCAPES.keys.map { |e| Regexp.escape(e) }.join +
+ "])")
+
+ ##
+ # Escape a string for use in a DN value
+ def self.escape(string)
+ escaped = string.gsub(ESCAPE_RE) { |char| "\\" + char }
+ escaped.gsub(HEX_ESCAPE_RE) { |char| HEX_ESCAPES[char] }
+ end
+
+ private
+
+ def initialize_array(args)
+ buffer = StringIO.new
+
+ args.each_with_index do |arg, index|
+ if index.even? # key
+ buffer << "," if index > 0
+ buffer << arg
+ else # value
+ buffer << "="
+ buffer << self.class.escape(arg)
+ end
+ end
+
+ @dn = buffer.string
+ end
+
+ def initialize_string(arg)
+ @dn = arg.to_s
+ end
+
+ ##
+ # Proxy all other requests to the string object, because a DN is mainly
+ # used within the library as a string
+ # rubocop:disable GitlabSecurity/PublicSend
+ def method_missing(method, *args, &block)
+ @dn.send(method, *args, &block)
+ end
+
+ ##
+ # Redefined to be consistent with redefined `method_missing` behavior
+ def respond_to?(sym, include_private = false)
+ @dn.respond_to?(sym, include_private)
+ end
+ end
+ end
+ end
+ end
+
+ def perform(start_id, end_id)
+ return unless migrate?
+
+ ldap_identities = Identity.where("provider like 'ldap%'").where(id: start_id..end_id)
+ ldap_identities.each do |identity|
+ begin
+ identity.extern_uid = Gitlab::Auth::LDAP::DN.new(identity.extern_uid).to_normalized_s
+ unless identity.save
+ Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\". Skipping."
+ end
+ rescue Gitlab::Auth::LDAP::DN::FormatError => e
+ Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\" due to \"#{e.message}\". Skipping."
+ end
+ end
+ end
+
+ def migrate?
+ Identity.table_exists?
+ end
+ end
+ end
+end
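To illustrate what `perform` does with each identity, the copied `DN` class parses a distinguished name and re-serializes it lowercased with canonical escaping. A console sketch; the input value is made up:

    dn_class = Gitlab::BackgroundMigration::NormalizeLdapExternUidsRange::Gitlab::Auth::LDAP::DN
    dn_class.new('uid=John Smith,  ou=People, dc=example, dc=com').to_normalized_s
    # => "uid=john smith,ou=people,dc=example,dc=com"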
diff --git a/lib/gitlab/background_migration/populate_fork_networks_range.rb b/lib/gitlab/background_migration/populate_fork_networks_range.rb
new file mode 100644
index 00000000000..a976cb4c243
--- /dev/null
+++ b/lib/gitlab/background_migration/populate_fork_networks_range.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # This background migration is going to create all `fork_networks` and
+ # the `fork_network_members` for the roots of fork networks based on the
+ # existing `forked_project_links`.
+ #
+    # When the source of a fork is deleted, we create the fork network with the
+    # target project as the root. This way, when there are forks of the target
+    # project, they will be joined into the same fork network.
+ #
+    # When the `fork_networks` and memberships for the root projects are created,
+    # the `CreateForkNetworkMembershipsRange` migration is scheduled. This
+    # migration creates the memberships for all remaining forks-of-forks.
+ class PopulateForkNetworksRange
+ def perform(start_id, end_id)
+ create_fork_networks_for_existing_projects(start_id, end_id)
+ create_fork_networks_for_missing_projects(start_id, end_id)
+ create_fork_networks_memberships_for_root_projects(start_id, end_id)
+
+ delay = BackgroundMigration::CreateForkNetworkMembershipsRange::RESCHEDULE_DELAY # rubocop:disable Metrics/LineLength
+ BackgroundMigrationWorker.perform_in(
+ delay, "CreateForkNetworkMembershipsRange", [start_id, end_id]
+ )
+ end
+
+ def create_fork_networks_for_existing_projects(start_id, end_id)
+ log("Creating fork networks: #{start_id} - #{end_id}")
+ ActiveRecord::Base.connection.execute <<~INSERT_NETWORKS
+ INSERT INTO fork_networks (root_project_id)
+ SELECT DISTINCT forked_project_links.forked_from_project_id
+
+ FROM forked_project_links
+
+ -- Exclude the forks that are not the first level fork of a project
+ WHERE NOT EXISTS (
+ SELECT true
+ FROM forked_project_links inner_links
+ WHERE inner_links.forked_to_project_id = forked_project_links.forked_from_project_id
+ )
+
+ /* Exclude the ones that are already created, in case the fork network
+ was already created for another fork of the project.
+ */
+ AND NOT EXISTS (
+ SELECT true
+ FROM fork_networks
+ WHERE forked_project_links.forked_from_project_id = fork_networks.root_project_id
+ )
+
+ -- Only create a fork network for a root project that still exists
+ AND EXISTS (
+ SELECT true
+ FROM projects
+ WHERE projects.id = forked_project_links.forked_from_project_id
+ )
+ AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
+ INSERT_NETWORKS
+ end
+
+ def create_fork_networks_for_missing_projects(start_id, end_id)
+ log("Creating fork networks with missing root: #{start_id} - #{end_id}")
+ ActiveRecord::Base.connection.execute <<~INSERT_NETWORKS
+ INSERT INTO fork_networks (root_project_id)
+ SELECT DISTINCT forked_project_links.forked_to_project_id
+
+ FROM forked_project_links
+
+ -- Exclude forks that are not the root forks
+ WHERE NOT EXISTS (
+ SELECT true
+ FROM forked_project_links inner_links
+ WHERE inner_links.forked_to_project_id = forked_project_links.forked_from_project_id
+ )
+
+ /* Exclude the ones that are already created, in case this migration is
+ re-run
+ */
+ AND NOT EXISTS (
+ SELECT true
+ FROM fork_networks
+ WHERE forked_project_links.forked_to_project_id = fork_networks.root_project_id
+ )
+
+          /* Exclude the links for which the source project still exists; those
+             are processed in the previous step of this migration
+           */
+ AND NOT EXISTS (
+ SELECT true
+ FROM projects
+ WHERE projects.id = forked_project_links.forked_from_project_id
+ )
+ AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
+ INSERT_NETWORKS
+ end
+
+ def create_fork_networks_memberships_for_root_projects(start_id, end_id)
+ log("Creating memberships for root projects: #{start_id} - #{end_id}")
+
+ ActiveRecord::Base.connection.execute <<~INSERT_ROOT
+ INSERT INTO fork_network_members (fork_network_id, project_id)
+ SELECT DISTINCT fork_networks.id, fork_networks.root_project_id
+
+ FROM fork_networks
+
+          /* Joining on both forked_from- and forked_to- so we can create the
+             memberships for forks whose source was deleted
+           */
+ INNER JOIN forked_project_links
+ ON forked_project_links.forked_from_project_id = fork_networks.root_project_id
+ OR forked_project_links.forked_to_project_id = fork_networks.root_project_id
+
+ WHERE NOT EXISTS (
+ SELECT true
+ FROM fork_network_members
+ WHERE fork_network_members.project_id = fork_networks.root_project_id
+ )
+ AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
+ INSERT_ROOT
+ end
+
+ def log(message)
+ Rails.logger.info("#{self.class.name} - #{message}")
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data.rb b/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data.rb
new file mode 100644
index 00000000000..8a901a9bf39
--- /dev/null
+++ b/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+# rubocop:disable Metrics/LineLength
+# rubocop:disable Metrics/MethodLength
+# rubocop:disable Metrics/ClassLength
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class PopulateMergeRequestMetricsWithEventsData
+ def perform(min_merge_request_id, max_merge_request_id)
+ insert_metrics_for_range(min_merge_request_id, max_merge_request_id)
+ update_metrics_with_events_data(min_merge_request_id, max_merge_request_id)
+ end
+
+      # Inserts merge_request_metrics records for merge requests that do not have
+      # them yet, for a given merge request batch.
+ def insert_metrics_for_range(min, max)
+ metrics_not_exists_clause =
+ <<-SQL.strip_heredoc
+ NOT EXISTS (SELECT 1 FROM merge_request_metrics
+ WHERE merge_request_metrics.merge_request_id = merge_requests.id)
+ SQL
+
+ MergeRequest.where(metrics_not_exists_clause).where(id: min..max).each_batch do |batch|
+ select_sql = batch.select(:id, :created_at, :updated_at).to_sql
+
+ execute("INSERT INTO merge_request_metrics (merge_request_id, created_at, updated_at) #{select_sql}")
+ end
+ end
+
+ def update_metrics_with_events_data(min, max)
+ if Gitlab::Database.postgresql?
+ # Uses WITH syntax in order to update merged and closed events with a single UPDATE.
+ # WITH is not supported by MySQL.
+ update_events_for_range(min, max)
+ else
+ update_merged_events_for_range(min, max)
+ update_closed_events_for_range(min, max)
+ end
+ end
+
+ private
+
+      # Updates merge_request_metrics latest_closed_at, latest_closed_by_id and merged_by_id
+      # based on the latest event records on the events table for a given merge request batch.
+ def update_events_for_range(min, max)
+ sql = <<-SQL.strip_heredoc
+ WITH events_for_update AS (
+ SELECT DISTINCT ON (target_id, action) target_id, action, author_id, updated_at
+ FROM events
+ WHERE target_id BETWEEN #{min} AND #{max}
+ AND target_type = 'MergeRequest'
+ AND action IN (#{Event::CLOSED},#{Event::MERGED})
+ ORDER BY target_id, action, id DESC
+ )
+ UPDATE merge_request_metrics met
+ SET latest_closed_at = latest_closed.updated_at,
+ latest_closed_by_id = latest_closed.author_id,
+ merged_by_id = latest_merged.author_id
+ FROM (SELECT * FROM events_for_update WHERE action = #{Event::CLOSED}) AS latest_closed
+ FULL OUTER JOIN
+ (SELECT * FROM events_for_update WHERE action = #{Event::MERGED}) AS latest_merged
+ USING (target_id)
+ WHERE target_id = merge_request_id;
+ SQL
+
+ execute(sql)
+ end
+
+      # Updates merge_request_metrics latest_closed_at and latest_closed_by_id based on the latest closed
+      # records on the events table for a given merge request batch.
+ def update_closed_events_for_range(min, max)
+ sql =
+ <<-SQL.strip_heredoc
+ UPDATE merge_request_metrics metrics,
+ (#{select_events(min, max, Event::CLOSED)}) closed_events
+ SET metrics.latest_closed_by_id = closed_events.author_id,
+ metrics.latest_closed_at = closed_events.updated_at #{where_matches_closed_events};
+ SQL
+
+ execute(sql)
+ end
+
+      # Updates merge_request_metrics merged_by_id based on the latest merged
+      # records on the events table for a given merge request batch.
+ def update_merged_events_for_range(min, max)
+ sql =
+ <<-SQL.strip_heredoc
+ UPDATE merge_request_metrics metrics,
+ (#{select_events(min, max, Event::MERGED)}) merged_events
+ SET metrics.merged_by_id = merged_events.author_id #{where_matches_merged_events};
+ SQL
+
+ execute(sql)
+ end
+
+ def execute(sql)
+ @connection ||= ActiveRecord::Base.connection
+ @connection.execute(sql)
+ end
+
+ def select_events(min, max, action)
+ select_max_event_id = <<-SQL.strip_heredoc
+ SELECT max(id)
+ FROM events
+ WHERE action = #{action}
+ AND target_type = 'MergeRequest'
+ AND target_id BETWEEN #{min} AND #{max}
+ GROUP BY target_id
+ SQL
+
+ <<-SQL.strip_heredoc
+ SELECT author_id, updated_at, target_id
+ FROM events
+ WHERE id IN(#{select_max_event_id})
+ SQL
+ end
+
+ def where_matches_closed_events
+ <<-SQL.strip_heredoc
+ WHERE metrics.merge_request_id = closed_events.target_id
+ AND metrics.latest_closed_at IS NULL
+ AND metrics.latest_closed_by_id IS NULL
+ SQL
+ end
+
+ def where_matches_merged_events
+ <<-SQL.strip_heredoc
+ WHERE metrics.merge_request_id = merged_events.target_id
+ AND metrics.merged_by_id IS NULL
+ SQL
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id.rb b/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id.rb
new file mode 100644
index 00000000000..dcac355e1b0
--- /dev/null
+++ b/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class PopulateMergeRequestsLatestMergeRequestDiffId
+ BATCH_SIZE = 1_000
+
+ class MergeRequest < ActiveRecord::Base
+ self.table_name = 'merge_requests'
+
+ include ::EachBatch
+ end
+
+ def perform(start_id, stop_id)
+ update = '
+ latest_merge_request_diff_id = (
+ SELECT MAX(id)
+ FROM merge_request_diffs
+ WHERE merge_requests.id = merge_request_diffs.merge_request_id
+ )'.squish
+
+ MergeRequest
+ .where(id: start_id..stop_id)
+ .where(latest_merge_request_diff_id: nil)
+ .each_batch(of: BATCH_SIZE) do |relation|
+
+ relation.update_all(update)
+ end
+ end
+ end
+ end
+end
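For reference, the job can be exercised directly from a Rails console against a small ID range; each batch then issues one UPDATE using the correlated MAX(id) subquery shown above. A sketch:

    # Populate latest_merge_request_diff_id for merge requests 1..1_000.
    Gitlab::BackgroundMigration::PopulateMergeRequestsLatestMergeRequestDiffId.new.perform(1, 1_000)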
diff --git a/lib/gitlab/background_migration/populate_untracked_uploads.rb b/lib/gitlab/background_migration/populate_untracked_uploads.rb
new file mode 100644
index 00000000000..9232f20a063
--- /dev/null
+++ b/lib/gitlab/background_migration/populate_untracked_uploads.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # This class processes a batch of rows in `untracked_files_for_uploads` by
+ # adding each file to the `uploads` table if it does not exist.
+ class PopulateUntrackedUploads # rubocop:disable Metrics/ClassLength
+ def perform(start_id, end_id)
+ return unless migrate?
+
+ files = Gitlab::BackgroundMigration::PopulateUntrackedUploadsDependencies::UntrackedFile.where(id: start_id..end_id)
+ processed_files = insert_uploads_if_needed(files)
+ processed_files.delete_all
+
+ drop_temp_table_if_finished
+ end
+
+ private
+
+ def migrate?
+ Gitlab::BackgroundMigration::PopulateUntrackedUploadsDependencies::UntrackedFile.table_exists? &&
+ Gitlab::BackgroundMigration::PopulateUntrackedUploadsDependencies::Upload.table_exists?
+ end
+
+ def insert_uploads_if_needed(files)
+ filtered_files, error_files = filter_error_files(files)
+ filtered_files = filter_existing_uploads(filtered_files)
+ filtered_files = filter_deleted_models(filtered_files)
+ insert(filtered_files)
+
+ processed_files = files.where.not(id: error_files.map(&:id))
+ processed_files
+ end
+
+ def filter_error_files(files)
+ files.partition do |file|
+ begin
+ file.to_h
+ true
+ rescue => e
+ msg = <<~MSG
+ Error parsing path "#{file.path}":
+ #{e.message}
+ #{e.backtrace.join("\n ")}
+ MSG
+ Rails.logger.error(msg)
+ false
+ end
+ end
+ end
+
+ def filter_existing_uploads(files)
+ paths = files.map(&:upload_path)
+ existing_paths = Gitlab::BackgroundMigration::PopulateUntrackedUploadsDependencies::Upload.where(path: paths).pluck(:path).to_set
+
+ files.reject do |file|
+ existing_paths.include?(file.upload_path)
+ end
+ end
+
+ # There are files on disk that are not in the uploads table because their
+ # model was deleted, and we don't delete the files on disk.
+ def filter_deleted_models(files)
+ ids = deleted_model_ids(files)
+
+ files.reject do |file|
+ ids[file.model_type].include?(file.model_id)
+ end
+ end
+
+ def deleted_model_ids(files)
+ ids = {
+ 'Appearance' => [],
+ 'Namespace' => [],
+ 'Note' => [],
+ 'Project' => [],
+ 'User' => []
+ }
+
+ # group model IDs by model type
+ files.each do |file|
+ ids[file.model_type] << file.model_id
+ end
+
+ ids.each do |model_type, model_ids|
+ model_class = "Gitlab::BackgroundMigration::PopulateUntrackedUploadsDependencies::#{model_type}".constantize
+ found_ids = model_class.where(id: model_ids.uniq).pluck(:id)
+ deleted_ids = ids[model_type] - found_ids
+ ids[model_type] = deleted_ids
+ end
+
+ ids
+ end
+
+ def insert(files)
+ rows = files.map do |file|
+ file.to_h.merge(created_at: 'NOW()')
+ end
+
+ Gitlab::Database.bulk_insert('uploads',
+ rows,
+ disable_quote: :created_at)
+ end
+
+ def drop_temp_table_if_finished
+ if Gitlab::BackgroundMigration::PopulateUntrackedUploadsDependencies::UntrackedFile.all.empty? && !Rails.env.test? # Dropping a table intermittently breaks test cleanup
+ Gitlab::BackgroundMigration::PopulateUntrackedUploadsDependencies::UntrackedFile.connection.drop_table(:untracked_files_for_uploads,
+ if_exists: true)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/populate_untracked_uploads_dependencies.rb b/lib/gitlab/background_migration/populate_untracked_uploads_dependencies.rb
new file mode 100644
index 00000000000..a2c5acbde71
--- /dev/null
+++ b/lib/gitlab/background_migration/populate_untracked_uploads_dependencies.rb
@@ -0,0 +1,201 @@
+# frozen_string_literal: true
+module Gitlab
+ module BackgroundMigration
+ module PopulateUntrackedUploadsDependencies
+ # This class is responsible for producing the attributes necessary to
+ # track an uploaded file in the `uploads` table.
+ class UntrackedFile < ActiveRecord::Base # rubocop:disable Metrics/ClassLength, Metrics/LineLength
+ self.table_name = 'untracked_files_for_uploads'
+
+ # Ends with /:random_hex/:filename
+ FILE_UPLOADER_PATH = %r{/\h+/[^/]+\z}
+ FULL_PATH_CAPTURE = /\A(.+)#{FILE_UPLOADER_PATH}/
+
+        # These regex patterns are tested against a path relative to the upload
+        # directory.
+        # For convenience, if a pattern contains a capture group, the capture
+        # indicates the model_id.
+ PATH_PATTERNS = [
+ {
+ pattern: %r{\A-/system/appearance/logo/(\d+)/},
+ uploader: 'AttachmentUploader',
+ model_type: 'Appearance'
+ },
+ {
+ pattern: %r{\A-/system/appearance/header_logo/(\d+)/},
+ uploader: 'AttachmentUploader',
+ model_type: 'Appearance'
+ },
+ {
+ pattern: %r{\A-/system/note/attachment/(\d+)/},
+ uploader: 'AttachmentUploader',
+ model_type: 'Note'
+ },
+ {
+ pattern: %r{\A-/system/user/avatar/(\d+)/},
+ uploader: 'AvatarUploader',
+ model_type: 'User'
+ },
+ {
+ pattern: %r{\A-/system/group/avatar/(\d+)/},
+ uploader: 'AvatarUploader',
+ model_type: 'Namespace'
+ },
+ {
+ pattern: %r{\A-/system/project/avatar/(\d+)/},
+ uploader: 'AvatarUploader',
+ model_type: 'Project'
+ },
+ {
+ pattern: FILE_UPLOADER_PATH,
+ uploader: 'FileUploader',
+ model_type: 'Project'
+ }
+ ].freeze
+
+ def to_h
+ @upload_hash ||= {
+ path: upload_path,
+ uploader: uploader,
+ model_type: model_type,
+ model_id: model_id,
+ size: file_size,
+ checksum: checksum
+ }
+ end
+
+ def upload_path
+ # UntrackedFile#path is absolute, but Upload#path depends on uploader
+ @upload_path ||=
+ if uploader == 'FileUploader'
+ # Path relative to project directory in uploads
+ matchd = path_relative_to_upload_dir.match(FILE_UPLOADER_PATH)
+ matchd[0].sub(%r{\A/}, '') # remove leading slash
+ else
+ path
+ end
+ end
+
+ def uploader
+ matching_pattern_map[:uploader]
+ end
+
+ def model_type
+ matching_pattern_map[:model_type]
+ end
+
+ def model_id
+ return @model_id if defined?(@model_id)
+
+ pattern = matching_pattern_map[:pattern]
+ matchd = path_relative_to_upload_dir.match(pattern)
+
+ # If something is captured (matchd[1] is not nil), it is a model_id
+ # Only the FileUploader pattern will not match an ID
+ @model_id = matchd[1] ? matchd[1].to_i : file_uploader_model_id
+ end
+
+ def file_size
+ File.size(absolute_path)
+ end
+
+ def checksum
+ Digest::SHA256.file(absolute_path).hexdigest
+ end
+
+ private
+
+ def matching_pattern_map
+ @matching_pattern_map ||= PATH_PATTERNS.find do |path_pattern_map|
+ path_relative_to_upload_dir.match(path_pattern_map[:pattern])
+ end
+
+ unless @matching_pattern_map
+ raise "Unknown upload path pattern \"#{path}\""
+ end
+
+ @matching_pattern_map
+ end
+
+ def file_uploader_model_id
+ matchd = path_relative_to_upload_dir.match(FULL_PATH_CAPTURE)
+ not_found_msg = <<~MSG
+ Could not capture project full_path from a FileUploader path:
+ "#{path_relative_to_upload_dir}"
+ MSG
+ raise not_found_msg unless matchd
+
+ full_path = matchd[1]
+ project = Gitlab::BackgroundMigration::PopulateUntrackedUploadsDependencies::Project.find_by_full_path(full_path)
+ return nil unless project
+
+ project.id
+ end
+
+ # Not including a leading slash
+ def path_relative_to_upload_dir
+ upload_dir = Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR # rubocop:disable Metrics/LineLength
+ base = %r{\A#{Regexp.escape(upload_dir)}/}
+ @path_relative_to_upload_dir ||= path.sub(base, '')
+ end
+
+ def absolute_path
+ File.join(Gitlab.config.uploads.storage_path, path)
+ end
+ end
+
+ # Avoid using application code
+ class Upload < ActiveRecord::Base
+ self.table_name = 'uploads'
+ end
+
+ # Avoid using application code
+ class Appearance < ActiveRecord::Base
+ self.table_name = 'appearances'
+ end
+
+ # Avoid using application code
+ class Namespace < ActiveRecord::Base
+ self.table_name = 'namespaces'
+ end
+
+ # Avoid using application code
+ class Note < ActiveRecord::Base
+ self.table_name = 'notes'
+ end
+
+ # Avoid using application code
+ class User < ActiveRecord::Base
+ self.table_name = 'users'
+ end
+
+      # Since project Markdown upload paths don't contain the project ID, we have to find the
+      # project by its full_path. Due to MySQL/PostgreSQL differences and historical reasons,
+      # the logic is somewhat complex, so it is mostly copied here.
+ class Project < ActiveRecord::Base
+ self.table_name = 'projects'
+
+ def self.find_by_full_path(path)
+ binary = Gitlab::Database.mysql? ? 'BINARY' : ''
+ order_sql = "(CASE WHEN #{binary} routes.path = #{connection.quote(path)} THEN 0 ELSE 1 END)"
+ where_full_path_in(path).reorder(order_sql).take
+ end
+
+ def self.where_full_path_in(path)
+ cast_lower = Gitlab::Database.postgresql?
+
+ path = connection.quote(path)
+
+ where =
+ if cast_lower
+ "(LOWER(routes.path) = LOWER(#{path}))"
+ else
+ "(routes.path = #{path})"
+ end
+
+ joins("INNER JOIN routes ON routes.source_id = projects.id AND routes.source_type = 'Project'").where(where)
+ end
+ end
+ end
+ end
+end
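To illustrate the PATH_PATTERNS table, a row whose path follows the user-avatar layout resolves to an AvatarUploader upload attached to a User. A sketch, assuming the temporary table exists; `file_size` and `checksum` read the file from disk and are left out:

    file = Gitlab::BackgroundMigration::PopulateUntrackedUploadsDependencies::UntrackedFile
             .new(path: 'uploads/-/system/user/avatar/42/avatar.png')

    file.uploader   # => "AvatarUploader"
    file.model_type # => "User"
    file.model_id   # => 42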
diff --git a/lib/gitlab/background_migration/prepare_untracked_uploads.rb b/lib/gitlab/background_migration/prepare_untracked_uploads.rb
new file mode 100644
index 00000000000..914a9e48a2f
--- /dev/null
+++ b/lib/gitlab/background_migration/prepare_untracked_uploads.rb
@@ -0,0 +1,181 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+    # This class finds all non-hashed uploaded file paths and saves them to the
+    # `untracked_files_for_uploads` table.
+ class PrepareUntrackedUploads # rubocop:disable Metrics/ClassLength
+ # For bulk_queue_background_migration_jobs_by_range
+ include Database::MigrationHelpers
+ include ::Gitlab::Utils::StrongMemoize
+
+ FIND_BATCH_SIZE = 500
+ RELATIVE_UPLOAD_DIR = "uploads".freeze
+ ABSOLUTE_UPLOAD_DIR = File.join(
+ Gitlab.config.uploads.storage_path,
+ RELATIVE_UPLOAD_DIR
+ )
+ FOLLOW_UP_MIGRATION = 'PopulateUntrackedUploads'.freeze
+ START_WITH_ROOT_REGEX = %r{\A#{Gitlab.config.uploads.storage_path}/}
+ EXCLUDED_HASHED_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/@hashed/*".freeze
+ EXCLUDED_TMP_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/tmp/*".freeze
+
+ # This class is used to iterate over batches of
+ # `untracked_files_for_uploads` rows.
+ class UntrackedFile < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'untracked_files_for_uploads'
+ end
+
+ def perform
+ ensure_temporary_tracking_table_exists
+
+ # Since Postgres < 9.5 does not have ON CONFLICT DO NOTHING, and since
+ # doing inserts-if-not-exists without ON CONFLICT DO NOTHING would be
+ # slow, start with an empty table for Postgres < 9.5.
+ # That way we can do bulk inserts at ~30x the speed of individual
+ # inserts (~20 minutes worth of inserts at GitLab.com scale instead of
+ # ~10 hours).
+ # In all other cases, installations will get both bulk inserts and the
+ # ability for these jobs to retry without having to clear and reinsert.
+ clear_untracked_file_paths unless can_bulk_insert_and_ignore_duplicates?
+
+ store_untracked_file_paths
+
+ if UntrackedFile.all.empty?
+ drop_temp_table
+ else
+ schedule_populate_untracked_uploads_jobs
+ end
+ end
+
+ private
+
+ def ensure_temporary_tracking_table_exists
+ table_name = :untracked_files_for_uploads
+ unless UntrackedFile.connection.table_exists?(table_name)
+ UntrackedFile.connection.create_table table_name do |t|
+ t.string :path, limit: 600, null: false
+ t.index :path, unique: true
+ end
+ end
+ end
+
+ def clear_untracked_file_paths
+ UntrackedFile.delete_all
+ end
+
+ def store_untracked_file_paths
+ return unless Dir.exist?(ABSOLUTE_UPLOAD_DIR)
+
+ each_file_batch(ABSOLUTE_UPLOAD_DIR, FIND_BATCH_SIZE) do |file_paths|
+ insert_file_paths(file_paths)
+ end
+ end
+
+ def each_file_batch(search_dir, batch_size, &block)
+ cmd = build_find_command(search_dir)
+
+ Open3.popen2(*cmd) do |stdin, stdout, status_thread|
+ yield_paths_in_batches(stdout, batch_size, &block)
+
+ raise "Find command failed" unless status_thread.value.success?
+ end
+ end
+
+ def yield_paths_in_batches(stdout, batch_size, &block)
+ paths = []
+
+ stdout.each_line("\0") do |line|
+ paths << line.chomp("\0").sub(START_WITH_ROOT_REGEX, '')
+
+ if paths.size >= batch_size
+ yield(paths)
+ paths = []
+ end
+ end
+
+ yield(paths) if paths.any?
+ end
+
+ def build_find_command(search_dir)
+ cmd = %W[find -L #{search_dir}
+ -type f
+ ! ( -path #{EXCLUDED_HASHED_UPLOADS_PATH} -prune )
+ ! ( -path #{EXCLUDED_TMP_UPLOADS_PATH} -prune )
+ -print0]
+
+ ionice = which_ionice
+ cmd = %W[#{ionice} -c Idle] + cmd if ionice
+
+ log_msg = "PrepareUntrackedUploads find command: \"#{cmd.join(' ')}\""
+ Rails.logger.info log_msg
+
+ cmd
+ end
+
+ def which_ionice
+ Gitlab::Utils.which('ionice')
+ rescue StandardError
+ # In this case, returning false is relatively safe,
+ # even though it isn't very nice
+ false
+ end
+
+ def insert_file_paths(file_paths)
+ sql = insert_sql(file_paths)
+
+ ActiveRecord::Base.connection.execute(sql)
+ end
+
+ def insert_sql(file_paths)
+ if postgresql_pre_9_5?
+ "INSERT INTO #{table_columns_and_values_for_insert(file_paths)};"
+ elsif postgresql?
+ "INSERT INTO #{table_columns_and_values_for_insert(file_paths)}"\
+ " ON CONFLICT DO NOTHING;"
+ else # MySQL
+ "INSERT IGNORE INTO"\
+ " #{table_columns_and_values_for_insert(file_paths)};"
+ end
+ end
+
+ def table_columns_and_values_for_insert(file_paths)
+ values = file_paths.map do |file_path|
+ ActiveRecord::Base.send(:sanitize_sql_array, ['(?)', file_path]) # rubocop:disable GitlabSecurity/PublicSend, Metrics/LineLength
+ end.join(', ')
+
+ "#{UntrackedFile.table_name} (path) VALUES #{values}"
+ end
+
+ def postgresql?
+ strong_memoize(:postgresql) do
+ Gitlab::Database.postgresql?
+ end
+ end
+
+ def can_bulk_insert_and_ignore_duplicates?
+ !postgresql_pre_9_5?
+ end
+
+ def postgresql_pre_9_5?
+ strong_memoize(:postgresql_pre_9_5) do
+ postgresql? && Gitlab::Database.version.to_f < 9.5
+ end
+ end
+
+ def schedule_populate_untracked_uploads_jobs
+ bulk_queue_background_migration_jobs_by_range(
+ UntrackedFile, FOLLOW_UP_MIGRATION)
+ end
+
+ def drop_temp_table
+ unless Rails.env.test? # Dropping a table intermittently breaks test cleanup
+ UntrackedFile.connection.drop_table(:untracked_files_for_uploads,
+ if_exists: true)
+ end
+ end
+ end
+ end
+end
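Putting the constants above together, the command built by `build_find_command` (before the optional `ionice -c Idle` prefix) has this shape for a hypothetical storage path of `/home/git/gitlab/public`:

    # Searches the uploads directory, pruning hashed and tmp uploads, and
    # prints NUL-delimited paths that each_file_batch reads in batches of 500.
    %w[find -L /home/git/gitlab/public/uploads
       -type f
       ! ( -path /home/git/gitlab/public/uploads/@hashed/* -prune )
       ! ( -path /home/git/gitlab/public/uploads/tmp/* -prune )
       -print0]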
diff --git a/lib/gitlab/badge/coverage/report.rb b/lib/gitlab/badge/coverage/report.rb
index 9a0482306b7..778d78185ff 100644
--- a/lib/gitlab/badge/coverage/report.rb
+++ b/lib/gitlab/badge/coverage/report.rb
@@ -23,7 +23,7 @@ module Gitlab
@coverage ||= raw_coverage
return unless @coverage
- @coverage.to_i
+ @coverage.to_f.round(2)
end
def metadata
diff --git a/lib/gitlab/badge/coverage/template.rb b/lib/gitlab/badge/coverage/template.rb
index fcecb1d9665..afbf9dd17e3 100644
--- a/lib/gitlab/badge/coverage/template.rb
+++ b/lib/gitlab/badge/coverage/template.rb
@@ -25,7 +25,7 @@ module Gitlab
end
def value_text
- @status ? "#{@status}%" : 'unknown'
+ @status ? ("%.2f%%" % @status) : 'unknown'
end
def key_width
@@ -33,7 +33,7 @@ module Gitlab
end
def value_width
- @status ? 36 : 58
+ @status ? 54 : 58
end
def value_color
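The two hunks above switch the coverage badge from integer truncation to a two-decimal display (and widen the value area accordingly); the difference in plain Ruby:

    coverage = 95.678

    coverage.to_i                      # => 95       (old badge value)
    coverage.to_f.round(2)             # => 95.68    (new report value)
    "%.2f%%" % coverage.to_f.round(2)  # => "95.68%" (new badge text)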
diff --git a/lib/gitlab/bare_repository_import/importer.rb b/lib/gitlab/bare_repository_import/importer.rb
new file mode 100644
index 00000000000..884a3de8f62
--- /dev/null
+++ b/lib/gitlab/bare_repository_import/importer.rb
@@ -0,0 +1,106 @@
+module Gitlab
+ module BareRepositoryImport
+ class Importer
+ NoAdminError = Class.new(StandardError)
+
+ def self.execute(import_path)
+ import_path << '/' unless import_path.ends_with?('/')
+ repos_to_import = Dir.glob(import_path + '**/*.git')
+
+ unless user = User.admins.order_id_asc.first
+ raise NoAdminError.new('No admin user found to import repositories')
+ end
+
+ repos_to_import.each do |repo_path|
+ bare_repo = Gitlab::BareRepositoryImport::Repository.new(import_path, repo_path)
+
+ unless bare_repo.processable?
+ log " * Skipping repo #{bare_repo.repo_path}".color(:yellow)
+
+ next
+ end
+
+ log "Processing #{repo_path}".color(:yellow)
+
+ new(user, bare_repo).create_project_if_needed
+ end
+ end
+
+ attr_reader :user, :project_name, :bare_repo
+
+ delegate :log, to: :class
+ delegate :project_name, :project_full_path, :group_path, :repo_path, :wiki_path, to: :bare_repo
+
+ def initialize(user, bare_repo)
+ @user = user
+ @bare_repo = bare_repo
+ end
+
+ def create_project_if_needed
+ if project = Project.find_by_full_path(project_full_path)
+ log " * #{project.name} (#{project_full_path}) exists"
+
+ return project
+ end
+
+ create_project
+ end
+
+ private
+
+ def create_project
+ group = find_or_create_groups
+
+ project = Projects::CreateService.new(user,
+ name: project_name,
+ path: project_name,
+ skip_disk_validation: true,
+ skip_wiki: bare_repo.wiki_exists?,
+ import_type: 'bare_repository',
+ namespace_id: group&.id).execute
+
+ if project.persisted? && mv_repo(project)
+ log " * Created #{project.name} (#{project_full_path})".color(:green)
+
+ project.write_repository_config
+ project.repository.create_hooks
+
+ ProjectCacheWorker.perform_async(project.id)
+ else
+ log " * Failed trying to create #{project.name} (#{project_full_path})".color(:red)
+ log " Errors: #{project.errors.messages}".color(:red) if project.errors.any?
+ end
+
+ project
+ end
+
+ def mv_repo(project)
+ FileUtils.mv(repo_path, File.join(project.repository_storage_path, project.disk_path + '.git'))
+
+ if bare_repo.wiki_exists?
+ FileUtils.mv(wiki_path, File.join(project.repository_storage_path, project.disk_path + '.wiki.git'))
+ end
+
+ true
+ rescue => e
+ log " * Failed to move repo: #{e.message}".color(:red)
+
+ false
+ end
+
+ def find_or_create_groups
+ return nil unless group_path.present?
+
+ log " * Using namespace: #{group_path}"
+
+ Groups::NestedCreateService.new(user, group_path: group_path).execute
+ end
+
+ # This is called from within a rake task only used by Admins, so allow writing
+ # to STDOUT
+ def self.log(message)
+ puts message # rubocop:disable Rails/Output
+ end
+ end
+ end
+end
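The importer above is meant to be driven from an admin rake task; a minimal invocation sketch, with a hypothetical repositories path:

    # Imports every "*.git" repository found under the given directory,
    # creating groups and projects as needed. Requires at least one admin user.
    Gitlab::BareRepositoryImport::Importer.execute('/var/opt/gitlab/git-data/repositories')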
diff --git a/lib/gitlab/bare_repository_import/repository.rb b/lib/gitlab/bare_repository_import/repository.rb
new file mode 100644
index 00000000000..fe267248275
--- /dev/null
+++ b/lib/gitlab/bare_repository_import/repository.rb
@@ -0,0 +1,69 @@
+# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/953
+#
+module Gitlab
+ module BareRepositoryImport
+ class Repository
+ include ::Gitlab::Utils::StrongMemoize
+
+ attr_reader :group_path, :project_name, :repo_path
+
+ def initialize(root_path, repo_path)
+ @root_path = root_path
+ @repo_path = repo_path
+ @root_path << '/' unless root_path.ends_with?('/')
+
+ full_path =
+ if hashed? && !wiki?
+ repository.config.get('gitlab.fullpath')
+ else
+ repo_relative_path
+ end
+
+ # Split path into 'all/the/namespaces' and 'project_name'
+ @group_path, _, @project_name = full_path.to_s.rpartition('/')
+ end
+
+ def wiki_exists?
+ File.exist?(wiki_path)
+ end
+
+ def wiki_path
+ @wiki_path ||= repo_path.sub(/\.git$/, '.wiki.git')
+ end
+
+ def project_full_path
+ @project_full_path ||= "#{group_path}/#{project_name}"
+ end
+
+ def processable?
+ return false if wiki?
+ return false if hashed? && (group_path.blank? || project_name.blank?)
+
+ true
+ end
+
+ private
+
+ def wiki?
+ strong_memoize(:wiki) do
+ repo_path.end_with?('.wiki.git')
+ end
+ end
+
+ def hashed?
+ strong_memoize(:hashed) do
+ repo_relative_path.include?('@hashed')
+ end
+ end
+
+ def repo_relative_path
+ # Remove root path and `.git` at the end
+ repo_path[@root_path.size...-4]
+ end
+
+ def repository
+ @repository ||= Rugged::Repository.new(repo_path)
+ end
+ end
+ end
+end
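For a legacy (non-hashed) repository, the constructor above derives the namespace and project name purely from the on-disk path; a quick sketch with made-up paths:

    repo = Gitlab::BareRepositoryImport::Repository.new(
      '/storage/repositories/',
      '/storage/repositories/group/subgroup/my-project.git'
    )

    repo.group_path        # => "group/subgroup"
    repo.project_name      # => "my-project"
    repo.project_full_path # => "group/subgroup/my-project"
    repo.wiki_path         # => "/storage/repositories/group/subgroup/my-project.wiki.git"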
diff --git a/lib/gitlab/bare_repository_importer.rb b/lib/gitlab/bare_repository_importer.rb
deleted file mode 100644
index 9323bfc7fb2..00000000000
--- a/lib/gitlab/bare_repository_importer.rb
+++ /dev/null
@@ -1,96 +0,0 @@
-module Gitlab
- class BareRepositoryImporter
- NoAdminError = Class.new(StandardError)
-
- def self.execute
- Gitlab.config.repositories.storages.each do |storage_name, repository_storage|
- git_base_path = repository_storage['path']
- repos_to_import = Dir.glob(git_base_path + '/**/*.git')
-
- repos_to_import.each do |repo_path|
- if repo_path.end_with?('.wiki.git')
- log " * Skipping wiki repo"
- next
- end
-
- log "Processing #{repo_path}".color(:yellow)
-
- repo_relative_path = repo_path[repository_storage['path'].length..-1]
- .sub(/^\//, '') # Remove leading `/`
- .sub(/\.git$/, '') # Remove `.git` at the end
- new(storage_name, repo_relative_path).create_project_if_needed
- end
- end
- end
-
- attr_reader :storage_name, :full_path, :group_path, :project_path, :user
- delegate :log, to: :class
-
- def initialize(storage_name, repo_path)
- @storage_name = storage_name
- @full_path = repo_path
-
- unless @user = User.admins.order_id_asc.first
- raise NoAdminError.new('No admin user found to import repositories')
- end
-
- @group_path, @project_path = File.split(repo_path)
- @group_path = nil if @group_path == '.'
- end
-
- def create_project_if_needed
- if project = Project.find_by_full_path(full_path)
- log " * #{project.name} (#{full_path}) exists"
- return project
- end
-
- create_project
- end
-
- private
-
- def create_project
- group = find_or_create_group
-
- project_params = {
- name: project_path,
- path: project_path,
- repository_storage: storage_name,
- namespace_id: group&.id
- }
-
- project = Projects::CreateService.new(user, project_params).execute
-
- if project.persisted?
- log " * Created #{project.name} (#{full_path})".color(:green)
- ProjectCacheWorker.perform_async(project.id)
- else
- log " * Failed trying to create #{project.name} (#{full_path})".color(:red)
- log " Errors: #{project.errors.messages}".color(:red)
- end
-
- project
- end
-
- def find_or_create_group
- return nil unless group_path
-
- if namespace = Namespace.find_by_full_path(group_path)
- log " * Namespace #{group_path} exists.".color(:green)
- return namespace
- end
-
- log " * Creating Group: #{group_path}"
- Groups::NestedCreateService.new(user, group_path: group_path).execute
- end
-
- # This is called from within a rake task only used by Admins, so allow writing
- # to STDOUT
- #
- # rubocop:disable Rails/Output
- def self.log(message)
- puts message
- end
- # rubocop:enable Rails/Output
- end
-end
diff --git a/lib/gitlab/bitbucket_import/importer.rb b/lib/gitlab/bitbucket_import/importer.rb
index 28bbf3b384e..d48ae17aeaf 100644
--- a/lib/gitlab/bitbucket_import/importer.rb
+++ b/lib/gitlab/bitbucket_import/importer.rb
@@ -61,9 +61,9 @@ module Gitlab
def import_wiki
return if project.wiki.repository_exists?
- path_with_namespace = "#{project.full_path}.wiki"
+ disk_path = project.wiki.disk_path
import_url = project.import_url.sub(/\.git\z/, ".git/wiki")
- gitlab_shell.import_repository(project.repository_storage_path, path_with_namespace, import_url)
+ gitlab_shell.import_repository(project.repository_storage_path, disk_path, import_url)
rescue StandardError => e
errors << { type: :wiki, errors: e.message }
end
@@ -149,16 +149,21 @@ module Gitlab
description += @formatter.author_line(pull_request.author) unless find_user_id(pull_request.author)
description += pull_request.description
+ source_branch_sha = pull_request.source_branch_sha
+ target_branch_sha = pull_request.target_branch_sha
+ source_branch_sha = project.repository.commit(source_branch_sha)&.sha || source_branch_sha
+ target_branch_sha = project.repository.commit(target_branch_sha)&.sha || target_branch_sha
+
merge_request = project.merge_requests.create!(
iid: pull_request.iid,
title: pull_request.title,
description: description,
source_project: project,
source_branch: pull_request.source_branch_name,
- source_branch_sha: pull_request.source_branch_sha,
+ source_branch_sha: source_branch_sha,
target_project: project,
target_branch: pull_request.target_branch_name,
- target_branch_sha: pull_request.target_branch_sha,
+ target_branch_sha: target_branch_sha,
state: pull_request.state,
author_id: gitlab_user_id(project, pull_request.author),
assignee_id: nil,
@@ -236,7 +241,7 @@ module Gitlab
end
def generate_line_code(pr_comment)
- Gitlab::Diff::LineCode.generate(pr_comment.file_path, pr_comment.new_pos, pr_comment.old_pos)
+ Gitlab::Git.diff_line_code(pr_comment.file_path, pr_comment.new_pos, pr_comment.old_pos)
end
def pull_request_comment_attributes(comment)
diff --git a/lib/gitlab/cache/request_cache.rb b/lib/gitlab/cache/request_cache.rb
index 754a45c3257..ecc85f847d4 100644
--- a/lib/gitlab/cache/request_cache.rb
+++ b/lib/gitlab/cache/request_cache.rb
@@ -45,11 +45,13 @@ module Gitlab
klass.prepend(extension)
end
+ attr_accessor :request_cache_key_block
+
def request_cache_key(&block)
if block_given?
- @request_cache_key = block
+ self.request_cache_key_block = block
else
- @request_cache_key
+ request_cache_key_block
end
end
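For context, `request_cache_key` is the class-level DSL used by classes that extend this concern to build the per-request cache key; a usage sketch in which the class and method names are illustrative and the exact wiring is inferred from the hunk above:

    class ParticipationChecker
      extend Gitlab::Cache::RequestCache

      request_cache_key do
        [user&.id, project&.id]
      end

      # Memoized for the duration of the request, keyed by the block above
      # plus the method arguments.
      request_cache def participant?(target)
        target.participants.include?(user)
      end

      attr_reader :user, :project

      def initialize(user, project)
        @user = user
        @project = project
      end
    end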
diff --git a/lib/gitlab/changes_list.rb b/lib/gitlab/changes_list.rb
index 5b32fca00a4..9c9e6668e6f 100644
--- a/lib/gitlab/changes_list.rb
+++ b/lib/gitlab/changes_list.rb
@@ -16,6 +16,7 @@ module Gitlab
@changes ||= begin
@raw_changes.map do |change|
next if change.blank?
+
oldrev, newrev, ref = change.strip.split(' ')
{ oldrev: oldrev, newrev: newrev, ref: ref }
end.compact
diff --git a/lib/gitlab/checks/change_access.rb b/lib/gitlab/checks/change_access.rb
index b6805230348..3ce5f807989 100644
--- a/lib/gitlab/checks/change_access.rb
+++ b/lib/gitlab/checks/change_access.rb
@@ -12,14 +12,15 @@ module Gitlab
change_existing_tags: 'You are not allowed to change existing tags on this project.',
update_protected_tag: 'Protected tags cannot be updated.',
delete_protected_tag: 'Protected tags cannot be deleted.',
- create_protected_tag: 'You are not allowed to create this tag as it is protected.'
+ create_protected_tag: 'You are not allowed to create this tag as it is protected.',
+ lfs_objects_missing: 'LFS objects are missing. Ensure LFS is properly set up or try a manual "git lfs push --all".'
}.freeze
- attr_reader :user_access, :project, :skip_authorization, :protocol
+ attr_reader :user_access, :project, :skip_authorization, :skip_lfs_integrity_check, :protocol, :oldrev, :newrev, :ref, :branch_name, :tag_name
def initialize(
change, user_access:, project:, skip_authorization: false,
- protocol:
+ skip_lfs_integrity_check: false, protocol:
)
@oldrev, @newrev, @ref = change.values_at(:oldrev, :newrev, :ref)
@branch_name = Gitlab::Git.branch_name(@ref)
@@ -27,15 +28,18 @@ module Gitlab
@user_access = user_access
@project = project
@skip_authorization = skip_authorization
+ @skip_lfs_integrity_check = skip_lfs_integrity_check
@protocol = protocol
end
- def exec
+ def exec(skip_commits_check: false)
return true if skip_authorization
push_checks
branch_checks
tag_checks
+ lfs_objects_exist_check unless skip_lfs_integrity_check
+ commits_check unless skip_commits_check
true
end
@@ -49,9 +53,9 @@ module Gitlab
end
def branch_checks
- return unless @branch_name
+ return unless branch_name
- if deletion? && @branch_name == project.default_branch
+ if deletion? && branch_name == project.default_branch
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:delete_default_branch]
end
@@ -59,7 +63,7 @@ module Gitlab
end
def protected_branch_checks
- return unless ProtectedBranch.protected?(project, @branch_name)
+ return unless ProtectedBranch.protected?(project, branch_name)
if forced_push?
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:force_push_protected_branch]
@@ -73,29 +77,29 @@ module Gitlab
end
def protected_branch_deletion_checks
- unless user_access.can_delete_branch?(@branch_name)
+ unless user_access.can_delete_branch?(branch_name)
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:non_master_delete_protected_branch]
end
- unless protocol == 'web'
+ unless updated_from_web?
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:non_web_delete_protected_branch]
end
end
def protected_branch_push_checks
if matching_merge_request?
- unless user_access.can_merge_to_branch?(@branch_name) || user_access.can_push_to_branch?(@branch_name)
+ unless user_access.can_merge_to_branch?(branch_name) || user_access.can_push_to_branch?(branch_name)
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:merge_protected_branch]
end
else
- unless user_access.can_push_to_branch?(@branch_name)
+ unless user_access.can_push_to_branch?(branch_name)
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:push_protected_branch]
end
end
end
def tag_checks
- return unless @tag_name
+ return unless tag_name
if tag_exists? && user_access.cannot_do_action?(:admin_project)
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:change_existing_tags]
@@ -105,36 +109,79 @@ module Gitlab
end
def protected_tag_checks
- return unless ProtectedTag.protected?(project, @tag_name)
+ return unless ProtectedTag.protected?(project, tag_name)
raise(GitAccess::UnauthorizedError, ERROR_MESSAGES[:update_protected_tag]) if update?
raise(GitAccess::UnauthorizedError, ERROR_MESSAGES[:delete_protected_tag]) if deletion?
- unless user_access.can_create_tag?(@tag_name)
+ unless user_access.can_create_tag?(tag_name)
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:create_protected_tag]
end
end
+ def commits_check
+ return if deletion? || newrev.nil?
+ return unless should_run_commit_validations?
+
+ # n+1: https://gitlab.com/gitlab-org/gitlab-ee/issues/3593
+ ::Gitlab::GitalyClient.allow_n_plus_1_calls do
+ commits.each do |commit|
+ commit_check.validate(commit, validations_for_commit(commit))
+ end
+ end
+
+ commit_check.validate_file_paths
+ end
+
+ # Method overwritten in EE to inject custom validations
+ def validations_for_commit(_)
+ []
+ end
+
private
+ def should_run_commit_validations?
+ commit_check.validate_lfs_file_locks?
+ end
+
+ def updated_from_web?
+ protocol == 'web'
+ end
+
def tag_exists?
- project.repository.tag_exists?(@tag_name)
+ project.repository.tag_exists?(tag_name)
end
def forced_push?
- Gitlab::Checks::ForcePush.force_push?(@project, @oldrev, @newrev)
+ Gitlab::Checks::ForcePush.force_push?(project, oldrev, newrev)
end
def update?
- !Gitlab::Git.blank_ref?(@oldrev) && !deletion?
+ !Gitlab::Git.blank_ref?(oldrev) && !deletion?
end
def deletion?
- Gitlab::Git.blank_ref?(@newrev)
+ Gitlab::Git.blank_ref?(newrev)
end
def matching_merge_request?
- Checks::MatchingMergeRequest.new(@newrev, @branch_name, @project).match?
+ Checks::MatchingMergeRequest.new(newrev, branch_name, project).match?
+ end
+
+ def lfs_objects_exist_check
+ lfs_check = Checks::LfsIntegrity.new(project, newrev)
+
+ if lfs_check.objects_missing?
+ raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:lfs_objects_missing]
+ end
+ end
+
+ def commit_check
+ @commit_check ||= Gitlab::Checks::CommitCheck.new(project, user_access.user, newrev, oldrev)
+ end
+
+ def commits
+ @commits ||= project.repository.new_commits(newrev)
end
end
end
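A minimal sketch (not part of the patch) of how the reworked ChangeAccess is meant to be invoked; `user_access` and `project` are assumed to be in scope, and the flag values are illustrative only:

    change = { oldrev: 'a' * 40, newrev: 'b' * 40, ref: 'refs/heads/master' }

    Gitlab::Checks::ChangeAccess.new(
      change,
      user_access: user_access,
      project: project,
      protocol: 'ssh',
      skip_lfs_integrity_check: false
    ).exec(skip_commits_check: true)

The new keyword argument and the `skip_commits_check:` flag let internal callers opt out of the LFS integrity and commit validations added above.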
diff --git a/lib/gitlab/checks/commit_check.rb b/lib/gitlab/checks/commit_check.rb
new file mode 100644
index 00000000000..43a52b493bb
--- /dev/null
+++ b/lib/gitlab/checks/commit_check.rb
@@ -0,0 +1,61 @@
+module Gitlab
+ module Checks
+ class CommitCheck
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :project, :user, :newrev, :oldrev
+
+ def initialize(project, user, newrev, oldrev)
+ @project = project
+ @user = user
+ @newrev = newrev
+ @oldrev = oldrev
+ @file_paths = []
+ end
+
+ def validate(commit, validations)
+ return if validations.empty? && path_validations.empty?
+
+ commit.raw_deltas.each do |diff|
+ @file_paths << (diff.new_path || diff.old_path)
+
+ validations.each do |validation|
+ if error = validation.call(diff)
+ raise ::Gitlab::GitAccess::UnauthorizedError, error
+ end
+ end
+ end
+ end
+
+ def validate_file_paths
+ path_validations.each do |validation|
+ if error = validation.call(@file_paths)
+ raise ::Gitlab::GitAccess::UnauthorizedError, error
+ end
+ end
+ end
+
+ def validate_lfs_file_locks?
+ strong_memoize(:validate_lfs_file_locks) do
+ project.lfs_enabled? && project.lfs_file_locks.any? && newrev && oldrev
+ end
+ end
+
+ private
+
+ def lfs_file_locks_validation
+ lambda do |paths|
+ lfs_lock = project.lfs_file_locks.where(path: paths).where.not(user_id: user.id).first
+
+ if lfs_lock
+ return "The path '#{lfs_lock.path}' is locked in Git LFS by #{lfs_lock.user.name}"
+ end
+ end
+ end
+
+ def path_validations
+ validate_lfs_file_locks? ? [lfs_file_locks_validation] : []
+ end
+ end
+ end
+end
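A hedged sketch of how the LFS lock validation above is expected to flow; `pusher`, `commits`, and the revisions are placeholders, not names from the patch:

    check = Gitlab::Checks::CommitCheck.new(project, pusher, newrev, oldrev)

    # Each validated commit contributes the paths it touches to @file_paths ...
    commits.each { |commit| check.validate(commit, []) }

    # ... and the collected paths are then checked against LFS locks held by other users.
    check.validate_file_paths  # raises Gitlab::GitAccess::UnauthorizedError if a path is locked by someone else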
diff --git a/lib/gitlab/checks/force_push.rb b/lib/gitlab/checks/force_push.rb
index dc5d285ea65..c9c3050cfc2 100644
--- a/lib/gitlab/checks/force_push.rb
+++ b/lib/gitlab/checks/force_push.rb
@@ -15,8 +15,8 @@ module Gitlab
.ancestor?(oldrev, newrev)
else
Gitlab::Git::RevList.new(
- path_to_repo: project.repository.path_to_repo,
- oldrev: oldrev, newrev: newrev).missed_ref.present?
+ project.repository.raw, oldrev: oldrev, newrev: newrev
+ ).missed_ref.present?
end
end
end
diff --git a/lib/gitlab/checks/lfs_integrity.rb b/lib/gitlab/checks/lfs_integrity.rb
new file mode 100644
index 00000000000..f7276a380dc
--- /dev/null
+++ b/lib/gitlab/checks/lfs_integrity.rb
@@ -0,0 +1,27 @@
+module Gitlab
+ module Checks
+ class LfsIntegrity
+ REV_LIST_OBJECT_LIMIT = 2_000
+
+ def initialize(project, newrev)
+ @project = project
+ @newrev = newrev
+ end
+
+ def objects_missing?
+ return false unless @newrev && @project.lfs_enabled?
+
+ new_lfs_pointers = Gitlab::Git::LfsChanges.new(@project.repository, @newrev).new_pointers(object_limit: REV_LIST_OBJECT_LIMIT)
+
+ return false unless new_lfs_pointers.present?
+
+ existing_count = @project.lfs_storage_project
+ .lfs_objects
+ .where(oid: new_lfs_pointers.map(&:lfs_oid))
+ .count
+
+ existing_count != new_lfs_pointers.count
+ end
+ end
+ end
+end
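LfsIntegrity only inspects up to 2,000 new objects reachable from the pushed revision. A rough usage sketch, with `newrev` as a placeholder:

    integrity = Gitlab::Checks::LfsIntegrity.new(project, newrev)
    integrity.objects_missing?  # => true when pushed LFS pointers have no matching LfsObject rows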
diff --git a/lib/gitlab/checks/post_push_message.rb b/lib/gitlab/checks/post_push_message.rb
new file mode 100644
index 00000000000..473c0385b34
--- /dev/null
+++ b/lib/gitlab/checks/post_push_message.rb
@@ -0,0 +1,46 @@
+module Gitlab
+ module Checks
+ class PostPushMessage
+ def initialize(project, user, protocol)
+ @project = project
+ @user = user
+ @protocol = protocol
+ end
+
+ def self.fetch_message(user_id, project_id)
+ key = message_key(user_id, project_id)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ message = redis.get(key)
+ redis.del(key)
+ message
+ end
+ end
+
+ def add_message
+ return unless user.present? && project.present?
+
+ Gitlab::Redis::SharedState.with do |redis|
+ key = self.class.message_key(user.id, project.id)
+ redis.setex(key, 5.minutes, message)
+ end
+ end
+
+ def message
+ raise NotImplementedError
+ end
+
+ protected
+
+ attr_reader :project, :user, :protocol
+
+ def self.message_key(user_id, project_id)
+ raise NotImplementedError
+ end
+
+ def url_to_repo
+ protocol == 'ssh' ? project.ssh_url_to_repo : project.http_url_to_repo
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/checks/project_created.rb b/lib/gitlab/checks/project_created.rb
new file mode 100644
index 00000000000..cec270d6a58
--- /dev/null
+++ b/lib/gitlab/checks/project_created.rb
@@ -0,0 +1,31 @@
+module Gitlab
+ module Checks
+ class ProjectCreated < PostPushMessage
+ PROJECT_CREATED = "project_created".freeze
+
+ def message
+ <<~MESSAGE
+
+ The private project #{project.full_path} was successfully created.
+
+ To configure the remote, run:
+ git remote add origin #{url_to_repo}
+
+ To view the project, visit:
+ #{project_url}
+
+ MESSAGE
+ end
+
+ private
+
+ def self.message_key(user_id, project_id)
+ "#{PROJECT_CREATED}:#{user_id}:#{project_id}"
+ end
+
+ def project_url
+ Gitlab::Routing.url_helpers.project_url(project)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/checks/project_moved.rb b/lib/gitlab/checks/project_moved.rb
new file mode 100644
index 00000000000..3263790a876
--- /dev/null
+++ b/lib/gitlab/checks/project_moved.rb
@@ -0,0 +1,47 @@
+module Gitlab
+ module Checks
+ class ProjectMoved < PostPushMessage
+ REDIRECT_NAMESPACE = "redirect_namespace".freeze
+
+ def initialize(project, user, protocol, redirected_path)
+ @redirected_path = redirected_path
+
+ super(project, user, protocol)
+ end
+
+ def message(rejected: false)
+ <<~MESSAGE
+ Project '#{redirected_path}' was moved to '#{project.full_path}'.
+
+ Please update your Git remote:
+
+ #{remote_url_message(rejected)}
+ MESSAGE
+ end
+
+ def permanent_redirect?
+ RedirectRoute.permanent.exists?(path: redirected_path)
+ end
+
+ private
+
+ attr_reader :redirected_path
+
+ def self.message_key(user_id, project_id)
+ "#{REDIRECT_NAMESPACE}:#{user_id}:#{project_id}"
+ end
+
+ def remote_url_message(rejected)
+ if rejected
+ "git remote set-url origin #{url_to_repo} and try again."
+ else
+ "git remote set-url origin #{url_to_repo}"
+ end
+ end
+
+ def url
+ protocol == 'ssh' ? project.ssh_url_to_repo : project.http_url_to_repo
+ end
+ end
+ end
+end
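The intended round trip for these post-push messages, sketched under the assumption that the fetch happens later from the post-receive path (not shown in this patch):

    Gitlab::Checks::ProjectMoved.new(project, user, 'http', 'old/namespace/path').add_message

    # Later, after the push has completed:
    Gitlab::Checks::ProjectMoved.fetch_message(user.id, project.id)
    # => "Project 'old/namespace/path' was moved to '...'" (the Redis key is deleted on read)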
diff --git a/lib/gitlab/ci/ansi2html.rb b/lib/gitlab/ci/ansi2html.rb
index ad78ae244b2..35eadf6fa93 100644
--- a/lib/gitlab/ci/ansi2html.rb
+++ b/lib/gitlab/ci/ansi2html.rb
@@ -148,6 +148,7 @@ module Gitlab
stream.seek(@offset)
append = @offset > 0
end
+
start_offset = @offset
open_new_tag
@@ -155,7 +156,10 @@ module Gitlab
stream.each_line do |line|
s = StringScanner.new(line)
until s.eos?
- if s.scan(/\e([@-_])(.*?)([@-~])/)
+
+ if s.scan(Gitlab::Regex.build_trace_section_regex)
+ handle_section(s)
+ elsif s.scan(/\e([@-_])(.*?)([@-~])/)
handle_sequence(s)
elsif s.scan(/\e(([@-_])(.*?)?)?$/)
break
@@ -166,6 +170,7 @@ module Gitlab
else
@out << s.scan(/./m)
end
+
@offset += s.matched_size
end
end
@@ -183,6 +188,15 @@ module Gitlab
)
end
+ def handle_section(s)
+ action = s[1]
+ timestamp = s[2]
+ section = s[3]
+ line = s.matched()[0...-5] # strips \r\033[0K
+
+ @out << %{<div class="hidden" data-action="#{action}" data-timestamp="#{timestamp}" data-section="#{section}">#{line}</div>}
+ end
+
def handle_sequence(s)
indicator = s[1]
commands = s[2].split ';'
@@ -223,10 +237,12 @@ module Gitlab
# Most terminals show bold colored text in the light color variant
# Let's mimic that here
if @style_mask & STYLE_SWITCHES[:bold] != 0
- fg_color.sub!(/fg-(\w{2,}+)/, 'fg-l-\1')
+ fg_color.sub!(/fg-([a-z]{2,}+)/, 'fg-l-\1')
end
+
css_classes << fg_color
end
+
css_classes << @bg_color unless @bg_color.nil?
STYLE_SWITCHES.each do |css_class, flag|
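For context, the section markers the new `handle_section` branch matches look roughly like this (format assumed from `Gitlab::Regex.build_trace_section_regex` and the trailing `\r\e[0K` that gets stripped):

    "section_start:1506004955:build_script\r\e[0K"
    "section_end:1506004975:build_script\r\e[0K"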
diff --git a/lib/gitlab/ci/build/artifacts/metadata.rb b/lib/gitlab/ci/build/artifacts/metadata.rb
index a788fb3fcbc..0bbd60d8ffe 100644
--- a/lib/gitlab/ci/build/artifacts/metadata.rb
+++ b/lib/gitlab/ci/build/artifacts/metadata.rb
@@ -98,6 +98,7 @@ module Gitlab
def read_string(gz)
string_size = read_uint32(gz)
return nil unless string_size
+
gz.read(string_size)
end
diff --git a/lib/gitlab/ci/build/artifacts/metadata/entry.rb b/lib/gitlab/ci/build/artifacts/metadata/entry.rb
index 22941d48edf..428c0505808 100644
--- a/lib/gitlab/ci/build/artifacts/metadata/entry.rb
+++ b/lib/gitlab/ci/build/artifacts/metadata/entry.rb
@@ -43,6 +43,7 @@ module Gitlab
def parent
return nil unless has_parent?
+
self.class.new(@path.to_s.chomp(basename), @entries)
end
@@ -64,6 +65,7 @@ module Gitlab
def directories(opts = {})
return [] unless directory?
+
dirs = children.select(&:directory?)
return dirs unless has_parent? && opts[:parent]
@@ -74,6 +76,7 @@ module Gitlab
def files
return [] unless directory?
+
children.select(&:file?)
end
@@ -94,7 +97,7 @@ module Gitlab
end
def total_size
- descendant_pattern = %r{^#{Regexp.escape(@path.to_s)}}
+ descendant_pattern = /^#{Regexp.escape(@path.to_s)}/
entries.sum do |path, entry|
(entry[:size] if path =~ descendant_pattern).to_i
end
diff --git a/lib/gitlab/ci/build/image.rb b/lib/gitlab/ci/build/image.rb
index b88b2e36d53..c811f88f483 100644
--- a/lib/gitlab/ci/build/image.rb
+++ b/lib/gitlab/ci/build/image.rb
@@ -8,6 +8,7 @@ module Gitlab
def from_image(job)
image = Gitlab::Ci::Build::Image.new(job.options[:image])
return unless image.valid?
+
image
end
diff --git a/lib/gitlab/ci/charts.rb b/lib/gitlab/ci/charts.rb
index 7df7b542d91..46ed330dbbf 100644
--- a/lib/gitlab/ci/charts.rb
+++ b/lib/gitlab/ci/charts.rb
@@ -6,7 +6,7 @@ module Gitlab
query
.group("DATE(#{::Ci::Pipeline.table_name}.created_at)")
.count(:created_at)
- .transform_keys { |date| date.strftime(@format) }
+ .transform_keys { |date| date.strftime(@format) } # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
def interval_step
@@ -68,10 +68,11 @@ module Gitlab
class YearChart < Chart
include MonthlyInterval
+ attr_reader :to, :from
def initialize(*)
- @to = Date.today.end_of_month
- @from = @to.years_ago(1).beginning_of_month
+ @to = Date.today.end_of_month.end_of_day
+ @from = @to.years_ago(1).beginning_of_month.beginning_of_day
@format = '%d %B %Y'
super
@@ -80,10 +81,11 @@ module Gitlab
class MonthChart < Chart
include DailyInterval
+ attr_reader :to, :from
def initialize(*)
- @to = Date.today
- @from = @to - 30.days
+ @to = Date.today.end_of_day
+ @from = 1.month.ago.beginning_of_day
@format = '%d %B'
super
@@ -92,10 +94,11 @@ module Gitlab
class WeekChart < Chart
include DailyInterval
+ attr_reader :to, :from
def initialize(*)
- @to = Date.today
- @from = @to - 7.days
+ @to = Date.today.end_of_day
+ @from = 1.week.ago.beginning_of_day
@format = '%d %B'
super
diff --git a/lib/gitlab/ci/config/entry/configurable.rb b/lib/gitlab/ci/config/entry/configurable.rb
index 68b6742385a..db47c2f6185 100644
--- a/lib/gitlab/ci/config/entry/configurable.rb
+++ b/lib/gitlab/ci/config/entry/configurable.rb
@@ -29,15 +29,15 @@ module Gitlab
self.class.nodes.each do |key, factory|
factory
- .value(@config[key])
+ .value(config[key])
.with(key: key, parent: self)
- @entries[key] = factory.create!
+ entries[key] = factory.create!
end
yield if block_given?
- @entries.each_value do |entry|
+ entries.each_value do |entry|
entry.compose!(deps)
end
end
@@ -59,13 +59,13 @@ module Gitlab
def helpers(*nodes)
nodes.each do |symbol|
define_method("#{symbol}_defined?") do
- @entries[symbol]&.specified?
+ entries[symbol]&.specified?
end
define_method("#{symbol}_value") do
- return unless @entries[symbol] && @entries[symbol].valid?
+ return unless entries[symbol] && entries[symbol].valid?
- @entries[symbol].value
+ entries[symbol].value
end
end
end
diff --git a/lib/gitlab/ci/config/entry/image.rb b/lib/gitlab/ci/config/entry/image.rb
index 6555c589173..2844be80a84 100644
--- a/lib/gitlab/ci/config/entry/image.rb
+++ b/lib/gitlab/ci/config/entry/image.rb
@@ -37,6 +37,7 @@ module Gitlab
def value
return { name: @config } if string?
return @config if hash?
+
{}
end
end
diff --git a/lib/gitlab/ci/config/entry/node.rb b/lib/gitlab/ci/config/entry/node.rb
index c868943c42e..26505c91be3 100644
--- a/lib/gitlab/ci/config/entry/node.rb
+++ b/lib/gitlab/ci/config/entry/node.rb
@@ -90,6 +90,10 @@ module Gitlab
def self.aspects
@aspects ||= []
end
+
+ private
+
+ attr_reader :entries
end
end
end
diff --git a/lib/gitlab/ci/config/entry/validatable.rb b/lib/gitlab/ci/config/entry/validatable.rb
index 5ced778d311..e45787773a8 100644
--- a/lib/gitlab/ci/config/entry/validatable.rb
+++ b/lib/gitlab/ci/config/entry/validatable.rb
@@ -13,7 +13,7 @@ module Gitlab
end
def errors
- @validator.messages + descendants.flat_map(&:errors)
+ @validator.messages + descendants.flat_map(&:errors) # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
class_methods do
diff --git a/lib/gitlab/ci/config/entry/validators.rb b/lib/gitlab/ci/config/entry/validators.rb
index 0159179f0a9..55658900628 100644
--- a/lib/gitlab/ci/config/entry/validators.rb
+++ b/lib/gitlab/ci/config/entry/validators.rb
@@ -64,10 +64,24 @@ module Gitlab
include LegacyValidationHelpers
def validate_each(record, attribute, value)
- unless validate_string(value)
+ if validate_string(value)
+ validate_path(record, attribute, value)
+ else
record.errors.add(attribute, 'should be a string or symbol')
end
end
+
+ private
+
+ def validate_path(record, attribute, value)
+ path = CGI.unescape(value.to_s)
+
+ if path.include?('/')
+ record.errors.add(attribute, 'cannot contain the "/" character')
+ elsif path == '.' || path == '..'
+ record.errors.add(attribute, 'cannot be "." or ".."')
+ end
+ end
end
class RegexpValidator < ActiveModel::EachValidator
@@ -111,6 +125,7 @@ module Gitlab
def validate_string_or_regexp(value)
return false unless value.is_a?(String)
return validate_regexp(value) if look_like_regexp?(value)
+
true
end
end
diff --git a/lib/gitlab/ci/config/loader.rb b/lib/gitlab/ci/config/loader.rb
index e7d9f6a7761..141d2714cb6 100644
--- a/lib/gitlab/ci/config/loader.rb
+++ b/lib/gitlab/ci/config/loader.rb
@@ -6,6 +6,8 @@ module Gitlab
def initialize(config)
@config = YAML.safe_load(config, [Symbol], [], true)
+ rescue Psych::Exception => e
+ raise FormatError, e.message
end
def valid?
diff --git a/lib/gitlab/ci/pipeline/chain/base.rb b/lib/gitlab/ci/pipeline/chain/base.rb
new file mode 100644
index 00000000000..efed19da21c
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/base.rb
@@ -0,0 +1,26 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ class Base
+ attr_reader :pipeline, :command
+
+ delegate :project, :current_user, to: :command
+
+ def initialize(pipeline, command)
+ @pipeline = pipeline
+ @command = command
+ end
+
+ def perform!
+ raise NotImplementedError
+ end
+
+ def break?
+ raise NotImplementedError
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/chain/build.rb b/lib/gitlab/ci/pipeline/chain/build.rb
new file mode 100644
index 00000000000..70732d26bbd
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/build.rb
@@ -0,0 +1,30 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ class Build < Chain::Base
+ def perform!
+ @pipeline.assign_attributes(
+ source: @command.source,
+ project: @command.project,
+ ref: @command.ref,
+ sha: @command.sha,
+ before_sha: @command.before_sha,
+ tag: @command.tag_exists?,
+ trigger_requests: Array(@command.trigger_request),
+ user: @command.current_user,
+ pipeline_schedule: @command.schedule,
+ protected: @command.protected_ref?
+ )
+
+ @pipeline.set_config_source
+ end
+
+ def break?
+ false
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/chain/command.rb b/lib/gitlab/ci/pipeline/chain/command.rb
new file mode 100644
index 00000000000..7b19b10e05b
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/command.rb
@@ -0,0 +1,61 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ Command = Struct.new(
+ :source, :project, :current_user,
+ :origin_ref, :checkout_sha, :after_sha, :before_sha,
+ :trigger_request, :schedule,
+ :ignore_skip_ci, :save_incompleted,
+ :seeds_block
+ ) do
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(**params)
+ params.each do |key, value|
+ self[key] = value
+ end
+ end
+
+ def branch_exists?
+ strong_memoize(:is_branch) do
+ project.repository.branch_exists?(ref)
+ end
+ end
+
+ def tag_exists?
+ strong_memoize(:is_tag) do
+ project.repository.tag_exists?(ref)
+ end
+ end
+
+ def ref
+ strong_memoize(:ref) do
+ Gitlab::Git.ref_name(origin_ref)
+ end
+ end
+
+ def sha
+ strong_memoize(:sha) do
+ project.commit(origin_sha || origin_ref).try(:id)
+ end
+ end
+
+ def origin_sha
+ checkout_sha || after_sha
+ end
+
+ def before_sha
+ self[:before_sha] || checkout_sha || Gitlab::Git::BLANK_SHA
+ end
+
+ def protected_ref?
+ strong_memoize(:protected_ref) do
+ project.protected_for?(ref)
+ end
+ end
+ end
+ end
+ end
+ end
+end
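A sketch of building a Command for a branch push; every value below is a placeholder, and struct members that are not passed simply stay nil:

    command = Gitlab::Ci::Pipeline::Chain::Command.new(
      source: :push,
      project: project,
      current_user: user,
      origin_ref: 'refs/heads/master',
      checkout_sha: project.commit.id,
      ignore_skip_ci: false,
      save_incompleted: true
    )

    command.ref            # => 'master'
    command.branch_exists? # => true for an existing branch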
diff --git a/lib/gitlab/ci/pipeline/chain/create.rb b/lib/gitlab/ci/pipeline/chain/create.rb
new file mode 100644
index 00000000000..d5e17a123df
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/create.rb
@@ -0,0 +1,29 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ class Create < Chain::Base
+ include Chain::Helpers
+
+ def perform!
+ ::Ci::Pipeline.transaction do
+ pipeline.save!
+
+ @command.seeds_block&.call(pipeline)
+
+ ::Ci::CreatePipelineStagesService
+ .new(project, current_user)
+ .execute(pipeline)
+ end
+ rescue ActiveRecord::RecordInvalid => e
+ error("Failed to persist the pipeline: #{e}")
+ end
+
+ def break?
+ !pipeline.persisted?
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/chain/helpers.rb b/lib/gitlab/ci/pipeline/chain/helpers.rb
new file mode 100644
index 00000000000..bf1380a1da9
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/helpers.rb
@@ -0,0 +1,13 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ module Helpers
+ def error(message)
+ pipeline.errors.add(:base, message)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/chain/sequence.rb b/lib/gitlab/ci/pipeline/chain/sequence.rb
new file mode 100644
index 00000000000..e24630656d3
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/sequence.rb
@@ -0,0 +1,35 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ class Sequence
+ def initialize(pipeline, command, sequence)
+ @pipeline = pipeline
+ @command = command
+ @sequence = sequence
+ @completed = []
+ end
+
+ def build!
+ @sequence.each do |chain|
+ step = chain.new(@pipeline, @command)
+
+ step.perform!
+ break if step.break?
+
+ @completed.push(step)
+ end
+
+ @pipeline.tap do
+ yield @pipeline, self if block_given?
+ end
+ end
+
+ def complete?
+ @completed.size == @sequence.size
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/chain/skip.rb b/lib/gitlab/ci/pipeline/chain/skip.rb
new file mode 100644
index 00000000000..32cbb7ca6af
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/skip.rb
@@ -0,0 +1,37 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ class Skip < Chain::Base
+ include ::Gitlab::Utils::StrongMemoize
+
+ SKIP_PATTERN = /\[(ci[ _-]skip|skip[ _-]ci)\]/i
+
+ def perform!
+ if skipped?
+ @pipeline.skip if @command.save_incompleted
+ end
+ end
+
+ def skipped?
+ !@command.ignore_skip_ci && commit_message_skips_ci?
+ end
+
+ def break?
+ skipped?
+ end
+
+ private
+
+ def commit_message_skips_ci?
+ return false unless @pipeline.git_commit_message
+
+ strong_memoize(:commit_message_skips_ci) do
+ !!(@pipeline.git_commit_message =~ SKIP_PATTERN)
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/chain/validate/abilities.rb b/lib/gitlab/ci/pipeline/chain/validate/abilities.rb
new file mode 100644
index 00000000000..13c6fedd831
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/validate/abilities.rb
@@ -0,0 +1,54 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ module Validate
+ class Abilities < Chain::Base
+ include Gitlab::Allowable
+ include Chain::Helpers
+
+ def perform!
+ unless project.builds_enabled?
+ return error('Pipelines are disabled!')
+ end
+
+ unless allowed_to_trigger_pipeline?
+ if can?(current_user, :create_pipeline, project)
+ return error("Insufficient permissions for protected ref '#{command.ref}'")
+ else
+ return error('Insufficient permissions to create a new pipeline')
+ end
+ end
+ end
+
+ def break?
+ @pipeline.errors.any?
+ end
+
+ def allowed_to_trigger_pipeline?
+ if current_user
+ allowed_to_create?
+ else # legacy triggers don't have a corresponding user
+ !@command.protected_ref?
+ end
+ end
+
+ def allowed_to_create?
+ return unless can?(current_user, :create_pipeline, project)
+
+ access = Gitlab::UserAccess.new(current_user, project: project)
+
+ if @command.branch_exists?
+ access.can_update_branch?(@command.ref)
+ elsif @command.tag_exists?
+ access.can_create_tag?(@command.ref)
+ else
+ true # Allow it for now and we'll reject when we check ref existence
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/chain/validate/config.rb b/lib/gitlab/ci/pipeline/chain/validate/config.rb
new file mode 100644
index 00000000000..075504bcce5
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/validate/config.rb
@@ -0,0 +1,35 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ module Validate
+ class Config < Chain::Base
+ include Chain::Helpers
+
+ def perform!
+ unless @pipeline.config_processor
+ unless @pipeline.ci_yaml_file
+ return error("Missing #{@pipeline.ci_yaml_file_path} file")
+ end
+
+ if @command.save_incompleted && @pipeline.has_yaml_errors?
+ @pipeline.drop!(:config_error)
+ end
+
+ return error(@pipeline.yaml_errors)
+ end
+
+ unless @pipeline.has_stage_seeds?
+ return error('No stages / jobs for this pipeline.')
+ end
+ end
+
+ def break?
+ @pipeline.errors.any? || @pipeline.persisted?
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/chain/validate/repository.rb b/lib/gitlab/ci/pipeline/chain/validate/repository.rb
new file mode 100644
index 00000000000..9699c24e5b6
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/validate/repository.rb
@@ -0,0 +1,27 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ module Validate
+ class Repository < Chain::Base
+ include Chain::Helpers
+
+ def perform!
+ unless @command.branch_exists? || @command.tag_exists?
+ return error('Reference not found')
+ end
+
+ unless @command.sha
+ return error('Commit not found')
+ end
+ end
+
+ def break?
+ @pipeline.errors.any?
+ end
+ end
+ end
+ end
+ end
+ end
+end
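How these steps are meant to be strung together; the exact sequence lives in Ci::CreatePipelineService and is assumed here, not defined by this patch:

    sequence = [Gitlab::Ci::Pipeline::Chain::Build,
                Gitlab::Ci::Pipeline::Chain::Validate::Abilities,
                Gitlab::Ci::Pipeline::Chain::Validate::Repository,
                Gitlab::Ci::Pipeline::Chain::Validate::Config,
                Gitlab::Ci::Pipeline::Chain::Skip,
                Gitlab::Ci::Pipeline::Chain::Create]

    Gitlab::Ci::Pipeline::Chain::Sequence
      .new(pipeline, command, sequence)
      .build! { |pipeline, seq| Rails.logger.info('pipeline not persisted') unless seq.complete? }

Each step runs `perform!` and the chain stops at the first step whose `break?` returns true, so validation failures leave the pipeline unpersisted.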
diff --git a/lib/gitlab/ci/pipeline/duration.rb b/lib/gitlab/ci/pipeline/duration.rb
new file mode 100644
index 00000000000..469fc094cc8
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/duration.rb
@@ -0,0 +1,143 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ # # Introduction - total running time
+ #
+ # The problem this module is trying to solve is finding the total running
+ # time amongst all the jobs, excluding retries and pending (queue) time.
+ # We could reduce this problem down to finding the union of periods.
+ #
+ # So each job would be represented as a `Period`, which consists of
+ # `Period#first` as when the job started and `Period#last` as when the
+ # job was finished. A simple example here would be:
+ #
+ # * A (1, 3)
+ # * B (2, 4)
+ # * C (6, 7)
+ #
+ # Here A begins at 1 and ends at 3. B begins at 2 and ends at 4.
+ # C begins at 6 and ends at 7. Visually it could be viewed as:
+ #
+ # 0 1 2 3 4 5 6 7
+ # AAAAAAA
+ # BBBBBBB
+ # CCCC
+ #
+ # The union of A, B, and C would be (1, 4) and (6, 7), therefore the
+ # total running time should be:
+ #
+ # (4 - 1) + (7 - 6) => 4
+ #
+ # # The Algorithm
+ #
+ # The algorithm used here for the union can be described as follows.
+ # First we make sure that all periods are sorted by `Period#first`.
+ # Then we try to merge periods by iterating through the first period
+ # to the last period. The goal would be merging all overlapped periods
+ # so that in the end all the periods are discrete. When all periods
+ # are discrete, we're free to just sum all the periods to get real
+ # running time.
+ #
+ # Here we begin from A, and compare it to B. We could find that
+ # before A ends, B already started. That is `B.first <= A.last`
+ # that is `2 <= 3` which means A and B are overlapping!
+ #
+ # When we found that two periods are overlapping, we would need to merge
+ # them into a new period and disregard the old periods. To make a new
+ # period, we take `A.first` as the new first because remember? we sorted
+ # them, so `A.first` must be smaller or equal to `B.first`. And we take
+ # `[A.last, B.last].max` as the new last because we want whoever ended
+ # later. This could be broken into two cases:
+ #
+ # 0 1 2 3 4
+ # AAAAAAA
+ # BBBBBBB
+ #
+ # Or:
+ #
+ # 0 1 2 3 4
+ # AAAAAAAAAA
+ # BBBB
+ #
+ # So we need to take whichever ends later. Back to our example,
+ # after merging and discarding A and B it could be visually viewed as:
+ #
+ # 0 1 2 3 4 5 6 7
+ # DDDDDDDDDD
+ # CCCC
+ #
+ # Now we could go on and compare the newly created D and the old C.
+ # We could figure out that D and C are not overlapping by checking
+ # `C.first <= D.last` is `false`. Therefore we need to keep both C
+ # and D. The example would end here because there are no more jobs.
+ #
+ # After having the union of all periods, we just need to sum the length
+ # of all periods to get total time.
+ #
+ # (4 - 1) + (7 - 6) => 4
+ #
+ # That is 4 is the answer in the example.
+ module Duration
+ extend self
+
+ Period = Struct.new(:first, :last) do
+ def duration
+ last - first
+ end
+ end
+
+ def from_pipeline(pipeline)
+ status = %w[success failed running canceled]
+ builds = pipeline.builds.latest
+ .where(status: status).where.not(started_at: nil).order(:started_at)
+
+ from_builds(builds)
+ end
+
+ def from_builds(builds)
+ now = Time.now
+
+ periods = builds.map do |b|
+ Period.new(b.started_at, b.finished_at || now)
+ end
+
+ from_periods(periods)
+ end
+
+ # periods should be sorted by `first`
+ def from_periods(periods)
+ process_duration(process_periods(periods))
+ end
+
+ private
+
+ def process_periods(periods)
+ return periods if periods.empty?
+
+ periods.drop(1).inject([periods.first]) do |result, current|
+ previous = result.last
+
+ if overlap?(previous, current)
+ result[-1] = merge(previous, current)
+ result
+ else
+ result << current
+ end
+ end
+ end
+
+ def overlap?(previous, current)
+ current.first <= previous.last
+ end
+
+ def merge(previous, current)
+ Period.new(previous.first, [previous.last, current.last].max)
+ end
+
+ def process_duration(periods)
+ periods.sum(&:duration)
+ end
+ end
+ end
+ end
+end
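The worked example from the comment block above, expressed with the public entry point (the Period values are plain integers here for clarity):

    periods = [
      Gitlab::Ci::Pipeline::Duration::Period.new(1, 3),
      Gitlab::Ci::Pipeline::Duration::Period.new(2, 4),
      Gitlab::Ci::Pipeline::Duration::Period.new(6, 7)
    ]

    Gitlab::Ci::Pipeline::Duration.from_periods(periods) # => (4 - 1) + (7 - 6) => 4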
diff --git a/lib/gitlab/ci/pipeline/expression/lexeme/base.rb b/lib/gitlab/ci/pipeline/expression/lexeme/base.rb
new file mode 100644
index 00000000000..047ab66e9b3
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/expression/lexeme/base.rb
@@ -0,0 +1,25 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Expression
+ module Lexeme
+ class Base
+ def evaluate(**variables)
+ raise NotImplementedError
+ end
+
+ def self.build(token)
+ raise NotImplementedError
+ end
+
+ def self.scan(scanner)
+ if scanner.scan(self::PATTERN)
+ Expression::Token.new(scanner.matched, self)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/expression/lexeme/equals.rb b/lib/gitlab/ci/pipeline/expression/lexeme/equals.rb
new file mode 100644
index 00000000000..3a2f0c6924e
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/expression/lexeme/equals.rb
@@ -0,0 +1,26 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Expression
+ module Lexeme
+ class Equals < Lexeme::Operator
+ PATTERN = /==/.freeze
+
+ def initialize(left, right)
+ @left = left
+ @right = right
+ end
+
+ def evaluate(variables = {})
+ @left.evaluate(variables) == @right.evaluate(variables)
+ end
+
+ def self.build(_value, behind, ahead)
+ new(behind, ahead)
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/expression/lexeme/null.rb b/lib/gitlab/ci/pipeline/expression/lexeme/null.rb
new file mode 100644
index 00000000000..a2778716924
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/expression/lexeme/null.rb
@@ -0,0 +1,25 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Expression
+ module Lexeme
+ class Null < Lexeme::Value
+ PATTERN = /null/.freeze
+
+ def initialize(value = nil)
+ @value = nil
+ end
+
+ def evaluate(variables = {})
+ nil
+ end
+
+ def self.build(_value)
+ self.new
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/expression/lexeme/operator.rb b/lib/gitlab/ci/pipeline/expression/lexeme/operator.rb
new file mode 100644
index 00000000000..f640d0b5855
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/expression/lexeme/operator.rb
@@ -0,0 +1,15 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Expression
+ module Lexeme
+ class Operator < Lexeme::Base
+ def self.type
+ :operator
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/expression/lexeme/string.rb b/lib/gitlab/ci/pipeline/expression/lexeme/string.rb
new file mode 100644
index 00000000000..48bde213d44
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/expression/lexeme/string.rb
@@ -0,0 +1,25 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Expression
+ module Lexeme
+ class String < Lexeme::Value
+ PATTERN = /("(?<string>.+?)")|('(?<string>.+?)')/.freeze
+
+ def initialize(value)
+ @value = value
+ end
+
+ def evaluate(variables = {})
+ @value.to_s
+ end
+
+ def self.build(string)
+ new(string.match(PATTERN)[:string])
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/expression/lexeme/value.rb b/lib/gitlab/ci/pipeline/expression/lexeme/value.rb
new file mode 100644
index 00000000000..f2611d65faf
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/expression/lexeme/value.rb
@@ -0,0 +1,15 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Expression
+ module Lexeme
+ class Value < Lexeme::Base
+ def self.type
+ :value
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/expression/lexeme/variable.rb b/lib/gitlab/ci/pipeline/expression/lexeme/variable.rb
new file mode 100644
index 00000000000..b781c15fd67
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/expression/lexeme/variable.rb
@@ -0,0 +1,25 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Expression
+ module Lexeme
+ class Variable < Lexeme::Value
+ PATTERN = /\$(?<name>\w+)/.freeze
+
+ def initialize(name)
+ @name = name
+ end
+
+ def evaluate(variables = {})
+ HashWithIndifferentAccess.new(variables).fetch(@name, nil)
+ end
+
+ def self.build(string)
+ new(string.match(PATTERN)[:name])
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/expression/lexer.rb b/lib/gitlab/ci/pipeline/expression/lexer.rb
new file mode 100644
index 00000000000..e1c68b7c3c2
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/expression/lexer.rb
@@ -0,0 +1,59 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Expression
+ class Lexer
+ include ::Gitlab::Utils::StrongMemoize
+
+ LEXEMES = [
+ Expression::Lexeme::Variable,
+ Expression::Lexeme::String,
+ Expression::Lexeme::Null,
+ Expression::Lexeme::Equals
+ ].freeze
+
+ SyntaxError = Class.new(Statement::StatementError)
+
+ MAX_TOKENS = 100
+
+ def initialize(statement, max_tokens: MAX_TOKENS)
+ @scanner = StringScanner.new(statement)
+ @max_tokens = max_tokens
+ end
+
+ def tokens
+ strong_memoize(:tokens) { tokenize }
+ end
+
+ def lexemes
+ tokens.map(&:to_lexeme)
+ end
+
+ private
+
+ def tokenize
+ tokens = []
+
+ @max_tokens.times do
+ @scanner.skip(/\s+/) # ignore whitespace
+
+ return tokens if @scanner.eos?
+
+ lexeme = LEXEMES.find do |type|
+ type.scan(@scanner).tap do |token|
+ tokens.push(token) if token.present?
+ end
+ end
+
+ unless lexeme.present?
+ raise Lexer::SyntaxError, 'Unknown lexeme found!'
+ end
+ end
+
+ raise Lexer::SyntaxError, 'Too many tokens!'
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/expression/parser.rb b/lib/gitlab/ci/pipeline/expression/parser.rb
new file mode 100644
index 00000000000..90f94d0b763
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/expression/parser.rb
@@ -0,0 +1,40 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Expression
+ class Parser
+ def initialize(tokens)
+ @tokens = tokens.to_enum
+ @nodes = []
+ end
+
+ ##
+ # This produces a reverse descent parse tree.
+ #
+ # It currently does not support precedence of operators.
+ #
+ def tree
+ while token = @tokens.next
+ case token.type
+ when :operator
+ token.build(@nodes.pop, tree).tap do |node|
+ @nodes.push(node)
+ end
+ when :value
+ token.build.tap do |leaf|
+ @nodes.push(leaf)
+ end
+ end
+ end
+ rescue StopIteration
+ @nodes.last || Lexeme::Null.new
+ end
+
+ def self.seed(statement)
+ new(Expression::Lexer.new(statement).tokens)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/expression/statement.rb b/lib/gitlab/ci/pipeline/expression/statement.rb
new file mode 100644
index 00000000000..4f0e101b730
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/expression/statement.rb
@@ -0,0 +1,42 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Expression
+ class Statement
+ StatementError = Class.new(StandardError)
+
+ GRAMMAR = [
+ %w[variable equals string],
+ %w[variable equals variable],
+ %w[variable equals null],
+ %w[string equals variable],
+ %w[null equals variable],
+ %w[variable]
+ ].freeze
+
+ def initialize(statement, pipeline)
+ @lexer = Expression::Lexer.new(statement)
+
+ @variables = pipeline.variables.map do |variable|
+ [variable.key, variable.value]
+ end
+ end
+
+ def parse_tree
+ raise StatementError if @lexer.lexemes.empty?
+
+ unless GRAMMAR.find { |syntax| syntax == @lexer.lexemes }
+ raise StatementError, 'Unknown pipeline expression!'
+ end
+
+ Expression::Parser.new(@lexer.tokens).tree
+ end
+
+ def evaluate
+ parse_tree.evaluate(@variables.to_h)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/expression/token.rb b/lib/gitlab/ci/pipeline/expression/token.rb
new file mode 100644
index 00000000000..58211800b88
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/expression/token.rb
@@ -0,0 +1,28 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Expression
+ class Token
+ attr_reader :value, :lexeme
+
+ def initialize(value, lexeme)
+ @value = value
+ @lexeme = lexeme
+ end
+
+ def build(*args)
+ @lexeme.build(@value, *args)
+ end
+
+ def type
+ @lexeme.type
+ end
+
+ def to_lexeme
+ @lexeme.name.demodulize.downcase
+ end
+ end
+ end
+ end
+ end
+end
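Putting the lexer, parser, and lexemes above together; the statement and the variables hash are illustrative, not taken from the patch:

    Gitlab::Ci::Pipeline::Expression::Parser
      .seed('$CI_COMMIT_REF_NAME == "master"')
      .tree
      .evaluate('CI_COMMIT_REF_NAME' => 'master')
    # => true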
diff --git a/lib/gitlab/ci/pipeline_duration.rb b/lib/gitlab/ci/pipeline_duration.rb
deleted file mode 100644
index 3208cc2bef6..00000000000
--- a/lib/gitlab/ci/pipeline_duration.rb
+++ /dev/null
@@ -1,141 +0,0 @@
-module Gitlab
- module Ci
- # # Introduction - total running time
- #
- # The problem this module is trying to solve is finding the total running
- # time amongst all the jobs, excluding retries and pending (queue) time.
- # We could reduce this problem down to finding the union of periods.
- #
- # So each job would be represented as a `Period`, which consists of
- # `Period#first` as when the job started and `Period#last` as when the
- # job was finished. A simple example here would be:
- #
- # * A (1, 3)
- # * B (2, 4)
- # * C (6, 7)
- #
- # Here A begins from 1, and ends to 3. B begins from 2, and ends to 4.
- # C begins from 6, and ends to 7. Visually it could be viewed as:
- #
- # 0 1 2 3 4 5 6 7
- # AAAAAAA
- # BBBBBBB
- # CCCC
- #
- # The union of A, B, and C would be (1, 4) and (6, 7), therefore the
- # total running time should be:
- #
- # (4 - 1) + (7 - 6) => 4
- #
- # # The Algorithm
- #
- # The algorithm used here for union would be described as follow.
- # First we make sure that all periods are sorted by `Period#first`.
- # Then we try to merge periods by iterating through the first period
- # to the last period. The goal would be merging all overlapped periods
- # so that in the end all the periods are discrete. When all periods
- # are discrete, we're free to just sum all the periods to get real
- # running time.
- #
- # Here we begin from A, and compare it to B. We could find that
- # before A ends, B already started. That is `B.first <= A.last`
- # that is `2 <= 3` which means A and B are overlapping!
- #
- # When we found that two periods are overlapping, we would need to merge
- # them into a new period and disregard the old periods. To make a new
- # period, we take `A.first` as the new first because remember? we sorted
- # them, so `A.first` must be smaller or equal to `B.first`. And we take
- # `[A.last, B.last].max` as the new last because we want whoever ended
- # later. This could be broken into two cases:
- #
- # 0 1 2 3 4
- # AAAAAAA
- # BBBBBBB
- #
- # Or:
- #
- # 0 1 2 3 4
- # AAAAAAAAAA
- # BBBB
- #
- # So that we need to take whoever ends later. Back to our example,
- # after merging and discard A and B it could be visually viewed as:
- #
- # 0 1 2 3 4 5 6 7
- # DDDDDDDDDD
- # CCCC
- #
- # Now we could go on and compare the newly created D and the old C.
- # We could figure out that D and C are not overlapping by checking
- # `C.first <= D.last` is `false`. Therefore we need to keep both C
- # and D. The example would end here because there are no more jobs.
- #
- # After having the union of all periods, we just need to sum the length
- # of all periods to get total time.
- #
- # (4 - 1) + (7 - 6) => 4
- #
- # That is 4 is the answer in the example.
- module PipelineDuration
- extend self
-
- Period = Struct.new(:first, :last) do
- def duration
- last - first
- end
- end
-
- def from_pipeline(pipeline)
- status = %w[success failed running canceled]
- builds = pipeline.builds.latest
- .where(status: status).where.not(started_at: nil).order(:started_at)
-
- from_builds(builds)
- end
-
- def from_builds(builds)
- now = Time.now
-
- periods = builds.map do |b|
- Period.new(b.started_at, b.finished_at || now)
- end
-
- from_periods(periods)
- end
-
- # periods should be sorted by `first`
- def from_periods(periods)
- process_duration(process_periods(periods))
- end
-
- private
-
- def process_periods(periods)
- return periods if periods.empty?
-
- periods.drop(1).inject([periods.first]) do |result, current|
- previous = result.last
-
- if overlap?(previous, current)
- result[-1] = merge(previous, current)
- result
- else
- result << current
- end
- end
- end
-
- def overlap?(previous, current)
- current.first <= previous.last
- end
-
- def merge(previous, current)
- Period.new(previous.first, [previous.last, current.last].max)
- end
-
- def process_duration(periods)
- periods.sum(&:duration)
- end
- end
- end
-end
diff --git a/lib/gitlab/ci/stage/seed.rb b/lib/gitlab/ci/stage/seed.rb
index e19aae35a81..f33c87f554d 100644
--- a/lib/gitlab/ci/stage/seed.rb
+++ b/lib/gitlab/ci/stage/seed.rb
@@ -2,8 +2,12 @@ module Gitlab
module Ci
module Stage
class Seed
+ include ::Gitlab::Utils::StrongMemoize
+
attr_reader :pipeline
+
delegate :project, to: :pipeline
+ delegate :size, to: :@jobs
def initialize(pipeline, stage, jobs)
@pipeline = pipeline
@@ -48,7 +52,9 @@ module Gitlab
private
def protected_ref?
- @protected_ref ||= project.protected_for?(pipeline.ref)
+ strong_memoize(:protected_ref) do
+ project.protected_for?(pipeline.ref)
+ end
end
end
end
diff --git a/lib/gitlab/ci/status/build/action.rb b/lib/gitlab/ci/status/build/action.rb
index 45fd0d4aa07..6c9125647ad 100644
--- a/lib/gitlab/ci/status/build/action.rb
+++ b/lib/gitlab/ci/status/build/action.rb
@@ -2,6 +2,9 @@ module Gitlab
module Ci
module Status
module Build
+ ##
+ # Extended status for playable manual actions.
+ #
class Action < Status::Extended
def label
if has_action?
@@ -12,7 +15,7 @@ module Gitlab
end
def self.matches?(build, user)
- build.action?
+ build.playable?
end
end
end
diff --git a/lib/gitlab/ci/status/build/cancelable.rb b/lib/gitlab/ci/status/build/cancelable.rb
index 8ad3e57e59d..2d9166d6bdd 100644
--- a/lib/gitlab/ci/status/build/cancelable.rb
+++ b/lib/gitlab/ci/status/build/cancelable.rb
@@ -8,7 +8,7 @@ module Gitlab
end
def action_icon
- 'icon_action_cancel'
+ 'cancel'
end
def action_path
diff --git a/lib/gitlab/ci/status/build/failed_allowed.rb b/lib/gitlab/ci/status/build/failed_allowed.rb
index e42d3574357..dc90f398c7e 100644
--- a/lib/gitlab/ci/status/build/failed_allowed.rb
+++ b/lib/gitlab/ci/status/build/failed_allowed.rb
@@ -8,7 +8,7 @@ module Gitlab
end
def icon
- 'icon_status_warning'
+ 'status_warning'
end
def group
diff --git a/lib/gitlab/ci/status/build/play.rb b/lib/gitlab/ci/status/build/play.rb
index c7726543599..b7b45466d3b 100644
--- a/lib/gitlab/ci/status/build/play.rb
+++ b/lib/gitlab/ci/status/build/play.rb
@@ -12,7 +12,7 @@ module Gitlab
end
def action_icon
- 'icon_action_play'
+ 'play'
end
def action_title
diff --git a/lib/gitlab/ci/status/build/retryable.rb b/lib/gitlab/ci/status/build/retryable.rb
index 8c8fdc56d75..44ffe783e50 100644
--- a/lib/gitlab/ci/status/build/retryable.rb
+++ b/lib/gitlab/ci/status/build/retryable.rb
@@ -8,7 +8,7 @@ module Gitlab
end
def action_icon
- 'icon_action_retry'
+ 'retry'
end
def action_title
diff --git a/lib/gitlab/ci/status/build/stop.rb b/lib/gitlab/ci/status/build/stop.rb
index d464738deaf..46e730797e4 100644
--- a/lib/gitlab/ci/status/build/stop.rb
+++ b/lib/gitlab/ci/status/build/stop.rb
@@ -12,7 +12,7 @@ module Gitlab
end
def action_icon
- 'icon_action_stop'
+ 'stop'
end
def action_title
diff --git a/lib/gitlab/ci/status/canceled.rb b/lib/gitlab/ci/status/canceled.rb
index e5fdc1f8136..e6195a60d4f 100644
--- a/lib/gitlab/ci/status/canceled.rb
+++ b/lib/gitlab/ci/status/canceled.rb
@@ -11,7 +11,7 @@ module Gitlab
end
def icon
- 'icon_status_canceled'
+ 'status_canceled'
end
def favicon
diff --git a/lib/gitlab/ci/status/created.rb b/lib/gitlab/ci/status/created.rb
index d188bd286a6..846f00b83dd 100644
--- a/lib/gitlab/ci/status/created.rb
+++ b/lib/gitlab/ci/status/created.rb
@@ -11,7 +11,7 @@ module Gitlab
end
def icon
- 'icon_status_created'
+ 'status_created'
end
def favicon
diff --git a/lib/gitlab/ci/status/failed.rb b/lib/gitlab/ci/status/failed.rb
index 38e45714c22..27ce85bd3ed 100644
--- a/lib/gitlab/ci/status/failed.rb
+++ b/lib/gitlab/ci/status/failed.rb
@@ -11,7 +11,7 @@ module Gitlab
end
def icon
- 'icon_status_failed'
+ 'status_failed'
end
def favicon
diff --git a/lib/gitlab/ci/status/manual.rb b/lib/gitlab/ci/status/manual.rb
index a4a7edadac9..fc387e2fd25 100644
--- a/lib/gitlab/ci/status/manual.rb
+++ b/lib/gitlab/ci/status/manual.rb
@@ -11,7 +11,7 @@ module Gitlab
end
def icon
- 'icon_status_manual'
+ 'status_manual'
end
def favicon
diff --git a/lib/gitlab/ci/status/pending.rb b/lib/gitlab/ci/status/pending.rb
index 5164260b861..6780780db32 100644
--- a/lib/gitlab/ci/status/pending.rb
+++ b/lib/gitlab/ci/status/pending.rb
@@ -11,7 +11,7 @@ module Gitlab
end
def icon
- 'icon_status_pending'
+ 'status_pending'
end
def favicon
diff --git a/lib/gitlab/ci/status/running.rb b/lib/gitlab/ci/status/running.rb
index 993937e98ca..ee13905e46d 100644
--- a/lib/gitlab/ci/status/running.rb
+++ b/lib/gitlab/ci/status/running.rb
@@ -11,7 +11,7 @@ module Gitlab
end
def icon
- 'icon_status_running'
+ 'status_running'
end
def favicon
diff --git a/lib/gitlab/ci/status/skipped.rb b/lib/gitlab/ci/status/skipped.rb
index 0c942920b02..0dbdc4de426 100644
--- a/lib/gitlab/ci/status/skipped.rb
+++ b/lib/gitlab/ci/status/skipped.rb
@@ -11,7 +11,7 @@ module Gitlab
end
def icon
- 'icon_status_skipped'
+ 'status_skipped'
end
def favicon
diff --git a/lib/gitlab/ci/status/success.rb b/lib/gitlab/ci/status/success.rb
index d7af98857b0..731013ec017 100644
--- a/lib/gitlab/ci/status/success.rb
+++ b/lib/gitlab/ci/status/success.rb
@@ -11,7 +11,7 @@ module Gitlab
end
def icon
- 'icon_status_success'
+ 'status_success'
end
def favicon
diff --git a/lib/gitlab/ci/status/success_warning.rb b/lib/gitlab/ci/status/success_warning.rb
index 4d7d82e04cf..32b4cf43e48 100644
--- a/lib/gitlab/ci/status/success_warning.rb
+++ b/lib/gitlab/ci/status/success_warning.rb
@@ -15,7 +15,7 @@ module Gitlab
end
def icon
- 'icon_status_warning'
+ 'status_warning'
end
def group
diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb
index 5b835bb669a..cedf4171ab1 100644
--- a/lib/gitlab/ci/trace.rb
+++ b/lib/gitlab/ci/trace.rb
@@ -1,6 +1,8 @@
module Gitlab
module Ci
class Trace
+ ArchiveError = Class.new(StandardError)
+
attr_reader :job
delegate :old_trace, to: :job
@@ -27,6 +29,12 @@ module Gitlab
end
end
+ def extract_sections
+ read do |stream|
+ stream.extract_sections
+ end
+ end
+
def set(data)
write do |stream|
data = job.hide_secrets(data)
@@ -46,12 +54,14 @@ module Gitlab
end
def exist?
- current_path.present? || old_trace.present?
+ trace_artifact&.exists? || current_path.present? || old_trace.present?
end
def read
stream = Gitlab::Ci::Trace::Stream.new do
- if current_path
+ if trace_artifact
+ trace_artifact.open
+ elsif current_path
File.open(current_path, "rb")
elsif old_trace
StringIO.new(old_trace)
@@ -76,6 +86,8 @@ module Gitlab
end
def erase!
+ trace_artifact&.destroy
+
paths.each do |trace_path|
FileUtils.rm(trace_path, force: true)
end
@@ -83,8 +95,53 @@ module Gitlab
job.erase_old_trace!
end
+ def archive!
+ raise ArchiveError, 'Already archived' if trace_artifact
+ raise ArchiveError, 'Job is not finished yet' unless job.complete?
+
+ if current_path
+ File.open(current_path) do |stream|
+ archive_stream!(stream)
+ FileUtils.rm(current_path)
+ end
+ elsif old_trace
+ StringIO.new(old_trace, 'rb').tap do |stream|
+ archive_stream!(stream)
+ job.erase_old_trace!
+ end
+ end
+ end
+
private
+ def archive_stream!(stream)
+ clone_file!(stream, JobArtifactUploader.workhorse_upload_path) do |clone_path|
+ create_job_trace!(job, clone_path)
+ end
+ end
+
+ def clone_file!(src_stream, temp_dir)
+ FileUtils.mkdir_p(temp_dir)
+ Dir.mktmpdir('tmp-trace', temp_dir) do |dir_path|
+ temp_path = File.join(dir_path, "job.log")
+ FileUtils.touch(temp_path)
+ size = IO.copy_stream(src_stream, temp_path)
+ raise ArchiveError, 'Failed to copy stream' unless size == src_stream.size
+
+ yield(temp_path)
+ end
+ end
+
+ def create_job_trace!(job, path)
+ File.open(path) do |stream|
+ job.create_job_artifacts_trace!(
+ project: job.project,
+ file_type: :trace,
+ file: stream,
+ file_sha256: Digest::SHA256.file(path).hexdigest)
+ end
+ end
+
def ensure_path
return current_path if current_path
@@ -131,6 +188,10 @@ module Gitlab
"#{job.id}.log"
) if job.project&.ci_id
end
+
+ def trace_artifact
+ job.job_artifacts_trace
+ end
end
end
end
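Assumed usage of the new archival path once a job completes (Gitlab::Ci::Trace is constructed from a job elsewhere in the codebase):

    trace = Gitlab::Ci::Trace.new(job)
    trace.archive!   # raises Gitlab::Ci::Trace::ArchiveError unless the job is finished
    trace.exist?     # => true, now answered by the trace artifact instead of the live log file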
diff --git a/lib/gitlab/ci/trace/section_parser.rb b/lib/gitlab/ci/trace/section_parser.rb
new file mode 100644
index 00000000000..9bb0166c9e3
--- /dev/null
+++ b/lib/gitlab/ci/trace/section_parser.rb
@@ -0,0 +1,97 @@
+module Gitlab
+ module Ci
+ class Trace
+ class SectionParser
+ def initialize(lines)
+ @lines = lines
+ end
+
+ def parse!
+ @markers = {}
+
+ @lines.each do |line, pos|
+ parse_line(line, pos)
+ end
+ end
+
+ def sections
+ sanitize_markers.map do |name, markers|
+ start_, end_ = markers
+
+ {
+ name: name,
+ byte_start: start_[:marker],
+ byte_end: end_[:marker],
+ date_start: start_[:timestamp],
+ date_end: end_[:timestamp]
+ }
+ end
+ end
+
+ private
+
+ def parse_line(line, line_start_position)
+ s = StringScanner.new(line)
+ until s.eos?
+ find_next_marker(s) do |scanner|
+ marker_begins_at = line_start_position + scanner.pointer
+
+ if scanner.scan(Gitlab::Regex.build_trace_section_regex)
+ marker_ends_at = line_start_position + scanner.pointer
+ handle_line(scanner[1], scanner[2].to_i, scanner[3], marker_begins_at, marker_ends_at)
+ true
+ else
+ false
+ end
+ end
+ end
+ end
+
+ def sanitize_markers
+ @markers.select do |_, markers|
+ markers.size == 2 && markers[0][:action] == :start && markers[1][:action] == :end
+ end
+ end
+
+ def handle_line(action, time, name, marker_start, marker_end)
+ action = action.to_sym
+ timestamp = Time.at(time).utc
+ marker = if action == :start
+ marker_end
+ else
+ marker_start
+ end
+
+ @markers[name] ||= []
+ @markers[name] << {
+ name: name,
+ action: action,
+ timestamp: timestamp,
+ marker: marker
+ }
+ end
+
+ def beginning_of_section_regex
+ @beginning_of_section_regex ||= /section_/.freeze
+ end
+
+ def find_next_marker(s)
+ beginning_of_section_len = 8
+ maybe_marker = s.exist?(beginning_of_section_regex)
+
+ if maybe_marker.nil?
+ s.terminate
+ else
+ # repositioning at the beginning of the match
+ s.pos += maybe_marker - beginning_of_section_len
+ if block_given?
+ good_marker = yield(s)
+ # if it was not a good marker, consume the matched beginning_of_section_regex
+ s.pos += beginning_of_section_len unless good_marker
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/trace/stream.rb b/lib/gitlab/ci/trace/stream.rb
index ab3408f48d6..d52194f688b 100644
--- a/lib/gitlab/ci/trace/stream.rb
+++ b/lib/gitlab/ci/trace/stream.rb
@@ -90,8 +90,25 @@ module Gitlab
# so we just silently ignore error for now
end
+ def extract_sections
+ return [] unless valid?
+
+ lines = to_enum(:each_line_with_pos)
+ parser = SectionParser.new(lines)
+
+ parser.parse!
+ parser.sections
+ end
+
private
+ def each_line_with_pos
+ stream.seek(0, IO::SEEK_SET)
+ stream.each_line do |line|
+ yield [line, stream.pos - line.bytesize]
+ end
+ end
+
def read_last_lines(limit)
to_enum(:reverse_line).first(limit).reverse.join
end
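Rough shape of what the new `extract_sections` plumbing returns for a trace containing one complete section (all values illustrative):

    Gitlab::Ci::Trace.new(job).extract_sections
    # => [{ name: 'build_script',
    #        byte_start: 120, byte_end: 2048,
    #        date_start: 2018-03-07 09:00:00 UTC,
    #        date_end:   2018-03-07 09:00:20 UTC }]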
diff --git a/lib/gitlab/ci/yaml_processor.rb b/lib/gitlab/ci/yaml_processor.rb
index 0bd78b03448..a7285ac8f9d 100644
--- a/lib/gitlab/ci/yaml_processor.rb
+++ b/lib/gitlab/ci/yaml_processor.rb
@@ -85,7 +85,7 @@ module Gitlab
begin
Gitlab::Ci::YamlProcessor.new(content)
nil
- rescue ValidationError, Psych::SyntaxError => e
+ rescue ValidationError => e
e.message
end
end
diff --git a/lib/gitlab/closing_issue_extractor.rb b/lib/gitlab/closing_issue_extractor.rb
index 243c1f1394d..7e7aaeeaa17 100644
--- a/lib/gitlab/closing_issue_extractor.rb
+++ b/lib/gitlab/closing_issue_extractor.rb
@@ -23,7 +23,8 @@ module Gitlab
@extractor.analyze(closing_statements.join(" "))
@extractor.issues.reject do |issue|
- @extractor.project.forked_from?(issue.project) # Don't extract issues on original project
+ # Don't extract issues from the project this project was forked from
+ @extractor.project.forked_from?(issue.project)
end
end
end
diff --git a/lib/gitlab/conflict/file.rb b/lib/gitlab/conflict/file.rb
index 98dfe900044..2a0cb640a14 100644
--- a/lib/gitlab/conflict/file.rb
+++ b/lib/gitlab/conflict/file.rb
@@ -4,82 +4,29 @@ module Gitlab
include Gitlab::Routing
include IconsHelper
- MissingResolution = Class.new(ResolutionError)
-
CONTEXT_LINES = 3
- attr_reader :merge_file_result, :their_path, :our_path, :our_mode, :merge_request, :repository
-
- def initialize(merge_file_result, conflict, merge_request:)
- @merge_file_result = merge_file_result
- @their_path = conflict[:theirs][:path]
- @our_path = conflict[:ours][:path]
- @our_mode = conflict[:ours][:mode]
- @merge_request = merge_request
- @repository = merge_request.project.repository
- @match_line_headers = {}
- end
-
- def content
- merge_file_result[:data]
- end
+ attr_reader :merge_request
- def our_blob
- @our_blob ||= repository.blob_at(merge_request.diff_refs.head_sha, our_path)
- end
+ # 'raw' holds the Gitlab::Git::Conflict::File that this instance wraps
+ attr_reader :raw
- def type
- lines unless @type
+ delegate :type, :content, :their_path, :our_path, :our_mode, :our_blob, :repository, to: :raw
- @type.inquiry
+ def initialize(raw, merge_request:)
+ @raw = raw
+ @merge_request = merge_request
+ @match_line_headers = {}
end
- # Array of Gitlab::Diff::Line objects
def lines
return @lines if defined?(@lines)
- begin
- @type = 'text'
- @lines = Gitlab::Conflict::Parser.new.parse(content,
- our_path: our_path,
- their_path: their_path,
- parent_file: self)
- rescue Gitlab::Conflict::Parser::ParserError
- @type = 'text-editor'
- @lines = nil
- end
+ @lines = raw.lines.nil? ? nil : map_raw_lines(raw.lines)
end
def resolve_lines(resolution)
- section_id = nil
-
- lines.map do |line|
- unless line.type
- section_id = nil
- next line
- end
-
- section_id ||= line_code(line)
-
- case resolution[section_id]
- when 'head'
- next unless line.type == 'new'
- when 'origin'
- next unless line.type == 'old'
- else
- raise MissingResolution, "Missing resolution for section ID: #{section_id}"
- end
-
- line
- end.compact
- end
-
- def resolve_content(resolution)
- if resolution == content
- raise MissingResolution, "Resolved content has no changes for file #{our_path}"
- end
-
- resolution
+ map_raw_lines(raw.resolve_lines(resolution))
end
def highlight_lines!
@@ -163,7 +110,7 @@ module Gitlab
end
def line_code(line)
- Gitlab::Diff::LineCode.generate(our_path, line.new_pos, line.old_pos)
+ Gitlab::Git.diff_line_code(our_path, line.new_pos, line.old_pos)
end
def create_match_line(line)
@@ -227,15 +174,14 @@ module Gitlab
new_path: our_path)
end
- # Don't try to print merge_request or repository.
- def inspect
- instance_variables = [:merge_file_result, :their_path, :our_path, :our_mode, :type].map do |instance_variable|
- value = instance_variable_get("@#{instance_variable}")
+ private
- "#{instance_variable}=\"#{value}\""
+ def map_raw_lines(raw_lines)
+ raw_lines.map do |raw_line|
+ Gitlab::Diff::Line.new(raw_line[:full_line], raw_line[:type],
+ raw_line[:line_obj_index], raw_line[:line_old],
+ raw_line[:line_new], parent_file: self)
end
-
- "#<#{self.class} #{instance_variables.join(' ')}>"
end
end
end
diff --git a/lib/gitlab/conflict/file_collection.rb b/lib/gitlab/conflict/file_collection.rb
index 90f83e0f810..0a3ae2c3760 100644
--- a/lib/gitlab/conflict/file_collection.rb
+++ b/lib/gitlab/conflict/file_collection.rb
@@ -1,48 +1,32 @@
module Gitlab
module Conflict
class FileCollection
- ConflictSideMissing = Class.new(StandardError)
-
- attr_reader :merge_request, :our_commit, :their_commit, :project
-
- delegate :repository, to: :project
-
- class << self
- # We can only write when getting the merge index from the source
- # project, because we will write to that project. We don't use this all
- # the time because this fetches a ref into the source project, which
- # isn't needed for reading.
- def for_resolution(merge_request)
- project = merge_request.source_project
-
- new(merge_request, project).tap do |file_collection|
- project
- .repository
- .with_repo_branch_commit(merge_request.target_project.repository.raw_repository, merge_request.target_branch) do
-
- yield file_collection
- end
- end
- end
-
- # We don't need to do `with_repo_branch_commit` here, because the target
- # project always fetches source refs when creating merge request diffs.
- def read_only(merge_request)
- new(merge_request, merge_request.target_project)
- end
+ attr_reader :merge_request, :resolver
+
+ def initialize(merge_request)
+ our_commit = merge_request.source_branch_head.raw
+ their_commit = merge_request.target_branch_head.raw
+ target_repo = merge_request.target_project.repository.raw
+ @source_repo = merge_request.source_project.repository.raw
+ @resolver = Gitlab::Git::Conflict::Resolver.new(target_repo, our_commit.id, their_commit.id)
+ @merge_request = merge_request
end
- def merge_index
- @merge_index ||= repository.rugged.merge_commits(our_commit, their_commit)
+ def resolve(user, commit_message, files)
+ msg = commit_message || default_commit_message
+ resolution = Gitlab::Git::Conflict::Resolution.new(user, files, msg)
+ args = {
+ source_branch: merge_request.source_branch,
+ target_branch: merge_request.target_branch
+ }
+ resolver.resolve_conflicts(@source_repo, resolution, args)
+ ensure
+ @merge_request.clear_memoized_shas
end
def files
- @files ||= merge_index.conflicts.map do |conflict|
- raise ConflictSideMissing unless conflict[:theirs] && conflict[:ours]
-
- Gitlab::Conflict::File.new(merge_index.merge_file(conflict[:ours][:path]),
- conflict,
- merge_request: merge_request)
+ @files ||= resolver.conflicts.map do |conflict_file|
+ Gitlab::Conflict::File.new(conflict_file, merge_request: merge_request)
end
end
@@ -61,8 +45,8 @@ module Gitlab
end
def default_commit_message
- conflict_filenames = merge_index.conflicts.map do |conflict|
- "# #{conflict[:ours][:path]}"
+ conflict_filenames = files.map do |conflict|
+ "# #{conflict.our_path}"
end
<<EOM.chomp
@@ -72,15 +56,6 @@ Merge branch '#{merge_request.target_branch}' into '#{merge_request.source_branc
#{conflict_filenames.join("\n")}
EOM
end
-
- private
-
- def initialize(merge_request, project)
- @merge_request = merge_request
- @our_commit = merge_request.source_branch_head.raw.rugged_commit
- @their_commit = merge_request.target_branch_head.raw.rugged_commit
- @project = project
- end
end
end
end
diff --git a/lib/gitlab/conflict/parser.rb b/lib/gitlab/conflict/parser.rb
deleted file mode 100644
index e3678c914db..00000000000
--- a/lib/gitlab/conflict/parser.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-module Gitlab
- module Conflict
- class Parser
- UnresolvableError = Class.new(StandardError)
- UnmergeableFile = Class.new(UnresolvableError)
- UnsupportedEncoding = Class.new(UnresolvableError)
-
- # Recoverable errors - the conflict can be resolved in an editor, but not with
- # sections.
- ParserError = Class.new(StandardError)
- UnexpectedDelimiter = Class.new(ParserError)
- MissingEndDelimiter = Class.new(ParserError)
-
- def parse(text, our_path:, their_path:, parent_file: nil)
- validate_text!(text)
-
- line_obj_index = 0
- line_old = 1
- line_new = 1
- type = nil
- lines = []
- conflict_start = "<<<<<<< #{our_path}"
- conflict_middle = '======='
- conflict_end = ">>>>>>> #{their_path}"
-
- text.each_line.map do |line|
- full_line = line.delete("\n")
-
- if full_line == conflict_start
- validate_delimiter!(type.nil?)
-
- type = 'new'
- elsif full_line == conflict_middle
- validate_delimiter!(type == 'new')
-
- type = 'old'
- elsif full_line == conflict_end
- validate_delimiter!(type == 'old')
-
- type = nil
- elsif line[0] == '\\'
- type = 'nonewline'
- lines << Gitlab::Diff::Line.new(full_line, type, line_obj_index, line_old, line_new, parent_file: parent_file)
- else
- lines << Gitlab::Diff::Line.new(full_line, type, line_obj_index, line_old, line_new, parent_file: parent_file)
- line_old += 1 if type != 'new'
- line_new += 1 if type != 'old'
-
- line_obj_index += 1
- end
- end
-
- raise MissingEndDelimiter unless type.nil?
-
- lines
- end
-
- private
-
- def validate_text!(text)
- raise UnmergeableFile if text.blank? # Typically a binary file
- raise UnmergeableFile if text.length > 200.kilobytes
-
- text.force_encoding('UTF-8')
-
- raise UnsupportedEncoding unless text.valid_encoding?
- end
-
- def validate_delimiter!(condition)
- raise UnexpectedDelimiter unless condition
- end
- end
- end
-end
diff --git a/lib/gitlab/conflict/resolution_error.rb b/lib/gitlab/conflict/resolution_error.rb
deleted file mode 100644
index 0b61256b35a..00000000000
--- a/lib/gitlab/conflict/resolution_error.rb
+++ /dev/null
@@ -1,5 +0,0 @@
-module Gitlab
- module Conflict
- ResolutionError = Class.new(StandardError)
- end
-end
diff --git a/lib/gitlab/contributions_calendar.rb b/lib/gitlab/contributions_calendar.rb
index 0735243e021..02d3763514e 100644
--- a/lib/gitlab/contributions_calendar.rb
+++ b/lib/gitlab/contributions_calendar.rb
@@ -23,7 +23,7 @@ module Gitlab
mr_events = event_counts(date_from, :merge_requests)
.having(action: [Event::MERGED, Event::CREATED, Event::CLOSED], target_type: "MergeRequest")
note_events = event_counts(date_from, :merge_requests)
- .having(action: [Event::COMMENTED], target_type: "Note")
+ .having(action: [Event::COMMENTED], target_type: %w(Note DiffNote))
union = Gitlab::SQL::Union.new([repo_events, issue_events, mr_events, note_events])
events = Event.find_by_sql(union.to_sql).map(&:attributes)
@@ -34,6 +34,8 @@ module Gitlab
end
def events_by_date(date)
+ return Event.none unless can_read_cross_project?
+
events = Event.contributions.where(author_id: contributor.id)
.where(created_at: date.beginning_of_day..date.end_of_day)
.where(project_id: projects)
@@ -53,6 +55,10 @@ module Gitlab
private
+ def can_read_cross_project?
+ Ability.allowed?(current_user, :read_cross_project)
+ end
+
def event_counts(date_from, feature)
t = Event.arel_table
diff --git a/lib/gitlab/cross_project_access.rb b/lib/gitlab/cross_project_access.rb
new file mode 100644
index 00000000000..6eaed51b64c
--- /dev/null
+++ b/lib/gitlab/cross_project_access.rb
@@ -0,0 +1,67 @@
+module Gitlab
+ class CrossProjectAccess
+ class << self
+ delegate :add_check, :find_check, :checks,
+ to: :instance
+ end
+
+ def self.instance
+ @instance ||= new
+ end
+
+ attr_reader :checks
+
+ def initialize
+ @checks = {}
+ end
+
+ def add_check(
+ klass,
+ actions: {},
+ positive_condition: nil,
+ negative_condition: nil,
+ skip: false)
+
+ new_check = CheckInfo.new(actions,
+ positive_condition,
+ negative_condition,
+ skip
+ )
+
+ @checks[klass] ||= Gitlab::CrossProjectAccess::CheckCollection.new
+ @checks[klass].add_check(new_check)
+ recalculate_checks_for_class(klass)
+
+ @checks[klass]
+ end
+
+ def find_check(object)
+ @cached_checks ||= Hash.new do |cache, new_class|
+ parent_classes = @checks.keys.select { |existing_class| new_class <= existing_class }
+ closest_class = closest_parent(parent_classes, new_class)
+ cache[new_class] = @checks[closest_class]
+ end
+
+ @cached_checks[object.class]
+ end
+
+ private
+
+ def recalculate_checks_for_class(klass)
+ new_collection = @checks[klass]
+
+ @checks.each do |existing_class, existing_check_collection|
+ if existing_class < klass
+ existing_check_collection.add_collection(new_collection)
+ elsif klass < existing_class
+ new_collection.add_collection(existing_check_collection)
+ end
+ end
+ end
+
+ def closest_parent(classes, subject)
+ relevant_ancestors = subject.ancestors & classes
+ relevant_ancestors.first
+ end
+ end
+end
diff --git a/lib/gitlab/cross_project_access/check_collection.rb b/lib/gitlab/cross_project_access/check_collection.rb
new file mode 100644
index 00000000000..88376232065
--- /dev/null
+++ b/lib/gitlab/cross_project_access/check_collection.rb
@@ -0,0 +1,47 @@
+module Gitlab
+ class CrossProjectAccess
+ class CheckCollection
+ attr_reader :checks
+
+ def initialize
+ @checks = []
+ end
+
+ def add_collection(collection)
+ @checks |= collection.checks
+ end
+
+ def add_check(check)
+ @checks << check
+ end
+
+ def should_run?(object)
+ skips, runs = arranged_checks
+
+ # If one rule tells us to skip, we skip the cross project check
+ return false if skips.any? { |check| check.should_skip?(object) }
+
+ # If the rule isn't skipped, we run it if any of the checks says we
+ # should run
+ runs.any? { |check| check.should_run?(object) }
+ end
+
+ def arranged_checks
+ return [@skips, @runs] if @skips && @runs
+
+ @skips = []
+ @runs = []
+
+ @checks.each do |check|
+ if check.skip
+ @skips << check
+ else
+ @runs << check
+ end
+ end
+
+ [@skips, @runs]
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/cross_project_access/check_info.rb b/lib/gitlab/cross_project_access/check_info.rb
new file mode 100644
index 00000000000..e8a845c7f1e
--- /dev/null
+++ b/lib/gitlab/cross_project_access/check_info.rb
@@ -0,0 +1,66 @@
+module Gitlab
+ class CrossProjectAccess
+ class CheckInfo
+ attr_accessor :actions, :positive_condition, :negative_condition, :skip
+
+ def initialize(actions, positive_condition, negative_condition, skip)
+ @actions = actions
+ @positive_condition = positive_condition
+ @negative_condition = negative_condition
+ @skip = skip
+ end
+
+ def should_skip?(object)
+ return !should_run?(object) unless @skip
+
+ skip_for_action = @actions[current_action(object)]
+ skip_for_action = false if @actions[current_action(object)].nil?
+
+ # We need to do the opposite of what was defined in the following cases:
+ # - skip_cross_project_access_check index: true, if: -> { false }
+ # - skip_cross_project_access_check index: true, unless: -> { true }
+ if positive_condition_is_false?(object)
+ skip_for_action = !skip_for_action
+ end
+
+ if negative_condition_is_true?(object)
+ skip_for_action = !skip_for_action
+ end
+
+ skip_for_action
+ end
+
+ def should_run?(object)
+ return !should_skip?(object) if @skip
+
+ run_for_action = @actions[current_action(object)]
+ run_for_action = true if @actions[current_action(object)].nil?
+
+ # We need to do the opposite of what was defined in the following cases:
+ # - requires_cross_project_access index: true, if: -> { false }
+ # - requires_cross_project_access index: true, unless: -> { true }
+ if positive_condition_is_false?(object)
+ run_for_action = !run_for_action
+ end
+
+ if negative_condition_is_true?(object)
+ run_for_action = !run_for_action
+ end
+
+ run_for_action
+ end
+
+ def positive_condition_is_false?(object)
+ @positive_condition && !object.instance_exec(&@positive_condition)
+ end
+
+ def negative_condition_is_true?(object)
+ @negative_condition && object.instance_exec(&@negative_condition)
+ end
+
+ def current_action(object)
+ object.respond_to?(:action_name) ? object.action_name.to_sym : nil
+ end
+ end
+ end
+end
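
A short walkthrough of the inversion rules described in the comments above; the controller object is hypothetical and only needs to respond to action_name with 'index':

    # skip_cross_project_access_check index: true, if: -> { false }
    # builds a CheckInfo with skip = true whose :if condition evaluates to
    # false, so the skip is inverted and the cross-project check still runs:
    check = Gitlab::CrossProjectAccess::CheckInfo.new(
      { index: true }, # actions
      -> { false },    # positive_condition (:if)
      nil,             # negative_condition (:unless)
      true             # skip
    )

    check.should_skip?(controller) # => false
    check.should_run?(controller)  # => true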
diff --git a/lib/gitlab/cross_project_access/class_methods.rb b/lib/gitlab/cross_project_access/class_methods.rb
new file mode 100644
index 00000000000..90eac94800c
--- /dev/null
+++ b/lib/gitlab/cross_project_access/class_methods.rb
@@ -0,0 +1,48 @@
+module Gitlab
+ class CrossProjectAccess
+ module ClassMethods
+ def requires_cross_project_access(*args)
+ positive_condition, negative_condition, actions = extract_params(args)
+
+ Gitlab::CrossProjectAccess.add_check(
+ self,
+ actions: actions,
+ positive_condition: positive_condition,
+ negative_condition: negative_condition
+ )
+ end
+
+ def skip_cross_project_access_check(*args)
+ positive_condition, negative_condition, actions = extract_params(args)
+
+ Gitlab::CrossProjectAccess.add_check(
+ self,
+ actions: actions,
+ positive_condition: positive_condition,
+ negative_condition: negative_condition,
+ skip: true
+ )
+ end
+
+ private
+
+ def extract_params(args)
+ actions = {}
+ positive_condition = nil
+ negative_condition = nil
+
+ args.each do |argument|
+ if argument.is_a?(Hash)
+ positive_condition = argument.delete(:if)
+ negative_condition = argument.delete(:unless)
+ actions.merge!(argument)
+ else
+ actions[argument] = true
+ end
+ end
+
+ [positive_condition, negative_condition, actions]
+ end
+ end
+ end
+end
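
As a usage sketch, the DSL defined here is meant to be called at the class level of a controller or finder; the class, actions, and condition below are illustrative only:

    class IssuesController < ApplicationController
      extend Gitlab::CrossProjectAccess::ClassMethods

      # Register a cross-project access check for these actions, unless the
      # request is already scoped to a single project.
      requires_cross_project_access :index, :calendar, unless: -> { project? }

      # Never run the cross-project check for :destroy.
      skip_cross_project_access_check :destroy
    end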
diff --git a/lib/gitlab/current_settings.rb b/lib/gitlab/current_settings.rb
index 642f0944354..b7c596a973d 100644
--- a/lib/gitlab/current_settings.rb
+++ b/lib/gitlab/current_settings.rb
@@ -1,73 +1,79 @@
module Gitlab
module CurrentSettings
- extend self
+ class << self
+ def current_application_settings
+ if RequestStore.active?
+ RequestStore.fetch(:current_application_settings) { ensure_application_settings! }
+ else
+ ensure_application_settings!
+ end
+ end
- def current_application_settings
- if RequestStore.active?
- RequestStore.fetch(:current_application_settings) { ensure_application_settings! }
- else
- ensure_application_settings!
+ def fake_application_settings(defaults = ::ApplicationSetting.defaults)
+ Gitlab::FakeApplicationSettings.new(defaults)
end
- end
- delegate :sidekiq_throttling_enabled?, to: :current_application_settings
+ def method_missing(name, *args, &block)
+ current_application_settings.send(name, *args, &block) # rubocop:disable GitlabSecurity/PublicSend
+ end
- def fake_application_settings(defaults = ::ApplicationSetting.defaults)
- FakeApplicationSettings.new(defaults)
- end
+ def respond_to_missing?(name, include_private = false)
+ current_application_settings.respond_to?(name, include_private) || super
+ end
- private
+ private
- def ensure_application_settings!
- return in_memory_application_settings if ENV['IN_MEMORY_APPLICATION_SETTINGS'] == 'true'
+ def ensure_application_settings!
+ return in_memory_application_settings if ENV['IN_MEMORY_APPLICATION_SETTINGS'] == 'true'
- cached_application_settings || uncached_application_settings
- end
+ cached_application_settings || uncached_application_settings
+ end
- def cached_application_settings
- begin
- ::ApplicationSetting.cached
- rescue ::Redis::BaseError, ::Errno::ENOENT, ::Errno::EADDRNOTAVAIL
- # In case Redis isn't running or the Redis UNIX socket file is not available
+ def cached_application_settings
+ begin
+ ::ApplicationSetting.cached
+ rescue ::Redis::BaseError, ::Errno::ENOENT, ::Errno::EADDRNOTAVAIL
+ # In case Redis isn't running or the Redis UNIX socket file is not available
+ end
end
- end
- def uncached_application_settings
- return fake_application_settings unless connect_to_db?
+ def uncached_application_settings
+ return fake_application_settings unless connect_to_db?
- db_settings = ::ApplicationSetting.current
+ db_settings = ::ApplicationSetting.current
- # If there are pending migrations, it's possible there are columns that
- # need to be added to the application settings. To prevent Rake tasks
- # and other callers from failing, use any loaded settings and return
- # defaults for missing columns.
- if ActiveRecord::Migrator.needs_migration?
- defaults = ::ApplicationSetting.defaults
- defaults.merge!(db_settings.attributes.symbolize_keys) if db_settings.present?
- return fake_application_settings(defaults)
- end
+ # If there are pending migrations, it's possible there are columns that
+ # need to be added to the application settings. To prevent Rake tasks
+ # and other callers from failing, use any loaded settings and return
+ # defaults for missing columns.
+ if ActiveRecord::Migrator.needs_migration?
+ defaults = ::ApplicationSetting.defaults
+ defaults.merge!(db_settings.attributes.symbolize_keys) if db_settings.present?
+ return fake_application_settings(defaults)
+ end
- return db_settings if db_settings.present?
+ return db_settings if db_settings.present?
- ::ApplicationSetting.create_from_defaults || in_memory_application_settings
- end
+ ::ApplicationSetting.create_from_defaults || in_memory_application_settings
+ end
- def in_memory_application_settings
- @in_memory_application_settings ||= ::ApplicationSetting.new(::ApplicationSetting.defaults)
- rescue ActiveRecord::StatementInvalid, ActiveRecord::UnknownAttributeError
- # In case migrations haven't run and the application_settings table is
- # not created yet, we fall back to a simple OpenStruct
- fake_application_settings
- end
+ def in_memory_application_settings
+ @in_memory_application_settings ||= ::ApplicationSetting.new(::ApplicationSetting.defaults) # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ rescue ActiveRecord::StatementInvalid, ActiveRecord::UnknownAttributeError
+ # In case migrations haven't run and the application_settings table is
+ # not created yet, we fall back to a simple OpenStruct
+ fake_application_settings
+ end
- def connect_to_db?
- # When the DBMS is not available, an exception (e.g. PG::ConnectionBad) is raised
- active_db_connection = ActiveRecord::Base.connection.active? rescue false
+ def connect_to_db?
+ # When the DBMS is not available, an exception (e.g. PG::ConnectionBad) is raised
+ active_db_connection = ActiveRecord::Base.connection.active? rescue false
- active_db_connection &&
- ActiveRecord::Base.connection.table_exists?('application_settings')
- rescue ActiveRecord::NoDatabaseError
- false
+ active_db_connection &&
+ ActiveRecord::Base.connection.table_exists?('application_settings')
+ rescue ActiveRecord::NoDatabaseError
+ false
+ end
end
end
end
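
With method_missing delegating to the loaded settings, callers can read individual attributes directly off the module; a minimal sketch (signup_enabled? stands in for any ApplicationSetting attribute):

    Gitlab::CurrentSettings.signup_enabled?
    # behaves the same as
    Gitlab::CurrentSettings.current_application_settings.signup_enabled?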
diff --git a/lib/gitlab/cycle_analytics/base_event_fetcher.rb b/lib/gitlab/cycle_analytics/base_event_fetcher.rb
index ab115afcaa5..e3e3767cc75 100644
--- a/lib/gitlab/cycle_analytics/base_event_fetcher.rb
+++ b/lib/gitlab/cycle_analytics/base_event_fetcher.rb
@@ -56,7 +56,9 @@ module Gitlab
end
def allowed_ids
- nil
+ @allowed_ids ||= allowed_ids_finder_class
+ .new(@options[:current_user], project_id: @project.id)
+ .execute.where(id: event_result_ids).pluck(:id)
end
def event_result_ids
diff --git a/lib/gitlab/cycle_analytics/base_query.rb b/lib/gitlab/cycle_analytics/base_query.rb
index 58729d3ced8..86d708be0d6 100644
--- a/lib/gitlab/cycle_analytics/base_query.rb
+++ b/lib/gitlab/cycle_analytics/base_query.rb
@@ -8,15 +8,15 @@ module Gitlab
private
def base_query
- @base_query ||= stage_query
+ @base_query ||= stage_query(@project.id) # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
- def stage_query
+ def stage_query(project_ids)
query = mr_closing_issues_table.join(issue_table).on(issue_table[:id].eq(mr_closing_issues_table[:issue_id]))
.join(issue_metrics_table).on(issue_table[:id].eq(issue_metrics_table[:issue_id]))
- .where(issue_table[:project_id].eq(@project.id))
- .where(issue_table[:deleted_at].eq(nil))
- .where(issue_table[:created_at].gteq(@options[:from]))
+ .project(issue_table[:project_id].as("project_id"))
+ .where(issue_table[:project_id].in(project_ids))
+ .where(issue_table[:created_at].gteq(@options[:from])) # rubocop:disable Gitlab/ModuleWithInstanceVariables
# Load merge_requests
query = query.join(mr_table, Arel::Nodes::OuterJoin)
diff --git a/lib/gitlab/cycle_analytics/base_stage.rb b/lib/gitlab/cycle_analytics/base_stage.rb
index cac31ea8cff..038d5a19bc4 100644
--- a/lib/gitlab/cycle_analytics/base_stage.rb
+++ b/lib/gitlab/cycle_analytics/base_stage.rb
@@ -21,17 +21,28 @@ module Gitlab
end
def median
- cte_table = Arel::Table.new("cte_table_for_#{name}")
+ BatchLoader.for(@project.id).batch(key: name) do |project_ids, loader|
+ cte_table = Arel::Table.new("cte_table_for_#{name}")
- # Build a `SELECT` query. We find the first of the `end_time_attrs` that isn't `NULL` (call this end_time).
- # Next, we find the first of the start_time_attrs that isn't `NULL` (call this start_time).
- # We compute the (end_time - start_time) interval, and give it an alias based on the current
- # cycle analytics stage.
- interval_query = Arel::Nodes::As.new(
- cte_table,
- subtract_datetimes(base_query.dup, start_time_attrs, end_time_attrs, name.to_s))
+ # Build a `SELECT` query. We find the first of the `end_time_attrs` that isn't `NULL` (call this end_time).
+ # Next, we find the first of the start_time_attrs that isn't `NULL` (call this start_time).
+ # We compute the (end_time - start_time) interval, and give it an alias based on the current
+ # cycle analytics stage.
+ interval_query = Arel::Nodes::As.new(cte_table,
+ subtract_datetimes(stage_query(project_ids), start_time_attrs, end_time_attrs, name.to_s))
- median_datetime(cte_table, interval_query, name)
+ if project_ids.one?
+ loader.call(@project.id, median_datetime(cte_table, interval_query, name))
+ else
+ begin
+ median_datetimes(cte_table, interval_query, name, :project_id)&.each do |project_id, median|
+ loader.call(project_id, median)
+ end
+ rescue NotSupportedError
+ {}
+ end
+ end
+ end
end
def name
diff --git a/lib/gitlab/cycle_analytics/code_event_fetcher.rb b/lib/gitlab/cycle_analytics/code_event_fetcher.rb
index d5bf6149749..06357c9b377 100644
--- a/lib/gitlab/cycle_analytics/code_event_fetcher.rb
+++ b/lib/gitlab/cycle_analytics/code_event_fetcher.rb
@@ -1,8 +1,6 @@
module Gitlab
module CycleAnalytics
class CodeEventFetcher < BaseEventFetcher
- include MergeRequestAllowed
-
def initialize(*args)
@projections = [mr_table[:title],
mr_table[:iid],
@@ -20,6 +18,10 @@ module Gitlab
def serialize(event)
AnalyticsMergeRequestSerializer.new(project: @project).represent(event)
end
+
+ def allowed_ids_finder_class
+ MergeRequestsFinder
+ end
end
end
end
diff --git a/lib/gitlab/cycle_analytics/issue_allowed.rb b/lib/gitlab/cycle_analytics/issue_allowed.rb
deleted file mode 100644
index a7652a70641..00000000000
--- a/lib/gitlab/cycle_analytics/issue_allowed.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-module Gitlab
- module CycleAnalytics
- module IssueAllowed
- def allowed_ids
- @allowed_ids ||= IssuesFinder.new(@options[:current_user], project_id: @project.id).execute.where(id: event_result_ids).pluck(:id)
- end
- end
- end
-end
diff --git a/lib/gitlab/cycle_analytics/issue_event_fetcher.rb b/lib/gitlab/cycle_analytics/issue_event_fetcher.rb
index 3df9cbdcfce..1754f91dccb 100644
--- a/lib/gitlab/cycle_analytics/issue_event_fetcher.rb
+++ b/lib/gitlab/cycle_analytics/issue_event_fetcher.rb
@@ -1,8 +1,6 @@
module Gitlab
module CycleAnalytics
class IssueEventFetcher < BaseEventFetcher
- include IssueAllowed
-
def initialize(*args)
@projections = [issue_table[:title],
issue_table[:iid],
@@ -18,6 +16,10 @@ module Gitlab
def serialize(event)
AnalyticsIssueSerializer.new(project: @project).represent(event)
end
+
+ def allowed_ids_finder_class
+ IssuesFinder
+ end
end
end
end
diff --git a/lib/gitlab/cycle_analytics/merge_request_allowed.rb b/lib/gitlab/cycle_analytics/merge_request_allowed.rb
deleted file mode 100644
index 28f6db44759..00000000000
--- a/lib/gitlab/cycle_analytics/merge_request_allowed.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-module Gitlab
- module CycleAnalytics
- module MergeRequestAllowed
- def allowed_ids
- @allowed_ids ||= MergeRequestsFinder.new(@options[:current_user], project_id: @project.id).execute.where(id: event_result_ids).pluck(:id)
- end
- end
- end
-end
diff --git a/lib/gitlab/cycle_analytics/plan_event_fetcher.rb b/lib/gitlab/cycle_analytics/plan_event_fetcher.rb
index 2479b4a7706..086203b9ccc 100644
--- a/lib/gitlab/cycle_analytics/plan_event_fetcher.rb
+++ b/lib/gitlab/cycle_analytics/plan_event_fetcher.rb
@@ -3,7 +3,6 @@ module Gitlab
class PlanEventFetcher < BaseEventFetcher
def initialize(*args)
@projections = [mr_diff_table[:id],
- mr_diff_table[:st_commits],
issue_metrics_table[:first_mentioned_in_commit_at]]
super(*args)
@@ -19,6 +18,10 @@ module Gitlab
private
+ def allowed_ids
+ nil
+ end
+
def merge_request_diff_commits
@merge_request_diff_commits ||=
MergeRequestDiffCommit
@@ -37,12 +40,7 @@ module Gitlab
def first_time_reference_commit(event)
return nil unless event && merge_request_diff_commits
- commits =
- if event['st_commits'].present?
- YAML.load(event['st_commits'])
- else
- merge_request_diff_commits[event['id'].to_i]
- end
+ commits = merge_request_diff_commits[event['id'].to_i]
return nil if commits.blank?
diff --git a/lib/gitlab/cycle_analytics/production_helper.rb b/lib/gitlab/cycle_analytics/production_helper.rb
index d693443bfa4..d0ca62e46e4 100644
--- a/lib/gitlab/cycle_analytics/production_helper.rb
+++ b/lib/gitlab/cycle_analytics/production_helper.rb
@@ -1,8 +1,10 @@
module Gitlab
module CycleAnalytics
module ProductionHelper
- def stage_query
- super.where(mr_metrics_table[:first_deployed_to_production_at].gteq(@options[:from]))
+ def stage_query(project_ids)
+ super(project_ids)
+ .where(mr_metrics_table[:first_deployed_to_production_at]
+ .gteq(@options[:from])) # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
end
end
diff --git a/lib/gitlab/cycle_analytics/review_event_fetcher.rb b/lib/gitlab/cycle_analytics/review_event_fetcher.rb
index 4c7b3f4467f..dada819a2a8 100644
--- a/lib/gitlab/cycle_analytics/review_event_fetcher.rb
+++ b/lib/gitlab/cycle_analytics/review_event_fetcher.rb
@@ -1,8 +1,6 @@
module Gitlab
module CycleAnalytics
class ReviewEventFetcher < BaseEventFetcher
- include MergeRequestAllowed
-
def initialize(*args)
@projections = [mr_table[:title],
mr_table[:iid],
@@ -14,9 +12,15 @@ module Gitlab
super(*args)
end
+ private
+
def serialize(event)
AnalyticsMergeRequestSerializer.new(project: @project).represent(event)
end
+
+ def allowed_ids_finder_class
+ MergeRequestsFinder
+ end
end
end
end
diff --git a/lib/gitlab/cycle_analytics/staging_event_fetcher.rb b/lib/gitlab/cycle_analytics/staging_event_fetcher.rb
index 36c0260dbfe..2f014153ca5 100644
--- a/lib/gitlab/cycle_analytics/staging_event_fetcher.rb
+++ b/lib/gitlab/cycle_analytics/staging_event_fetcher.rb
@@ -22,6 +22,10 @@ module Gitlab
private
+ def allowed_ids
+ nil
+ end
+
def serialize(event)
AnalyticsBuildSerializer.new.represent(event['build'])
end
diff --git a/lib/gitlab/cycle_analytics/test_stage.rb b/lib/gitlab/cycle_analytics/test_stage.rb
index 2b5f72bef89..0e9d235ca79 100644
--- a/lib/gitlab/cycle_analytics/test_stage.rb
+++ b/lib/gitlab/cycle_analytics/test_stage.rb
@@ -25,11 +25,11 @@ module Gitlab
_("Total test time for all commits/merges")
end
- def stage_query
+ def stage_query(project_ids)
if @options[:branch]
- super.where(build_table[:ref].eq(@options[:branch]))
+ super(project_ids).where(build_table[:ref].eq(@options[:branch]))
else
- super
+ super(project_ids)
end
end
end
diff --git a/lib/gitlab/cycle_analytics/usage_data.rb b/lib/gitlab/cycle_analytics/usage_data.rb
new file mode 100644
index 00000000000..5122e3417ca
--- /dev/null
+++ b/lib/gitlab/cycle_analytics/usage_data.rb
@@ -0,0 +1,72 @@
+module Gitlab
+ module CycleAnalytics
+ class UsageData
+ PROJECTS_LIMIT = 10
+
+ attr_reader :projects, :options
+
+ def initialize
+ @projects = Project.sorted_by_activity.limit(PROJECTS_LIMIT)
+ @options = { from: 7.days.ago }
+ end
+
+ def to_json
+ total = 0
+
+ values =
+ medians_per_stage.each_with_object({}) do |(stage_name, medians), hsh|
+ calculations = stage_values(medians)
+
+ total += calculations.values.compact.sum
+ hsh[stage_name] = calculations
+ end
+
+ values[:total] = total
+
+ { avg_cycle_analytics: values }
+ end
+
+ private
+
+ def medians_per_stage
+ projects.each_with_object({}) do |project, hsh|
+ ::CycleAnalytics.new(project, options).all_medians_per_stage.each do |stage_name, median|
+ hsh[stage_name] ||= []
+ hsh[stage_name] << median
+ end
+ end
+ end
+
+ def stage_values(medians)
+ medians = medians.map(&:presence).compact
+ average = calc_average(medians)
+
+ {
+ average: average,
+ sd: standard_deviation(medians, average),
+ missing: projects.length - medians.length
+ }
+ end
+
+ def calc_average(values)
+ return if values.empty?
+
+ (values.sum / values.length).to_i
+ end
+
+ def standard_deviation(values, average)
+ Math.sqrt(sample_variance(values, average)).to_i
+ end
+
+ def sample_variance(values, average)
+ return 0 if values.length <= 1
+
+ sum = values.inject(0) do |acc, val|
+ acc + (val - average)**2
+ end
+
+ sum / (values.length - 1)
+ end
+ end
+ end
+end
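
To illustrate the stage statistics above: if only three of the ten sampled projects produce medians for a stage, say 10, 20, and 30 seconds, then:

    # average            = (10 + 20 + 30) / 3                               = 20
    # sample variance    = ((10-20)**2 + (20-20)**2 + (30-20)**2) / (3 - 1)  = 100
    # standard deviation = Math.sqrt(100).to_i                               = 10
    # missing            = projects.length - medians.length = 10 - 3         = 7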
diff --git a/lib/gitlab/daemon.rb b/lib/gitlab/daemon.rb
index dfd17e35707..633de9f9776 100644
--- a/lib/gitlab/daemon.rb
+++ b/lib/gitlab/daemon.rb
@@ -2,6 +2,7 @@ module Gitlab
class Daemon
def self.initialize_instance(*args)
raise "#{name} singleton instance already initialized" if @instance
+
@instance = new(*args)
Kernel.at_exit(&@instance.method(:stop))
@instance
@@ -43,7 +44,7 @@ module Gitlab
if thread
thread.wakeup if thread.alive?
- thread.join
+ thread.join unless Thread.current == thread
@thread = nil
end
end
diff --git a/lib/gitlab/data_builder/push.rb b/lib/gitlab/data_builder/push.rb
index 4ab5b3455a5..c169c8fe135 100644
--- a/lib/gitlab/data_builder/push.rb
+++ b/lib/gitlab/data_builder/push.rb
@@ -64,8 +64,11 @@ module Gitlab
# For performance reasons, at most the 20 latest commits
# will be passed as post-receive hook data.
- commit_attrs = commits_limited.map do |commit|
- commit.hook_attrs(with_changed_files: true)
+ # n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/38259
+ commit_attrs = Gitlab::GitalyClient.allow_n_plus_1_calls do
+ commits_limited.map do |commit|
+ commit.hook_attrs(with_changed_files: true)
+ end
end
type = Gitlab::Git.tag_ref?(ref) ? 'tag_push' : 'push'
@@ -83,7 +86,7 @@ module Gitlab
user_name: user.name,
user_username: user.username,
user_email: user.email,
- user_avatar: user.avatar_url,
+ user_avatar: user.avatar_url(only_path: false),
project_id: project.id,
project: project.hook_attrs,
commits: commit_attrs,
diff --git a/lib/gitlab/database.rb b/lib/gitlab/database.rb
index a6ec75da385..e51794fef99 100644
--- a/lib/gitlab/database.rb
+++ b/lib/gitlab/database.rb
@@ -4,6 +4,10 @@ module Gitlab
# https://www.postgresql.org/docs/9.2/static/datatype-numeric.html
# http://dev.mysql.com/doc/refman/5.7/en/integer-types.html
MAX_INT_VALUE = 2147483647
+ # The max value between MySQL's TIMESTAMP and PostgreSQL's timestampz:
+ # https://www.postgresql.org/docs/9.1/static/datatype-datetime.html
+ # https://dev.mysql.com/doc/refman/5.7/en/datetime.html
+ MAX_TIMESTAMP_VALUE = Time.at((1 << 31) - 1).freeze
def self.config
ActiveRecord::Base.configurations[Rails.env]
@@ -29,6 +33,15 @@ module Gitlab
adapter_name.casecmp('postgresql').zero?
end
+ # Overridden in EE
+ def self.read_only?
+ false
+ end
+
+ def self.read_write?
+ !self.read_only?
+ end
+
def self.version
database_version.match(/\A(?:PostgreSQL |)([^\s]+).*\z/)[1]
end
@@ -37,6 +50,10 @@ module Gitlab
postgresql? && version.to_f >= 9.3
end
+ def self.replication_slots_supported?
+ postgresql? && version.to_f >= 9.4
+ end
+
def self.nulls_last_order(field, direction = 'ASC')
order = "#{field} #{direction}"
@@ -95,20 +112,51 @@ module Gitlab
end
end
- def self.bulk_insert(table, rows)
+ # Bulk inserts a number of rows into a table, optionally returning their
+ # IDs.
+ #
+ # table - The name of the table to insert the rows into.
+ # rows - An Array of Hash instances, each mapping the columns to their
+ # values.
+ # return_ids - When set to true the return value will be an Array of IDs of
+ # the inserted rows. This only works on PostgreSQL.
+ # disable_quote - A key or an Array of keys to exclude from quoting (you
+ # become responsible for protecting these keys from SQL
+ # injection!)
+ def self.bulk_insert(table, rows, return_ids: false, disable_quote: [])
return if rows.empty?
keys = rows.first.keys
columns = keys.map { |key| connection.quote_column_name(key) }
+ return_ids = false if mysql?
+ disable_quote = Array(disable_quote).to_set
tuples = rows.map do |row|
- row.values_at(*keys).map { |value| connection.quote(value) }
+ keys.map do |k|
+ disable_quote.include?(k) ? row[k] : connection.quote(row[k])
+ end
end
- connection.execute <<-EOF
+ sql = <<-EOF
INSERT INTO #{table} (#{columns.join(', ')})
VALUES #{tuples.map { |tuple| "(#{tuple.join(', ')})" }.join(', ')}
EOF
+
+ if return_ids
+ sql << 'RETURNING id'
+ end
+
+ result = connection.execute(sql)
+
+ if return_ids
+ result.values.map { |tuple| tuple[0].to_i }
+ else
+ []
+ end
+ end
+
+ def self.sanitize_timestamp(timestamp)
+ MAX_TIMESTAMP_VALUE > timestamp ? timestamp : MAX_TIMESTAMP_VALUE.dup
end
# pool_size - The size of the DB pool.
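
A minimal sketch of the extended bulk_insert signature; the table and column names are illustrative, and return_ids only returns IDs on PostgreSQL (an empty Array is returned on MySQL):

    rows = [
      { title: 'foo', project_id: 1 },
      { title: 'bar', project_id: 1 }
    ]

    ids = Gitlab::Database.bulk_insert('labels', rows, return_ids: true)
    # => e.g. [42, 43] on PostgreSQL, [] on MySQL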
diff --git a/lib/gitlab/database/grant.rb b/lib/gitlab/database/grant.rb
index aee3981e79a..d32837f5793 100644
--- a/lib/gitlab/database/grant.rb
+++ b/lib/gitlab/database/grant.rb
@@ -6,28 +6,46 @@ module Gitlab
if Database.postgresql?
'information_schema.role_table_grants'
else
- 'mysql.user'
+ 'information_schema.schema_privileges'
end
- def self.scope_to_current_user
- if Database.postgresql?
- where('grantee = user')
- else
- where("CONCAT(User, '@', Host) = current_user()")
- end
- end
-
# Returns true if the current user can create and execute triggers on the
# given table.
def self.create_and_execute_trigger?(table)
- priv =
- if Database.postgresql?
- where(privilege_type: 'TRIGGER', table_name: table)
- else
- where(Trigger_priv: 'Y')
+ if Database.postgresql?
+ # We _must not_ use quote_table_name, as this will produce double
+ # quotes on PostgreSQL, while "has_table_privilege" requires single
+ # quotes.
+ quoted_table = connection.quote(table)
+
+ begin
+ from(nil)
+ .pluck("has_table_privilege(#{quoted_table}, 'TRIGGER')")
+ .first
+ rescue ActiveRecord::StatementInvalid
+ # This error is raised when using a non-existing table name. In this
+ # case we just want to return false as a user technically can't
+ # create triggers for such a table.
+ false
end
+ else
+ queries = [
+ Grant.select(1)
+ .from('information_schema.user_privileges')
+ .where("PRIVILEGE_TYPE = 'SUPER'")
+ .where("GRANTEE = CONCAT('\\'', REPLACE(CURRENT_USER(), '@', '\\'@\\''), '\\'')"),
+
+ Grant.select(1)
+ .from('information_schema.schema_privileges')
+ .where("PRIVILEGE_TYPE = 'TRIGGER'")
+ .where('TABLE_SCHEMA = ?', Gitlab::Database.database_name)
+ .where("GRANTEE = CONCAT('\\'', REPLACE(CURRENT_USER(), '@', '\\'@\\''), '\\'')")
+ ]
- priv.scope_to_current_user.any?
+ union = SQL::Union.new(queries).to_sql
+
+ Grant.from("(#{union}) privs").any?
+ end
end
end
end
diff --git a/lib/gitlab/database/median.rb b/lib/gitlab/database/median.rb
index 059054ac9ff..74fed447289 100644
--- a/lib/gitlab/database/median.rb
+++ b/lib/gitlab/database/median.rb
@@ -2,18 +2,14 @@
module Gitlab
module Database
module Median
+ NotSupportedError = Class.new(StandardError)
+
def median_datetime(arel_table, query_so_far, column_sym)
- median_queries =
- if Gitlab::Database.postgresql?
- pg_median_datetime_sql(arel_table, query_so_far, column_sym)
- elsif Gitlab::Database.mysql?
- mysql_median_datetime_sql(arel_table, query_so_far, column_sym)
- end
-
- results = Array.wrap(median_queries).map do |query|
- ActiveRecord::Base.connection.execute(query)
- end
- extract_median(results).presence
+ extract_median(execute_queries(arel_table, query_so_far, column_sym)).presence
+ end
+
+ def median_datetimes(arel_table, query_so_far, column_sym, partition_column)
+ extract_medians(execute_queries(arel_table, query_so_far, column_sym, partition_column)).presence
end
def extract_median(results)
@@ -21,13 +17,21 @@ module Gitlab
if Gitlab::Database.postgresql?
result = result.first.presence
- median = result['median'] if result
- median.to_f if median
+
+ result['median']&.to_f if result
elsif Gitlab::Database.mysql?
result.to_a.flatten.first
end
end
+ def extract_medians(results)
+ median_values = results.compact.first.values
+
+ median_values.each_with_object({}) do |(id, median), hash|
+ hash[id.to_i] = median&.to_f
+ end
+ end
+
def mysql_median_datetime_sql(arel_table, query_so_far, column_sym)
query = arel_table
.from(arel_table.project(Arel.sql('*')).order(arel_table[column_sym]).as(arel_table.table_name))
@@ -53,7 +57,7 @@ module Gitlab
]
end
- def pg_median_datetime_sql(arel_table, query_so_far, column_sym)
+ def pg_median_datetime_sql(arel_table, query_so_far, column_sym, partition_column = nil)
# Create a CTE with the column we're operating on, row number (after sorting by the column
# we're operating on), and count of the table we're operating on (duplicated across) all rows
# of the CTE. For example, if we're looking to find the median of the `projects.star_count`
@@ -64,41 +68,107 @@ module Gitlab
# 5 | 1 | 3
# 9 | 2 | 3
# 15 | 3 | 3
+ #
+ # If a partition column is used we will do the same operation, but for separate partitions.
+ # When that happens the CTE might look like this:
+ #
+ # project_id | star_count | row_id | ct
+ # ------------+------------+--------+----
+ # 1 | 5 | 1 | 2
+ # 1 | 9 | 2 | 2
+ # 2 | 10 | 1 | 3
+ # 2 | 15 | 2 | 3
+ # 2 | 20 | 3 | 3
cte_table = Arel::Table.new("ordered_records")
+
cte = Arel::Nodes::As.new(
cte_table,
- arel_table
- .project(
- arel_table[column_sym].as(column_sym.to_s),
- Arel::Nodes::Over.new(Arel::Nodes::NamedFunction.new("row_number", []),
- Arel::Nodes::Window.new.order(arel_table[column_sym])).as('row_id'),
- arel_table.project("COUNT(1)").as('ct')).
+ arel_table.project(*rank_rows(arel_table, column_sym, partition_column)).
# Disallow negative values
where(arel_table[column_sym].gteq(zero_interval)))
# From the CTE, select either the middle row or the middle two rows (this is accomplished
# by 'where cte.row_id between cte.ct / 2.0 AND cte.ct / 2.0 + 1'). Find the average of the
# selected rows, and this is the median value.
- cte_table.project(average([extract_epoch(cte_table[column_sym])], "median"))
- .where(
- Arel::Nodes::Between.new(
- cte_table[:row_id],
- Arel::Nodes::And.new(
- [(cte_table[:ct] / Arel.sql('2.0')),
- (cte_table[:ct] / Arel.sql('2.0') + 1)]
+ result =
+ cte_table
+ .project(*median_projections(cte_table, column_sym, partition_column))
+ .where(
+ Arel::Nodes::Between.new(
+ cte_table[:row_id],
+ Arel::Nodes::And.new(
+ [(cte_table[:ct] / Arel.sql('2.0')),
+ (cte_table[:ct] / Arel.sql('2.0') + 1)]
+ )
)
)
- )
- .with(query_so_far, cte)
- .to_sql
+ .with(query_so_far, cte)
+
+ result.group(cte_table[partition_column]).order(cte_table[partition_column]) if partition_column
+
+ result.to_sql
end
private
+ def median_queries(arel_table, query_so_far, column_sym, partition_column = nil)
+ if Gitlab::Database.postgresql?
+ pg_median_datetime_sql(arel_table, query_so_far, column_sym, partition_column)
+ elsif Gitlab::Database.mysql?
+ raise NotSupportedError, "partition_column is not supported for MySQL" if partition_column
+
+ mysql_median_datetime_sql(arel_table, query_so_far, column_sym)
+ end
+ end
+
+ def execute_queries(arel_table, query_so_far, column_sym, partition_column = nil)
+ queries = median_queries(arel_table, query_so_far, column_sym, partition_column)
+
+ Array.wrap(queries).map { |query| ActiveRecord::Base.connection.execute(query) }
+ end
+
def average(args, as)
Arel::Nodes::NamedFunction.new("AVG", args, as)
end
+ def rank_rows(arel_table, column_sym, partition_column)
+ column_row = arel_table[column_sym].as(column_sym.to_s)
+
+ if partition_column
+ partition_row = arel_table[partition_column]
+ row_id =
+ Arel::Nodes::Over.new(
+ Arel::Nodes::NamedFunction.new('rank', []),
+ Arel::Nodes::Window.new.partition(arel_table[partition_column])
+ .order(arel_table[column_sym])
+ ).as('row_id')
+
+ count = arel_table.from(arel_table.alias)
+ .project('COUNT(*)')
+ .where(arel_table[partition_column].eq(arel_table.alias[partition_column]))
+ .as('ct')
+
+ [partition_row, column_row, row_id, count]
+ else
+ row_id =
+ Arel::Nodes::Over.new(
+ Arel::Nodes::NamedFunction.new('row_number', []),
+ Arel::Nodes::Window.new.order(arel_table[column_sym])
+ ).as('row_id')
+
+ count = arel_table.project("COUNT(1)").as('ct')
+
+ [column_row, row_id, count]
+ end
+ end
+
+ def median_projections(table, column_sym, partition_column)
+ projections = []
+ projections << table[partition_column] if partition_column
+ projections << average([extract_epoch(table[column_sym])], "median")
+ projections
+ end
+
def extract_epoch(arel_attribute)
Arel.sql(%Q{EXTRACT(EPOCH FROM "#{arel_attribute.relation.name}"."#{arel_attribute.name}")})
end
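
With a partition column the helpers now compute one median per project, which is what the batched cycle analytics stage above relies on; a hedged sketch of the expected return shape (PostgreSQL only, since partitioning raises NotSupportedError on MySQL):

    medians = median_datetimes(cte_table, interval_query, :staging, :project_id)
    # => { 1 => 42.0, 2 => 128.5 }   # project_id => median duration in seconds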
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index 2c35da8f1aa..dbe6259fce7 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -220,6 +220,15 @@ module Gitlab
# column - The name of the column to update.
# value - The value for the column.
#
+ # The `value` argument is typically a literal. To perform a computed
+ # update, an Arel literal can be used instead:
+ #
+ # update_value = Arel.sql('bar * baz')
+ #
+ # update_column_in_batches(:projects, :foo, update_value) do |table, query|
+ # query.where(table[:some_column].eq('hello'))
+ # end
+ #
# Rubocop's Metrics/AbcSize metric is disabled for this method as Rubocop
# determines this method to be too complex while there's no way to make it
# less "complex" without introducing extra methods (which actually will
@@ -376,10 +385,27 @@ module Gitlab
# necessary since we copy over old values further down.
change_column_default(table, new, old_col.default) if old_col.default
- trigger_name = rename_trigger_name(table, old, new)
+ install_rename_triggers(table, old, new)
+
+ update_column_in_batches(table, new, Arel::Table.new(table)[old])
+
+ change_column_null(table, new, false) unless old_col.null
+
+ copy_indexes(table, old, new)
+ copy_foreign_keys(table, old, new)
+ end
+
+ # Installs triggers in a table that keep a new column in sync with an old
+ # one.
+ #
+ # table - The name of the table to install the trigger in.
+ # old_column - The name of the old column.
+ # new_column - The name of the new column.
+ def install_rename_triggers(table, old_column, new_column)
+ trigger_name = rename_trigger_name(table, old_column, new_column)
quoted_table = quote_table_name(table)
- quoted_old = quote_column_name(old)
- quoted_new = quote_column_name(new)
+ quoted_old = quote_column_name(old_column)
+ quoted_new = quote_column_name(new_column)
if Database.postgresql?
install_rename_triggers_for_postgresql(trigger_name, quoted_table,
@@ -388,13 +414,6 @@ module Gitlab
install_rename_triggers_for_mysql(trigger_name, quoted_table,
quoted_old, quoted_new)
end
-
- update_column_in_batches(table, new, Arel::Table.new(table)[old])
-
- change_column_null(table, new, false) unless old_col.null
-
- copy_indexes(table, old, new)
- copy_foreign_keys(table, old, new)
end
# Changes the type of a column concurrently.
@@ -446,6 +465,99 @@ module Gitlab
remove_column(table, old)
end
+ # Changes the column type of a table using a background migration.
+ #
+ # Because this method uses a background migration it's more suitable for
+ # large tables. For small tables it's better to use
+ # `change_column_type_concurrently` since it can complete its work in a
+ # much shorter amount of time and doesn't rely on Sidekiq.
+ #
+ # Example usage:
+ #
+ # class Issue < ActiveRecord::Base
+ # self.table_name = 'issues'
+ #
+ # include EachBatch
+ #
+ # def self.to_migrate
+ # where('closed_at IS NOT NULL')
+ # end
+ # end
+ #
+ # change_column_type_using_background_migration(
+ # Issue.to_migrate,
+ # :closed_at,
+ # :datetime_with_timezone
+ # )
+ #
+ # Reverting a migration like this is done exactly the same way, just with
+ # a different type to migrate to (e.g. `:datetime` in the above example).
+ #
+ # relation - An ActiveRecord relation to use for scheduling jobs and
+ # figuring out what table we're modifying. This relation _must_
+ # have the EachBatch module included.
+ #
+ # column - The name of the column for which the type will be changed.
+ #
+ # new_type - The new type of the column.
+ #
+ # batch_size - The number of rows to schedule in a single background
+ # migration.
+ #
+ # interval - The time interval between every background migration.
+ def change_column_type_using_background_migration(
+ relation,
+ column,
+ new_type,
+ batch_size: 10_000,
+ interval: 10.minutes
+ )
+
+ unless relation.model < EachBatch
+ raise TypeError, 'The relation must include the EachBatch module'
+ end
+
+ temp_column = "#{column}_for_type_change"
+ table = relation.table_name
+ max_index = 0
+
+ add_column(table, temp_column, new_type)
+ install_rename_triggers(table, column, temp_column)
+
+ # Schedule the jobs that will copy the data from the old column to the
+ # new one. Rows with NULL values in our source column are skipped since
+ # the target column is already NULL at this point.
+ relation.where.not(column => nil).each_batch(of: batch_size) do |batch, index|
+ start_id, end_id = batch.pluck('MIN(id), MAX(id)').first
+ max_index = index
+
+ BackgroundMigrationWorker.perform_in(
+ index * interval,
+ 'CopyColumn',
+ [table, column, temp_column, start_id, end_id]
+ )
+ end
+
+ # Schedule the renaming of the column to happen (initially) 1 hour after
+ # the last batch finished.
+ BackgroundMigrationWorker.perform_in(
+ (max_index * interval) + 1.hour,
+ 'CleanupConcurrentTypeChange',
+ [table, column, temp_column]
+ )
+
+ if perform_background_migration_inline?
+ # To ensure the schema is up to date immediately we perform the
+ # migration inline in dev / test environments.
+ Gitlab::BackgroundMigration.steal('CopyColumn')
+ Gitlab::BackgroundMigration.steal('CleanupConcurrentTypeChange')
+ end
+ end
+
+ def perform_background_migration_inline?
+ Rails.env.test? || Rails.env.development?
+ end
+
# Performs a concurrent column rename when using PostgreSQL.
def install_rename_triggers_for_postgresql(trigger, table, old, new)
execute <<-EOF.strip_heredoc
@@ -694,14 +806,14 @@ into similar problems in the future (e.g. when new tables are created).
# We push multiple jobs at a time to reduce the time spent in
# Sidekiq/Redis operations. We're using this buffer based approach so we
# don't need to run additional queries for every range.
- BackgroundMigrationWorker.perform_bulk(jobs)
+ BackgroundMigrationWorker.bulk_perform_async(jobs)
jobs.clear
end
jobs << [job_class_name, [start_id, end_id]]
end
- BackgroundMigrationWorker.perform_bulk(jobs) unless jobs.empty?
+ BackgroundMigrationWorker.bulk_perform_async(jobs) unless jobs.empty?
end
# Queues background migration jobs for an entire table, batched by ID range.
@@ -732,6 +844,12 @@ into similar problems in the future (e.g. when new tables are created).
def queue_background_migration_jobs_by_range_at_intervals(model_class, job_class_name, delay_interval, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE)
raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id')
+ # To avoid overloading the worker we enforce a minimum interval both
+ # when scheduling and when performing jobs.
+ if delay_interval < BackgroundMigrationWorker::MIN_INTERVAL
+ delay_interval = BackgroundMigrationWorker::MIN_INTERVAL
+ end
+
model_class.each_batch(of: batch_size) do |relation, index|
start_id, end_id = relation.pluck('MIN(id), MAX(id)').first
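
The interval clamp added above means callers can request any delay and still respect the worker's minimum; a hypothetical migration sketch (the model and job class names are illustrative):

    queue_background_migration_jobs_by_range_at_intervals(
      MigratableIssue,        # an ActiveRecord model that includes EachBatch
      'ExampleBackgroundJob', # background migration class name
      1.minute                # raised to BackgroundMigrationWorker::MIN_INTERVAL if lower
    )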
diff --git a/lib/gitlab/database/rename_reserved_paths_migration/v1/migration_classes.rb b/lib/gitlab/database/rename_reserved_paths_migration/v1/migration_classes.rb
index 5481024db8e..fd4a8832ec2 100644
--- a/lib/gitlab/database/rename_reserved_paths_migration/v1/migration_classes.rb
+++ b/lib/gitlab/database/rename_reserved_paths_migration/v1/migration_classes.rb
@@ -6,7 +6,7 @@ module Gitlab
module Routable
def full_path
if route && route.path.present?
- @full_path ||= route.path
+ @full_path ||= route.path # rubocop:disable Gitlab/ModuleWithInstanceVariables
else
update_route if persisted?
@@ -30,7 +30,7 @@ module Gitlab
def prepare_route
route || build_route(source: self)
route.path = build_full_path
- @full_path = nil
+ @full_path = nil # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
end
@@ -68,6 +68,11 @@ module Gitlab
has_one :route, as: :source
self.table_name = 'projects'
+ HASHED_STORAGE_FEATURES = {
+ repository: 1,
+ attachments: 2
+ }.freeze
+
def repository_storage_path
Gitlab.config.repositories.storages[repository_storage]['path']
end
@@ -76,6 +81,13 @@ module Gitlab
def self.name
'Project'
end
+
+ def hashed_storage?(feature)
+ raise ArgumentError, "Invalid feature" unless HASHED_STORAGE_FEATURES.include?(feature)
+ return false unless respond_to?(:storage_version)
+
+ self.storage_version && self.storage_version >= HASHED_STORAGE_FEATURES[feature]
+ end
end
end
end
diff --git a/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects.rb b/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects.rb
index 75a75f61953..979225dd216 100644
--- a/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects.rb
+++ b/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects.rb
@@ -22,9 +22,12 @@ module Gitlab
end
def move_project_folders(project, old_full_path, new_full_path)
- move_repository(project, old_full_path, new_full_path)
- move_repository(project, "#{old_full_path}.wiki", "#{new_full_path}.wiki")
- move_uploads(old_full_path, new_full_path)
+ unless project.hashed_storage?(:repository)
+ move_repository(project, old_full_path, new_full_path)
+ move_repository(project, "#{old_full_path}.wiki", "#{new_full_path}.wiki")
+ end
+
+ move_uploads(old_full_path, new_full_path) unless project.hashed_storage?(:attachments)
move_pages(old_full_path, new_full_path)
end
diff --git a/lib/gitlab/dependency_linker/composer_json_linker.rb b/lib/gitlab/dependency_linker/composer_json_linker.rb
index 0245bf4077a..cfd4ec15125 100644
--- a/lib/gitlab/dependency_linker/composer_json_linker.rb
+++ b/lib/gitlab/dependency_linker/composer_json_linker.rb
@@ -11,7 +11,7 @@ module Gitlab
end
def package_url(name)
- "https://packagist.org/packages/#{name}" if name =~ %r{\A#{REPO_REGEX}\z}
+ "https://packagist.org/packages/#{name}" if name =~ /\A#{REPO_REGEX}\z/
end
end
end
diff --git a/lib/gitlab/dependency_linker/gemfile_linker.rb b/lib/gitlab/dependency_linker/gemfile_linker.rb
index d034ea67387..bfea836bcb2 100644
--- a/lib/gitlab/dependency_linker/gemfile_linker.rb
+++ b/lib/gitlab/dependency_linker/gemfile_linker.rb
@@ -15,7 +15,7 @@ module Gitlab
link_regex(/(github:|:github\s*=>)\s*['"](?<name>[^'"]+)['"]/, &method(:github_url))
# Link `git: "https://gitlab.example.com/user/repo"` to https://gitlab.example.com/user/repo
- link_regex(%r{(git:|:git\s*=>)\s*['"](?<name>#{URL_REGEX})['"]}, &:itself)
+ link_regex(/(git:|:git\s*=>)\s*['"](?<name>#{URL_REGEX})['"]/, &:itself)
# Link `source "https://rubygems.org"` to https://rubygems.org
link_method_call('source', URL_REGEX, &:itself)
diff --git a/lib/gitlab/dependency_linker/podspec_linker.rb b/lib/gitlab/dependency_linker/podspec_linker.rb
index a52c7a02439..924e55e9820 100644
--- a/lib/gitlab/dependency_linker/podspec_linker.rb
+++ b/lib/gitlab/dependency_linker/podspec_linker.rb
@@ -12,7 +12,7 @@ module Gitlab
def link_dependencies
link_method_call('homepage', URL_REGEX, &:itself)
- link_regex(%r{(git:|:git\s*=>)\s*['"](?<name>#{URL_REGEX})['"]}, &:itself)
+ link_regex(/(git:|:git\s*=>)\s*['"](?<name>#{URL_REGEX})['"]/, &:itself)
link_method_call('license', &method(:license_url))
link_regex(/license\s*=\s*\{\s*(type:|:type\s*=>)\s*#{STRING_REGEX}/, &method(:license_url))
diff --git a/lib/gitlab/diff/diff_refs.rb b/lib/gitlab/diff/diff_refs.rb
index 371cbe04b9b..88e0db830f6 100644
--- a/lib/gitlab/diff/diff_refs.rb
+++ b/lib/gitlab/diff/diff_refs.rb
@@ -13,9 +13,9 @@ module Gitlab
def ==(other)
other.is_a?(self.class) &&
- base_sha == other.base_sha &&
- start_sha == other.start_sha &&
- head_sha == other.head_sha
+ Git.shas_eql?(base_sha, other.base_sha) &&
+ Git.shas_eql?(start_sha, other.start_sha) &&
+ Git.shas_eql?(head_sha, other.head_sha)
end
alias_method :eql?, :==
diff --git a/lib/gitlab/diff/file.rb b/lib/gitlab/diff/file.rb
index 1dabd4ebdd0..34b070dd375 100644
--- a/lib/gitlab/diff/file.rb
+++ b/lib/gitlab/diff/file.rb
@@ -5,7 +5,7 @@ module Gitlab
delegate :new_file?, :deleted_file?, :renamed_file?,
:old_path, :new_path, :a_mode, :b_mode, :mode_changed?,
- :submodule?, :expanded?, :too_large?, :collapsed?, :line_count, to: :diff, prefix: false
+ :submodule?, :expanded?, :too_large?, :collapsed?, :line_count, :has_binary_notice?, to: :diff, prefix: false
# Finding a viewer for a diff file happens based only on extension and whether the
# diff file blobs are binary or text, which means 1 diff file should only be matched by 1 viewer,
@@ -25,24 +25,35 @@ module Gitlab
@repository = repository
@diff_refs = diff_refs
@fallback_diff_refs = fallback_diff_refs
+
+ # Ensure items are collected in the batch
+ new_blob
+ old_blob
end
- def position(line)
+ def position(position_marker, position_type: :text)
return unless diff_refs
- Position.new(
+ data = {
+ diff_refs: diff_refs,
+ position_type: position_type.to_s,
old_path: old_path,
- new_path: new_path,
- old_line: line.old_line,
- new_line: line.new_line,
- diff_refs: diff_refs
- )
+ new_path: new_path
+ }
+
+ if position_type == :text
+ data.merge!(text_position_properties(position_marker))
+ else
+ data.merge!(image_position_properties(position_marker))
+ end
+
+ Position.new(data)
end
def line_code(line)
return if line.meta?
- Gitlab::Diff::LineCode.generate(file_path, line.new_pos, line.old_pos)
+ Gitlab::Git.diff_line_code(file_path, line.new_pos, line.old_pos)
end
def line_for_line_code(code)
@@ -50,7 +61,9 @@ module Gitlab
end
def line_for_position(pos)
- diff_lines.find { |line| position(line) == pos }
+ return nil unless pos.position_type == 'text'
+
+ diff_lines.find { |line| line.old_line == pos.old_line && line.new_line == pos.new_line }
end
def position_for_line_code(code)
@@ -88,29 +101,25 @@ module Gitlab
end
def new_blob
- return @new_blob if defined?(@new_blob)
+ return unless new_content_sha
- sha = new_content_sha
- return @new_blob = nil unless sha
-
- @new_blob = repository.blob_at(sha, file_path)
+ Blob.lazy(repository.project, new_content_sha, file_path)
end
def old_blob
- return @old_blob if defined?(@old_blob)
-
- sha = old_content_sha
- return @old_blob = nil unless sha
+ return unless old_content_sha
- @old_blob = repository.blob_at(sha, old_path)
+ Blob.lazy(repository.project, old_content_sha, old_path)
end
def content_sha
new_content_sha || old_content_sha
end
+ # Use #itself to check the value wrapped by a BatchLoader instance, rather
+ # than checking whether the BatchLoader instance itself is falsey.
def blob
- new_blob || old_blob
+ new_blob&.itself || old_blob&.itself
end
attr_writer :highlighted_diff_lines
@@ -166,7 +175,7 @@ module Gitlab
end
def binary?
- old_blob&.binary? || new_blob&.binary?
+ has_binary_notice? || try_blobs(:binary?)
end
def text?
@@ -174,15 +183,15 @@ module Gitlab
end
def external_storage_error?
- old_blob&.external_storage_error? || new_blob&.external_storage_error?
+ try_blobs(:external_storage_error?)
end
def stored_externally?
- old_blob&.stored_externally? || new_blob&.stored_externally?
+ try_blobs(:stored_externally?)
end
def external_storage
- old_blob&.external_storage || new_blob&.external_storage
+ try_blobs(:external_storage)
end
def content_changed?
@@ -197,15 +206,15 @@ module Gitlab
end
def size
- [old_blob&.size, new_blob&.size].compact.sum
+ valid_blobs.map(&:size).sum
end
def raw_size
- [old_blob&.raw_size, new_blob&.raw_size].compact.sum
+ valid_blobs.map(&:raw_size).sum
end
def raw_binary?
- old_blob&.raw_binary? || new_blob&.raw_binary?
+ try_blobs(:raw_binary?)
end
def raw_text?
@@ -228,6 +237,27 @@ module Gitlab
private
+ # The blob instances are instances of BatchLoader, which means calling
+ # &. directly on them won't work. Object#try also won't work, because Blob
+ # doesn't inherit from Object, but from BasicObject (via SimpleDelegator).
+ def try_blobs(meth)
+ old_blob&.itself&.public_send(meth) || new_blob&.itself&.public_send(meth)
+ end
+
+ # We can't use #compact for the same reason we can't use &., but calling
+ # #nil? explicitly does work because it is proxied to the blob itself.
+ def valid_blobs
+ [old_blob, new_blob].reject(&:nil?)
+ end
+
+ def text_position_properties(line)
+ { old_line: line.old_line, new_line: line.new_line }
+ end
+
+ def image_position_properties(image_point)
+ image_point.to_h
+ end
+
def blobs_changed?
old_blob && new_blob && old_blob.id != new_blob.id
end
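The BatchLoader behaviour the comments above describe can be reproduced in isolation; this standalone sketch (it assumes the batch-loader gem, and the store and keys are made up) shows why the diff checks blobs with &.itself and reject(&:nil?) rather than plain truthiness:
    require 'batch_loader'

    def lazy_value(key, store)
      BatchLoader.for(key).batch do |keys, loader|
        keys.each { |k| loader.call(k, store[k]) }
      end
    end

    store   = { 'abc123' => 'blob data' }
    present = lazy_value('abc123', store)
    missing = lazy_value('ffffff', store)

    present&.itself         # => "blob data"
    missing&.itself         # => nil, the wrapped value
    missing.nil?            # => true, #nil? is forwarded to the wrapped value
    missing ? :set : :unset # => :set, plain truthiness only sees the proxy object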
diff --git a/lib/gitlab/diff/file_collection/base.rb b/lib/gitlab/diff/file_collection/base.rb
index 88ae65cb468..a6007ebf531 100644
--- a/lib/gitlab/diff/file_collection/base.rb
+++ b/lib/gitlab/diff/file_collection/base.rb
@@ -22,10 +22,7 @@ module Gitlab
end
def diff_files
- # n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/37445
- Gitlab::GitalyClient.allow_n_plus_1_calls do
- @diff_files ||= @diffs.decorate! { |diff| decorate_diff!(diff) }
- end
+ @diff_files ||= @diffs.decorate! { |diff| decorate_diff!(diff) }
end
def diff_file_with_old_path(old_path)
diff --git a/lib/gitlab/diff/formatters/base_formatter.rb b/lib/gitlab/diff/formatters/base_formatter.rb
new file mode 100644
index 00000000000..5e923b9e602
--- /dev/null
+++ b/lib/gitlab/diff/formatters/base_formatter.rb
@@ -0,0 +1,61 @@
+module Gitlab
+ module Diff
+ module Formatters
+ class BaseFormatter
+ attr_reader :old_path
+ attr_reader :new_path
+ attr_reader :base_sha
+ attr_reader :start_sha
+ attr_reader :head_sha
+ attr_reader :position_type
+
+ def initialize(attrs)
+ if diff_file = attrs[:diff_file]
+ attrs[:diff_refs] = diff_file.diff_refs
+ attrs[:old_path] = diff_file.old_path
+ attrs[:new_path] = diff_file.new_path
+ end
+
+ if diff_refs = attrs[:diff_refs]
+ attrs[:base_sha] = diff_refs.base_sha
+ attrs[:start_sha] = diff_refs.start_sha
+ attrs[:head_sha] = diff_refs.head_sha
+ end
+
+ @old_path = attrs[:old_path]
+ @new_path = attrs[:new_path]
+ @base_sha = attrs[:base_sha]
+ @start_sha = attrs[:start_sha]
+ @head_sha = attrs[:head_sha]
+ end
+
+ def key
+ [base_sha, start_sha, head_sha, Digest::SHA1.hexdigest(old_path || ""), Digest::SHA1.hexdigest(new_path || "")]
+ end
+
+ def to_h
+ {
+ base_sha: base_sha,
+ start_sha: start_sha,
+ head_sha: head_sha,
+ old_path: old_path,
+ new_path: new_path,
+ position_type: position_type
+ }
+ end
+
+ def position_type
+ raise NotImplementedError
+ end
+
+ def ==(other)
+ raise NotImplementedError
+ end
+
+ def complete?
+ raise NotImplementedError
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/diff/formatters/image_formatter.rb b/lib/gitlab/diff/formatters/image_formatter.rb
new file mode 100644
index 00000000000..ccd0d309972
--- /dev/null
+++ b/lib/gitlab/diff/formatters/image_formatter.rb
@@ -0,0 +1,43 @@
+module Gitlab
+ module Diff
+ module Formatters
+ class ImageFormatter < BaseFormatter
+ attr_reader :width
+ attr_reader :height
+ attr_reader :x
+ attr_reader :y
+
+ def initialize(attrs)
+ @x = attrs[:x]
+ @y = attrs[:y]
+ @width = attrs[:width]
+ @height = attrs[:height]
+
+ super(attrs)
+ end
+
+ def key
+ @key ||= super.push(x, y)
+ end
+
+ def complete?
+ x && y && width && height
+ end
+
+ def to_h
+ super.merge(width: width, height: height, x: x, y: y)
+ end
+
+ def position_type
+ "image"
+ end
+
+ def ==(other)
+ other.is_a?(self.class) &&
+ x == other.x &&
+ y == other.y
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/diff/formatters/text_formatter.rb b/lib/gitlab/diff/formatters/text_formatter.rb
new file mode 100644
index 00000000000..01c7e9f51ab
--- /dev/null
+++ b/lib/gitlab/diff/formatters/text_formatter.rb
@@ -0,0 +1,49 @@
+module Gitlab
+ module Diff
+ module Formatters
+ class TextFormatter < BaseFormatter
+ attr_reader :old_line
+ attr_reader :new_line
+
+ def initialize(attrs)
+ @old_line = attrs[:old_line]
+ @new_line = attrs[:new_line]
+
+ super(attrs)
+ end
+
+ def key
+ @key ||= super.push(old_line, new_line)
+ end
+
+ def complete?
+ old_line || new_line
+ end
+
+ def to_h
+ super.merge(old_line: old_line, new_line: new_line)
+ end
+
+ def line_age
+ if old_line && new_line
+ nil
+ elsif new_line
+ 'new'
+ else
+ 'old'
+ end
+ end
+
+ def position_type
+ "text"
+ end
+
+ def ==(other)
+ other.is_a?(self.class) &&
+ new_line == other.new_line &&
+ old_line == other.old_line
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/diff/highlight.rb b/lib/gitlab/diff/highlight.rb
index b669ee5b799..269016daac2 100644
--- a/lib/gitlab/diff/highlight.rb
+++ b/lib/gitlab/diff/highlight.rb
@@ -14,6 +14,7 @@ module Gitlab
else
@diff_lines = diff_lines
end
+
@raw_lines = @diff_lines.map(&:text)
end
@@ -26,7 +27,17 @@ module Gitlab
rich_line = highlight_line(diff_line) || diff_line.text
if line_inline_diffs = inline_diffs[i]
- rich_line = InlineDiffMarker.new(diff_line.text, rich_line).mark(line_inline_diffs)
+ begin
+ rich_line = InlineDiffMarker.new(diff_line.text, rich_line).mark(line_inline_diffs)
+ # This should only happen when the encoding of the diff doesn't
+ # match the blob, which is a bug. But we shouldn't fail to render
+ # completely in that case, even though we want to report the error.
+ rescue RangeError => e
+ if Gitlab::Sentry.enabled?
+ Gitlab::Sentry.context
+ Raven.capture_exception(e)
+ end
+ end
end
diff_line.text = rich_line
diff --git a/lib/gitlab/diff/image_point.rb b/lib/gitlab/diff/image_point.rb
new file mode 100644
index 00000000000..65332dfd239
--- /dev/null
+++ b/lib/gitlab/diff/image_point.rb
@@ -0,0 +1,23 @@
+module Gitlab
+ module Diff
+ class ImagePoint
+ attr_reader :width, :height, :x, :y
+
+ def initialize(width, height, x, y)
+ @width = width
+ @height = height
+ @x = x
+ @y = y
+ end
+
+ def to_h
+ {
+ width: width,
+ height: height,
+ x: x,
+ y: y
+ }
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/diff/inline_diff.rb b/lib/gitlab/diff/inline_diff.rb
index 55708d42161..54783a07919 100644
--- a/lib/gitlab/diff/inline_diff.rb
+++ b/lib/gitlab/diff/inline_diff.rb
@@ -70,7 +70,7 @@ module Gitlab
def find_changed_line_pairs(lines)
# Prefixes of all diff lines, indicating their types
# For example: `" - + -+ ---+++ --+ -++"`
- line_prefixes = lines.each_with_object("") { |line, s| s << line[0] }.gsub(/[^ +-]/, ' ')
+ line_prefixes = lines.each_with_object("") { |line, s| s << (line[0] || ' ') }.gsub(/[^ +-]/, ' ')
changed_line_pairs = []
line_prefixes.scan(LINE_PAIRS_PATTERN) do
@@ -102,6 +102,7 @@ module Gitlab
new_char = b[pos]
break if old_char != new_char
+
length += 1
end
diff --git a/lib/gitlab/diff/line_code.rb b/lib/gitlab/diff/line_code.rb
deleted file mode 100644
index f3578ab3d35..00000000000
--- a/lib/gitlab/diff/line_code.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-module Gitlab
- module Diff
- class LineCode
- def self.generate(file_path, new_line_position, old_line_position)
- "#{Digest::SHA1.hexdigest(file_path)}_#{old_line_position}_#{new_line_position}"
- end
- end
- end
-end
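The generation logic itself is unchanged by the move to Gitlab::Git.diff_line_code; a standalone sketch (path and line numbers are made up):
    require 'digest'

    def diff_line_code(file_path, new_line_position, old_line_position)
      "#{Digest::SHA1.hexdigest(file_path)}_#{old_line_position}_#{new_line_position}"
    end

    diff_line_code('app/models/user.rb', 14, 10)
    # => "<sha1 of the path>_10_14"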
diff --git a/lib/gitlab/diff/parser.rb b/lib/gitlab/diff/parser.rb
index 742f989c50b..8302f30a0a2 100644
--- a/lib/gitlab/diff/parser.rb
+++ b/lib/gitlab/diff/parser.rb
@@ -17,7 +17,9 @@ module Gitlab
# without having to instantiate all the others that come after it.
Enumerator.new do |yielder|
@lines.each do |line|
- next if filename?(line)
+ # A filename line is only expected in the meta part of the diff content;
+ # once type is set we are already in the content part
+ next if filename?(line) && type.nil?
full_line = line.delete("\n")
@@ -28,6 +30,7 @@ module Gitlab
line_new = line.match(/\+[0-9]*/)[0].to_i.abs rescue 0
next if line_old <= 1 && line_new <= 1 # top of file
+
yielder << Gitlab::Diff::Line.new(full_line, type, line_obj_index, line_old, line_new)
line_obj_index += 1
next
diff --git a/lib/gitlab/diff/position.rb b/lib/gitlab/diff/position.rb
index f80afb20f0c..690b27cde81 100644
--- a/lib/gitlab/diff/position.rb
+++ b/lib/gitlab/diff/position.rb
@@ -1,37 +1,25 @@
-# Defines a specific location, identified by paths and line numbers,
+# Defines a specific location, identified by paths, line numbers and image coordinates,
# within a specific diff, identified by start, head and base commit ids.
module Gitlab
module Diff
class Position
- attr_reader :old_path
- attr_reader :new_path
- attr_reader :old_line
- attr_reader :new_line
- attr_reader :base_sha
- attr_reader :start_sha
- attr_reader :head_sha
-
+ attr_accessor :formatter
+
+ delegate :old_path,
+ :new_path,
+ :base_sha,
+ :start_sha,
+ :head_sha,
+ :old_line,
+ :new_line,
+ :position_type, to: :formatter
+
+ # A position can belong to a text line or to an image coordinate,
+ # depending on the position_type argument.
+ # Text position will have: new_line and old_line
+ # Image position will have: width, height, x, y
def initialize(attrs = {})
- if diff_file = attrs[:diff_file]
- attrs[:diff_refs] = diff_file.diff_refs
- attrs[:old_path] = diff_file.old_path
- attrs[:new_path] = diff_file.new_path
- end
-
- if diff_refs = attrs[:diff_refs]
- attrs[:base_sha] = diff_refs.base_sha
- attrs[:start_sha] = diff_refs.start_sha
- attrs[:head_sha] = diff_refs.head_sha
- end
-
- @old_path = attrs[:old_path]
- @new_path = attrs[:new_path]
- @base_sha = attrs[:base_sha]
- @start_sha = attrs[:start_sha]
- @head_sha = attrs[:head_sha]
-
- @old_line = attrs[:old_line]
- @new_line = attrs[:new_line]
+ @formatter = get_formatter_class(attrs[:position_type]).new(attrs)
end
# `Gitlab::Diff::Position` objects are stored as serialized attributes in
@@ -46,27 +34,23 @@ module Gitlab
end
def encode_with(coder)
- coder['attributes'] = self.to_h
+ coder['attributes'] = formatter.to_h
end
def key
- @key ||= [base_sha, start_sha, head_sha, Digest::SHA1.hexdigest(old_path || ""), Digest::SHA1.hexdigest(new_path || ""), old_line, new_line]
+ formatter.key
end
def ==(other)
- other.is_a?(self.class) && key == other.key
+ other.is_a?(self.class) &&
+ other.diff_refs == diff_refs &&
+ other.old_path == old_path &&
+ other.new_path == new_path &&
+ other.formatter == formatter
end
def to_h
- {
- old_path: old_path,
- new_path: new_path,
- old_line: old_line,
- new_line: new_line,
- base_sha: base_sha,
- start_sha: start_sha,
- head_sha: head_sha
- }
+ formatter.to_h
end
def inspect
@@ -74,23 +58,15 @@ module Gitlab
end
def complete?
- file_path.present? &&
- (old_line || new_line) &&
- diff_refs.complete?
+ file_path.present? && formatter.complete? && diff_refs.complete?
end
def to_json(opts = nil)
- JSON.generate(self.to_h, opts)
+ JSON.generate(formatter.to_h, opts)
end
def type
- if old_line && new_line
- nil
- elsif new_line
- 'new'
- else
- 'old'
- end
+ formatter.line_age
end
def unchanged?
@@ -118,7 +94,9 @@ module Gitlab
end
def diff_file(repository)
- @diff_file ||= begin
+ return @diff_file if defined?(@diff_file)
+
+ @diff_file = begin
if RequestStore.active?
key = {
project_id: repository.project.id,
@@ -146,8 +124,20 @@ module Gitlab
def find_diff_file(repository)
return unless diff_refs.complete?
+ return unless comparison = diff_refs.compare_in(repository.project)
+
+ comparison.diffs(paths: paths, expanded: true).diff_files.first
+ end
- diff_refs.compare_in(repository.project).diffs(paths: paths, expanded: true).diff_files.first
+ def get_formatter_class(type)
+ type ||= "text"
+
+ case type
+ when 'image'
+ Gitlab::Diff::Formatters::ImageFormatter
+ else
+ Gitlab::Diff::Formatters::TextFormatter
+ end
end
end
end
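A rough Rails-console sketch of the formatter selection above; it assumes an existing diff_refs object from a merge request, and all paths and coordinates are illustrative:
    text_pos = Gitlab::Diff::Position.new(
      old_path: 'app/models/user.rb',
      new_path: 'app/models/user.rb',
      old_line: nil,
      new_line: 14,
      diff_refs: diff_refs
    )
    text_pos.position_type # => "text", TextFormatter is the default
    text_pos.type          # => "new", delegated to TextFormatter#line_age

    image_pos = Gitlab::Diff::Position.new(
      position_type: 'image',
      old_path: 'logo.png',
      new_path: 'logo.png',
      width: 100, height: 80, x: 10, y: 20,
      diff_refs: diff_refs
    )
    image_pos.to_h[:position_type] # => "image", backed by ImageFormatter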
diff --git a/lib/gitlab/ee_compat_check.rb b/lib/gitlab/ee_compat_check.rb
index c5a8ea12245..0fb71976883 100644
--- a/lib/gitlab/ee_compat_check.rb
+++ b/lib/gitlab/ee_compat_check.rb
@@ -2,10 +2,10 @@
module Gitlab
# Checks if a set of migrations requires downtime or not.
class EeCompatCheck
- CE_REPO = 'https://gitlab.com/gitlab-org/gitlab-ce.git'.freeze
- EE_REPO = 'https://gitlab.com/gitlab-org/gitlab-ee.git'.freeze
+ DEFAULT_CE_PROJECT_URL = 'https://gitlab.com/gitlab-org/gitlab-ce'.freeze
+ EE_REPO_URL = 'https://gitlab.com/gitlab-org/gitlab-ee.git'.freeze
CHECK_DIR = Rails.root.join('ee_compat_check')
- IGNORED_FILES_REGEX = /(VERSION|CHANGELOG\.md:\d+)/.freeze
+ IGNORED_FILES_REGEX = %r{VERSION|CHANGELOG\.md|db/schema\.rb}i.freeze
PLEASE_READ_THIS_BANNER = %Q{
============================================================
===================== PLEASE READ THIS =====================
@@ -17,28 +17,36 @@ module Gitlab
============================================================\n
}.freeze
- attr_reader :ee_repo_dir, :patches_dir, :ce_repo, :ce_branch, :ee_branch_found
- attr_reader :failed_files
+ attr_reader :ee_repo_dir, :patches_dir, :ce_project_url, :ce_repo_url, :ce_branch, :ee_branch_found
+ attr_reader :job_id, :failed_files
- def initialize(branch:, ce_repo: CE_REPO)
+ def initialize(branch:, ce_project_url: DEFAULT_CE_PROJECT_URL, job_id: nil)
@ee_repo_dir = CHECK_DIR.join('ee-repo')
@patches_dir = CHECK_DIR.join('patches')
@ce_branch = branch
- @ce_repo = ce_repo
+ @ce_project_url = ce_project_url
+ @ce_repo_url = "#{ce_project_url}.git"
+ @job_id = job_id
end
def check
ensure_patches_dir
- generate_patch(ce_branch, ce_patch_full_path)
+ add_remote('canonical-ce', "#{DEFAULT_CE_PROJECT_URL}.git")
+ generate_patch(branch: ce_branch, patch_path: ce_patch_full_path, remote: 'canonical-ce')
ensure_ee_repo
Dir.chdir(ee_repo_dir) do
step("In the #{ee_repo_dir} directory")
+ add_remote('canonical-ee', EE_REPO_URL)
+
status = catch(:halt_check) do
ce_branch_compat_check!
delete_ee_branches_locally!
ee_branch_presence_check!
+
+ step("Checking out #{ee_branch_found}", %W[git checkout -b #{ee_branch_found} canonical-ee/#{ee_branch_found}])
+ generate_patch(branch: ee_branch_found, patch_path: ee_patch_full_path, remote: 'canonical-ee')
ee_branch_compat_check!
end
@@ -54,13 +62,20 @@ module Gitlab
private
+ def add_remote(name, url)
+ step(
+ "Adding the #{name} remote (#{url})",
+ %W[git remote add #{name} #{url}]
+ )
+ end
+
def ensure_ee_repo
if Dir.exist?(ee_repo_dir)
step("#{ee_repo_dir} already exists")
else
step(
- "Cloning #{EE_REPO} into #{ee_repo_dir}",
- %W[git clone --branch master --single-branch --depth=200 #{EE_REPO} #{ee_repo_dir}]
+ "Cloning #{EE_REPO_URL} into #{ee_repo_dir}",
+ %W[git clone --branch master --single-branch --depth=200 #{EE_REPO_URL} #{ee_repo_dir}]
)
end
end
@@ -69,14 +84,14 @@ module Gitlab
FileUtils.mkdir_p(patches_dir)
end
- def generate_patch(branch, patch_path)
+ def generate_patch(branch:, patch_path:, remote:)
FileUtils.rm(patch_path, force: true)
- find_merge_base_with_master(branch: branch)
+ find_merge_base_with_master(branch: branch, master_remote: remote)
step(
- "Generating the patch against origin/master in #{patch_path}",
- %w[git diff --binary origin/master...HEAD]
+ "Generating the patch against #{remote}/master in #{patch_path}",
+ %W[git diff --binary #{remote}/master...origin/#{branch}]
) do |output, status|
throw(:halt_check, :ko) unless status.zero?
@@ -94,14 +109,14 @@ module Gitlab
end
def ee_branch_presence_check!
- _, status = step("Fetching origin/#{ee_branch_prefix}", %W[git fetch origin #{ee_branch_prefix}])
+ _, status = step("Fetching origin/#{ee_branch_prefix}", %W[git fetch canonical-ee #{ee_branch_prefix}])
if status.zero?
@ee_branch_found = ee_branch_prefix
return
end
- _, status = step("Fetching origin/#{ee_branch_suffix}", %W[git fetch origin #{ee_branch_suffix}])
+ _, status = step("Fetching origin/#{ee_branch_suffix}", %W[git fetch canonical-ee #{ee_branch_suffix}])
if status.zero?
@ee_branch_found = ee_branch_suffix
@@ -114,10 +129,6 @@ module Gitlab
end
def ee_branch_compat_check!
- step("Checking out origin/#{ee_branch_found}", %W[git checkout -b #{ee_branch_found} FETCH_HEAD])
-
- generate_patch(ee_branch_found, ee_patch_full_path)
-
unless check_patch(ee_patch_full_path).zero?
puts
puts ee_branch_doesnt_apply_cleanly_msg
@@ -131,8 +142,7 @@ module Gitlab
def check_patch(patch_path)
step("Checking out master", %w[git checkout master])
- step("Resetting to latest master", %w[git reset --hard origin/master])
- step("Fetching CE/#{ce_branch}", %W[git fetch #{CE_REPO} #{ce_branch}])
+ step("Resetting to latest master", %w[git reset --hard canonical-ee/master])
step(
"Checking if #{patch_path} applies cleanly to EE/master",
# Don't use --check here because it can result in a 0-exit status even
@@ -146,12 +156,14 @@ module Gitlab
%W[git apply --3way #{patch_path}]
) do |output, status|
puts output
+
unless status.zero?
@failed_files = output.lines.reduce([]) do |memo, line|
if line.start_with?('error: patch failed:')
file = line.sub(/\Aerror: patch failed: /, '')
memo << file unless file =~ IGNORED_FILES_REGEX
end
+
memo
end
@@ -169,10 +181,10 @@ module Gitlab
command(%W[git branch --delete --force #{ee_branch_suffix}])
end
- def merge_base_found?
+ def merge_base_found?(master_remote:, branch:)
step(
- "Finding merge base with master",
- %w[git merge-base origin/master HEAD]
+ "Finding merge base with #{master_remote}/master",
+ %W[git merge-base #{master_remote}/master origin/#{branch}]
) do |output, status|
if status.zero?
puts "Merge base was found: #{output}"
@@ -181,7 +193,7 @@ module Gitlab
end
end
- def find_merge_base_with_master(branch:)
+ def find_merge_base_with_master(branch:, master_remote:)
# Start with (Math.exp(3).to_i = 20) until (Math.exp(6).to_i = 403)
# In total we go (20 + 54 + 148 + 403 = 625) commits deeper
depth = 20
@@ -190,19 +202,19 @@ module Gitlab
depth += Math.exp(factor).to_i
# Repository is initially cloned with a depth of 20 so we need to fetch
# deeper in the case the branch has more than 20 commits on top of master
- fetch(branch: branch, depth: depth)
- fetch(branch: 'master', depth: depth)
+ fetch(branch: branch, depth: depth, remote: 'origin')
+ fetch(branch: 'master', depth: depth, remote: master_remote)
- merge_base_found?
+ merge_base_found?(master_remote: master_remote, branch: branch)
end
- raise "\n#{branch} is too far behind master, please rebase it!\n" unless success
+ raise "\n#{branch} is too far behind #{master_remote}/master, please rebase it!\n" unless success
end
- def fetch(branch:, depth:)
+ def fetch(branch:, depth:, remote: 'origin')
step(
"Fetching deeper...",
- %W[git fetch --depth=#{depth} --prune origin +refs/heads/#{branch}:refs/remotes/origin/#{branch}]
+ %W[git fetch --depth=#{depth} --prune #{remote} +refs/heads/#{branch}:refs/remotes/#{remote}/#{branch}]
) do |output, status|
raise "Fetch failed: #{output}" unless status.zero?
end
@@ -237,7 +249,7 @@ module Gitlab
end
def patch_url
- "https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/#{ENV['CI_JOB_ID']}/artifacts/raw/ee_compat_check/patches/#{ce_patch_name}"
+ "#{ce_project_url}/-/jobs/#{job_id}/artifacts/raw/ee_compat_check/patches/#{ce_patch_name}"
end
def step(desc, cmd = nil)
@@ -270,7 +282,7 @@ module Gitlab
The `#{branch}` branch applies cleanly to EE/master!
Much ❤️! For more information, see
- https://docs.gitlab.com/ce/development/limit_ee_conflicts.html#check-the-rake-ee_compat_check-in-your-merge-requests
+ https://docs.gitlab.com/ce/development/automatic_ce_ee_merge.html
#{THANKS_FOR_READING_BANNER}
}
end
@@ -302,9 +314,9 @@ module Gitlab
1. Create a new branch from master and cherry-pick your CE commits
# In the EE repo
- $ git fetch origin
- $ git checkout -b #{ee_branch_prefix} origin/master
- $ git fetch #{ce_repo} #{ce_branch}
+ $ git fetch #{EE_REPO_URL} master
+ $ git checkout -b #{ee_branch_prefix} FETCH_HEAD
+ $ git fetch #{ce_repo_url} #{ce_branch}
$ git cherry-pick SHA # Repeat for all the commits you want to pick
You can squash the `#{ce_branch}` commits into a single "Port of #{ce_branch} to EE" commit.
@@ -312,10 +324,9 @@ module Gitlab
2. Apply your branch's patch to EE
# In the EE repo
- $ git fetch origin master
- $ git checkout -b #{ee_branch_prefix} origin/master
- $ wget #{patch_url}
- $ git apply --3way #{ce_patch_name}
+ $ git fetch #{EE_REPO_URL} master
+ $ git checkout -b #{ee_branch_prefix} FETCH_HEAD
+ $ wget #{patch_url} && git apply --3way #{ce_patch_name}
At this point you might have conflicts such as:
@@ -348,7 +359,7 @@ module Gitlab
Once this is done, you can retry this failed build, and it should pass.
Stay 💪 ! For more information, see
- https://docs.gitlab.com/ce/development/limit_ee_conflicts.html#check-the-rake-ee_compat_check-in-your-merge-requests
+ https://docs.gitlab.com/ce/development/automatic_ce_ee_merge.html
#{THANKS_FOR_READING_BANNER}
}
end
@@ -369,7 +380,7 @@ module Gitlab
retry this build.
Stay 💪 ! For more information, see
- https://docs.gitlab.com/ce/development/limit_ee_conflicts.html#check-the-rake-ee_compat_check-in-your-merge-requests
+ https://docs.gitlab.com/ce/development/automatic_ce_ee_merge.html
#{THANKS_FOR_READING_BANNER}
}
end
diff --git a/lib/gitlab/email/handler.rb b/lib/gitlab/email/handler.rb
index b07c68d1498..e08b5be8984 100644
--- a/lib/gitlab/email/handler.rb
+++ b/lib/gitlab/email/handler.rb
@@ -1,3 +1,4 @@
+require 'gitlab/email/handler/create_merge_request_handler'
require 'gitlab/email/handler/create_note_handler'
require 'gitlab/email/handler/create_issue_handler'
require 'gitlab/email/handler/unsubscribe_handler'
@@ -8,6 +9,7 @@ module Gitlab
HANDLERS = [
UnsubscribeHandler,
CreateNoteHandler,
+ CreateMergeRequestHandler,
CreateIssueHandler
].freeze
diff --git a/lib/gitlab/email/handler/create_merge_request_handler.rb b/lib/gitlab/email/handler/create_merge_request_handler.rb
new file mode 100644
index 00000000000..3436306e122
--- /dev/null
+++ b/lib/gitlab/email/handler/create_merge_request_handler.rb
@@ -0,0 +1,70 @@
+require 'gitlab/email/handler/base_handler'
+require 'gitlab/email/handler/reply_processing'
+
+module Gitlab
+ module Email
+ module Handler
+ class CreateMergeRequestHandler < BaseHandler
+ include ReplyProcessing
+ attr_reader :project_path, :incoming_email_token
+
+ def initialize(mail, mail_key)
+ super(mail, mail_key)
+
+ if m = /\A([^\+]*)\+merge-request\+(.*)/.match(mail_key.to_s)
+ @project_path, @incoming_email_token = m.captures
+ end
+ end
+
+ def can_handle?
+ @project_path && @incoming_email_token
+ end
+
+ def execute
+ raise ProjectNotFound unless project
+
+ validate_permission!(:create_merge_request)
+
+ verify_record!(
+ record: create_merge_request,
+ invalid_exception: InvalidMergeRequestError,
+ record_name: 'merge_request')
+ end
+
+ def author
+ @author ||= User.find_by(incoming_email_token: incoming_email_token)
+ end
+
+ def project
+ @project ||= Project.find_by_full_path(project_path)
+ end
+
+ def metrics_params
+ super.merge(project: project&.full_path)
+ end
+
+ private
+
+ def create_merge_request
+ merge_request = MergeRequests::BuildService.new(project, author, merge_request_params).execute
+
+ if merge_request.errors.any?
+ merge_request
+ else
+ MergeRequests::CreateService.new(project, author).create(merge_request)
+ end
+ end
+
+ def merge_request_params
+ params = {
+ source_project_id: project.id,
+ source_branch: mail.subject,
+ target_project_id: project.id
+ }
+ params[:description] = message if message.present?
+ params
+ end
+ end
+ end
+ end
+end
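The mail_key parsing in the handler splits the address local part into a project path and the author's incoming email token; a standalone sketch with made-up values:
    mail_key = 'group/project+merge-request+abc123token'

    if (m = /\A([^\+]*)\+merge-request\+(.*)/.match(mail_key))
      project_path, incoming_email_token = m.captures
      project_path         # => "group/project"
      incoming_email_token # => "abc123token"
    end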
diff --git a/lib/gitlab/email/handler/unsubscribe_handler.rb b/lib/gitlab/email/handler/unsubscribe_handler.rb
index 5894384da5d..ea80e21532e 100644
--- a/lib/gitlab/email/handler/unsubscribe_handler.rb
+++ b/lib/gitlab/email/handler/unsubscribe_handler.rb
@@ -16,6 +16,7 @@ module Gitlab
noteable = sent_notification.noteable
raise NoteableNotFoundError unless noteable
+
noteable.unsubscribe(sent_notification.recipient)
end
diff --git a/lib/gitlab/email/receiver.rb b/lib/gitlab/email/receiver.rb
index c8f4591d060..d8c594ad0e7 100644
--- a/lib/gitlab/email/receiver.rb
+++ b/lib/gitlab/email/receiver.rb
@@ -13,8 +13,10 @@ module Gitlab
UserBlockedError = Class.new(ProcessingError)
UserNotAuthorizedError = Class.new(ProcessingError)
NoteableNotFoundError = Class.new(ProcessingError)
- InvalidNoteError = Class.new(ProcessingError)
- InvalidIssueError = Class.new(ProcessingError)
+ InvalidRecordError = Class.new(ProcessingError)
+ InvalidNoteError = Class.new(InvalidRecordError)
+ InvalidIssueError = Class.new(InvalidRecordError)
+ InvalidMergeRequestError = Class.new(InvalidRecordError)
UnknownIncomingEmail = Class.new(ProcessingError)
class Receiver
diff --git a/lib/gitlab/email/reply_parser.rb b/lib/gitlab/email/reply_parser.rb
index 558df87f36d..01c28d051ee 100644
--- a/lib/gitlab/email/reply_parser.rb
+++ b/lib/gitlab/email/reply_parser.rb
@@ -43,7 +43,7 @@ module Gitlab
return "" unless decoded
# Certain trigger phrases that means we didn't parse correctly
- if decoded =~ /(Content\-Type\:|multipart\/alternative|text\/plain)/
+ if decoded =~ %r{(Content\-Type\:|multipart/alternative|text/plain)}
return ""
end
diff --git a/lib/gitlab/emoji.rb b/lib/gitlab/emoji.rb
index e3e36b35ce9..89cf659bce4 100644
--- a/lib/gitlab/emoji.rb
+++ b/lib/gitlab/emoji.rb
@@ -31,8 +31,7 @@ module Gitlab
end
def emoji_unicode_version(name)
- @emoji_unicode_versions_by_name ||= JSON.parse(File.read(Rails.root.join('fixtures', 'emojis', 'emoji-unicode-version-map.json')))
- @emoji_unicode_versions_by_name[name]
+ emoji_unicode_versions_by_name[name]
end
def normalize_emoji_name(name)
@@ -56,5 +55,12 @@ module Gitlab
ActionController::Base.helpers.content_tag('gl-emoji', emoji_info['moji'], title: emoji_info['description'], data: data)
end
+
+ private
+
+ def emoji_unicode_versions_by_name
+ @emoji_unicode_versions_by_name ||=
+ JSON.parse(File.read(Rails.root.join('fixtures', 'emojis', 'emoji-unicode-version-map.json')))
+ end
end
end
diff --git a/lib/gitlab/encoding_helper.rb b/lib/gitlab/encoding_helper.rb
index 7b3483a7f96..6659efa0961 100644
--- a/lib/gitlab/encoding_helper.rb
+++ b/lib/gitlab/encoding_helper.rb
@@ -14,10 +14,7 @@ module Gitlab
ENCODING_CONFIDENCE_THRESHOLD = 50
def encode!(message)
- return nil unless message.respond_to? :force_encoding
-
- # if message is utf-8 encoding, just return it
- message.force_encoding("UTF-8")
+ message = force_encode_utf8(message)
return message if message.valid_encoding?
# return message if message type is binary
@@ -31,7 +28,9 @@ module Gitlab
# encode and clean the bad chars
message.replace clean(message)
- rescue
+ rescue ArgumentError => e
+ return unless e.message.include?('unknown encoding name')
+
encoding = detect ? detect[:encoding] : "unknown"
"--broken encoding: #{encoding}"
end
@@ -50,6 +49,9 @@ module Gitlab
end
def encode_utf8(message)
+ message = force_encode_utf8(message)
+ return message if message.valid_encoding?
+
detect = CharlockHolmes::EncodingDetector.detect(message)
if detect && detect[:encoding]
begin
@@ -62,10 +64,31 @@ module Gitlab
else
clean(message)
end
+ rescue ArgumentError
+ return nil
+ end
+
+ def encode_binary(s)
+ return "" if s.nil?
+
+ s.dup.force_encoding(Encoding::ASCII_8BIT)
+ end
+
+ def binary_stringio(s)
+ StringIO.new(s || '').tap { |io| io.set_encoding(Encoding::ASCII_8BIT) }
end
private
+ def force_encode_utf8(message)
+ raise ArgumentError unless message.respond_to?(:force_encoding)
+ return message if message.encoding == Encoding::UTF_8 && message.valid_encoding?
+
+ message = message.dup if message.respond_to?(:frozen?) && message.frozen?
+
+ message.force_encoding("UTF-8")
+ end
+
def clean(message)
message.encode("UTF-16BE", undef: :replace, invalid: :replace, replace: "")
.encode("UTF-8")
diff --git a/lib/gitlab/exclusive_lease.rb b/lib/gitlab/exclusive_lease.rb
index 3f7b42456af..dbb8f317afe 100644
--- a/lib/gitlab/exclusive_lease.rb
+++ b/lib/gitlab/exclusive_lease.rb
@@ -71,5 +71,16 @@ module Gitlab
redis.exists(@redis_shared_state_key)
end
end
+
+ # Returns the TTL of the Redis key.
+ #
+ # This method will return `nil` if no TTL could be obtained.
+ def ttl
+ Gitlab::Redis::SharedState.with do |redis|
+ ttl = redis.ttl(@redis_shared_state_key)
+
+ ttl if ttl.positive?
+ end
+ end
end
end
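A Rails-console sketch of the new ttl helper, assuming Redis shared state is configured; the lease key and timeout are made up:
    lease = Gitlab::ExclusiveLease.new('project_housekeeping:42', timeout: 3600)
    lease.try_obtain # => a UUID string, or false if the lease is already taken
    lease.ttl        # => remaining seconds on the Redis key, or nil if none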
diff --git a/lib/gitlab/file_detector.rb b/lib/gitlab/file_detector.rb
index a8cb7fc3fe7..cc2638172ec 100644
--- a/lib/gitlab/file_detector.rb
+++ b/lib/gitlab/file_detector.rb
@@ -6,31 +6,33 @@ module Gitlab
module FileDetector
PATTERNS = {
# Project files
- readme: /\Areadme/i,
- changelog: /\A(changelog|history|changes|news)/i,
- license: /\A(licen[sc]e|copying)(\..+|\z)/i,
- contributing: /\Acontributing/i,
+ readme: %r{\Areadme[^/]*\z}i,
+ changelog: %r{\A(changelog|history|changes|news)[^/]*\z}i,
+ license: %r{\A(licen[sc]e|copying)(\.[^/]+)?\z}i,
+ contributing: %r{\Acontributing[^/]*\z}i,
version: 'version',
avatar: /\Alogo\.(png|jpg|gif)\z/,
+ issue_template: %r{\A\.gitlab/issue_templates/[^/]+\.md\z},
+ merge_request_template: %r{\A\.gitlab/merge_request_templates/[^/]+\.md\z},
# Configuration files
gitignore: '.gitignore',
koding: '.koding.yml',
gitlab_ci: '.gitlab-ci.yml',
- route_map: 'route-map.yml',
+ route_map: '.gitlab/route-map.yml',
# Dependency files
- cartfile: /\ACartfile/,
+ cartfile: %r{\ACartfile[^/]*\z},
composer_json: 'composer.json',
gemfile: /\A(Gemfile|gems\.rb)\z/,
gemfile_lock: 'Gemfile.lock',
- gemspec: /\.gemspec\z/,
+ gemspec: %r{\A[^/]*\.gemspec\z},
godeps_json: 'Godeps.json',
package_json: 'package.json',
podfile: 'Podfile',
- podspec_json: /\.podspec\.json\z/,
- podspec: /\.podspec\z/,
- requirements_txt: /requirements\.txt\z/,
+ podspec_json: %r{\A[^/]*\.podspec\.json\z},
+ podspec: %r{\A[^/]*\.podspec\z},
+ requirements_txt: %r{\A[^/]*requirements\.txt\z},
yarn_lock: 'yarn.lock'
}.freeze
@@ -63,13 +65,11 @@ module Gitlab
# type_of('README.md') # => :readme
# type_of('VERSION') # => :version
def self.type_of(path)
- name = File.basename(path)
-
PATTERNS.each do |type, search|
did_match = if search.is_a?(Regexp)
- name =~ search
+ path =~ search
else
- name.casecmp(search) == 0
+ path.casecmp(search) == 0
end
return type if did_match
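Since type_of now matches against the full path, the patterns are anchored to avoid false positives in subdirectories; a standalone sketch using the regexes from the diff:
    readme_pattern   = %r{\Areadme[^/]*\z}i
    template_pattern = %r{\A\.gitlab/issue_templates/[^/]+\.md\z}

    'README.md' =~ readme_pattern                        # => 0 (match)
    'docs/README.md' =~ readme_pattern                   # => nil (no match)
    '.gitlab/issue_templates/Bug.md' =~ template_pattern # => 0 (match)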
diff --git a/lib/gitlab/file_finder.rb b/lib/gitlab/file_finder.rb
index 10ffc345bd5..8c082c0c336 100644
--- a/lib/gitlab/file_finder.rb
+++ b/lib/gitlab/file_finder.rb
@@ -28,7 +28,7 @@ module Gitlab
def find_by_content(query)
results = repository.search_files_by_content(query, ref).first(BATCH_SIZE)
- results.map { |result| Gitlab::ProjectSearchResults.parse_search_result(result) }
+ results.map { |result| Gitlab::ProjectSearchResults.parse_search_result(result, project) }
end
def find_by_filename(query, except: [])
@@ -45,7 +45,8 @@ module Gitlab
basename: File.basename(blob.path),
ref: ref,
startline: 1,
- data: blob.data
+ data: blob.data,
+ project: project
)
end
end
diff --git a/lib/gitlab/fogbugz_import/client.rb b/lib/gitlab/fogbugz_import/client.rb
index 2152182b37f..acb000e3e23 100644
--- a/lib/gitlab/fogbugz_import/client.rb
+++ b/lib/gitlab/fogbugz_import/client.rb
@@ -45,6 +45,7 @@ module Gitlab
project_name = repo(project_id).name
res = @api.command(:search, q: "project:'#{project_name}'", cols: 'ixPersonAssignedTo,ixPersonOpenedBy,ixPersonClosedBy,sStatus,sPriority,sCategory,fOpen,sTitle,sLatestTextSummary,dtOpened,dtClosed,dtResolved,dtLastUpdated,events')
return [] unless res['cases']['count'].to_i > 0
+
res['cases']['case']
end
diff --git a/lib/gitlab/fogbugz_import/importer.rb b/lib/gitlab/fogbugz_import/importer.rb
index 3dcee681c72..8953bc8c148 100644
--- a/lib/gitlab/fogbugz_import/importer.rb
+++ b/lib/gitlab/fogbugz_import/importer.rb
@@ -18,6 +18,7 @@ module Gitlab
def execute
return true unless repo.valid?
+
client = Gitlab::FogbugzImport::Client.new(token: fb_session[:token], uri: fb_session[:uri])
@cases = client.cases(@repo.id.to_i)
@@ -111,6 +112,7 @@ module Gitlab
[bug['sCategory'], bug['sPriority']].each do |label|
unless label.blank?
labels << label
+
unless @known_labels.include?(label)
create_label(label)
@known_labels << label
@@ -206,6 +208,7 @@ module Gitlab
def format_content(raw_content)
return raw_content if raw_content.nil?
+
linkify_issues(escape_for_markdown(raw_content))
end
@@ -263,6 +266,7 @@ module Gitlab
if content.blank?
content = '*(No description has been entered for this issue)*'
end
+
body << content
body.join("\n\n")
@@ -276,6 +280,7 @@ module Gitlab
if content.blank?
content = "*(No comment has been entered for this change)*"
end
+
body << content
if updates.any?
diff --git a/lib/gitlab/gfm/uploads_rewriter.rb b/lib/gitlab/gfm/uploads_rewriter.rb
index 8fab5489616..1b74f735679 100644
--- a/lib/gitlab/gfm/uploads_rewriter.rb
+++ b/lib/gitlab/gfm/uploads_rewriter.rb
@@ -27,7 +27,7 @@ module Gitlab
with_link_in_tmp_dir(file.file) do |open_tmp_file|
new_uploader.store!(open_tmp_file)
end
- new_uploader.to_markdown
+ new_uploader.markdown_link
end
end
@@ -46,7 +46,7 @@ module Gitlab
private
def find_file(project, secret, file)
- uploader = FileUploader.new(project, secret)
+ uploader = FileUploader.new(project, secret: secret)
uploader.retrieve_from_store!(file)
uploader.file
end
diff --git a/lib/gitlab/git.rb b/lib/gitlab/git.rb
index c78fe63f9b5..d4e893b881c 100644
--- a/lib/gitlab/git.rb
+++ b/lib/gitlab/git.rb
@@ -6,12 +6,13 @@ module Gitlab
CommandError = Class.new(StandardError)
CommitError = Class.new(StandardError)
+ OSError = Class.new(StandardError)
class << self
include Gitlab::EncodingHelper
def ref_name(ref)
- encode_utf8(ref).sub(/\Arefs\/(tags|heads|remotes)\//, '')
+ encode!(ref).sub(%r{\Arefs/(tags|heads|remotes)/}, '')
end
def branch_name(ref)
@@ -66,6 +67,22 @@ module Gitlab
end
end
end
+
+ def diff_line_code(file_path, new_line_position, old_line_position)
+ "#{Digest::SHA1.hexdigest(file_path)}_#{old_line_position}_#{new_line_position}"
+ end
+
+ def shas_eql?(sha1, sha2)
+ return false if sha1.nil? || sha2.nil?
+ return false unless sha1.class == sha2.class
+
+ # If either of the shas is below the minimum length, we cannot be sure
+ # that they actually refer to the same commit because of hash collision.
+ length = [sha1.length, sha2.length].min
+ return false if length < Gitlab::Git::Commit::MIN_SHA_LENGTH
+
+ sha1[0, length] == sha2[0, length]
+ end
end
end
end
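A standalone sketch of the shas_eql? semantics added above; MIN_SHA_LENGTH is 7, as defined on Gitlab::Git::Commit later in this diff:
    MIN_SHA_LENGTH = 7

    def shas_eql?(sha1, sha2)
      return false if sha1.nil? || sha2.nil?
      return false unless sha1.class == sha2.class

      length = [sha1.length, sha2.length].min
      return false if length < MIN_SHA_LENGTH

      sha1[0, length] == sha2[0, length]
    end

    full = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'
    shas_eql?(full, '4b825dc') # => true, an abbreviated SHA matches its prefix
    shas_eql?(full, '4b82')    # => false, below the minimum length
    shas_eql?(full, nil)       # => false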
diff --git a/lib/gitlab/git/attributes_at_ref_parser.rb b/lib/gitlab/git/attributes_at_ref_parser.rb
new file mode 100644
index 00000000000..26b5bd520d5
--- /dev/null
+++ b/lib/gitlab/git/attributes_at_ref_parser.rb
@@ -0,0 +1,14 @@
+module Gitlab
+ module Git
+ # Parses the root .gitattributes file at a given ref
+ class AttributesAtRefParser
+ delegate :attributes, to: :@parser
+
+ def initialize(repository, ref)
+ blob = repository.blob_at(ref, '.gitattributes')
+
+ @parser = AttributesParser.new(blob&.data)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/attributes.rb b/lib/gitlab/git/attributes_parser.rb
index 2d20cd473a7..d8aeabb6cba 100644
--- a/lib/gitlab/git/attributes.rb
+++ b/lib/gitlab/git/attributes_parser.rb
@@ -1,42 +1,26 @@
-# Gitaly note: JV: not sure what to make of this class. Why does it use
-# the full disk path of the repository to look up attributes This is
-# problematic in Gitaly, because Gitaly hides the full disk path to the
-# repository from gitlab-ce.
-
module Gitlab
module Git
# Class for parsing Git attribute files and extracting the attributes for
# file patterns.
- #
- # Unlike Rugged this parser only needs a single IO call (a call to `open`),
- # vastly reducing the time spent in extracting attributes.
- #
- # This class _only_ supports parsing the attributes file located at
- # `$GIT_DIR/info/attributes` as GitLab doesn't use any other files
- # (`.gitattributes` is copied to this particular path).
- #
- # Basic usage:
- #
- # attributes = Gitlab::Git::Attributes.new(some_repo.path)
- #
- # attributes.attributes('README.md') # => { "eol" => "lf }
- class Attributes
- # path - The path to the Git repository.
- def initialize(path)
- @path = File.expand_path(path)
- @patterns = nil
+ class AttributesParser
+ def initialize(attributes_data)
+ @data = attributes_data || ""
+
+ if @data.is_a?(File)
+ @patterns = parse_file
+ end
end
# Returns all the Git attributes for the given path.
#
- # path - A path to a file for which to get the attributes.
+ # file_path - A path to a file for which to get the attributes.
#
# Returns a Hash.
- def attributes(path)
- full_path = File.join(@path, path)
+ def attributes(file_path)
+ absolute_path = File.join('/', file_path)
patterns.each do |pattern, attrs|
- return attrs if File.fnmatch?(pattern, full_path)
+ return attrs if File.fnmatch?(pattern, absolute_path)
end
{}
@@ -98,16 +82,10 @@ module Gitlab
# Iterates over every line in the attributes file.
def each_line
- full_path = File.join(@path, 'info/attributes')
+ @data.each_line do |line|
+ break unless line.valid_encoding?
- return unless File.exist?(full_path)
-
- File.open(full_path, 'r') do |handle|
- handle.each_line do |line|
- break unless line.valid_encoding?
-
- yield line.strip
- end
+ yield line.strip
end
end
@@ -125,7 +103,8 @@ module Gitlab
parsed = attrs ? parse_attributes(attrs) : {}
- pairs << [File.join(@path, pattern), parsed]
+ absolute_pattern = File.join('/', pattern)
+ pairs << [absolute_pattern, parsed]
end
# Newer entries take precedence over older entries.
diff --git a/lib/gitlab/git/blame.rb b/lib/gitlab/git/blame.rb
index 31effdba292..6d6ed065f79 100644
--- a/lib/gitlab/git/blame.rb
+++ b/lib/gitlab/git/blame.rb
@@ -42,9 +42,7 @@ module Gitlab
end
def load_blame_by_shelling_out
- cmd = %W(#{Gitlab.config.git.bin_path} --git-dir=#{@repo.path} blame -p #{@sha} -- #{@path})
- # Read in binary mode to ensure ASCII-8BIT
- IO.popen(cmd, 'rb') {|io| io.read }
+ @repo.shell_blame(@sha, @path)
end
def process_raw_blame(output)
diff --git a/lib/gitlab/git/blob.rb b/lib/gitlab/git/blob.rb
index 8d96826f6ee..eabcf46cf58 100644
--- a/lib/gitlab/git/blob.rb
+++ b/lib/gitlab/git/blob.rb
@@ -12,6 +12,12 @@ module Gitlab
# blob data should use load_all_data!.
MAX_DATA_DISPLAY_SIZE = 10.megabytes
+ # These limits are used as a heuristic to ignore files which can't be LFS
+ # pointers. The format of these is described in
+ # https://github.com/git-lfs/git-lfs/blob/master/docs/spec.md#the-pointer
+ LFS_POINTER_MIN_SIZE = 120.bytes
+ LFS_POINTER_MAX_SIZE = 200.bytes
+
attr_accessor :name, :path, :size, :data, :mode, :id, :commit_id, :loaded_size, :binary
class << self
@@ -28,16 +34,9 @@ module Gitlab
def raw(repository, sha)
Gitlab::GitalyClient.migrate(:git_blob_raw) do |is_enabled|
if is_enabled
- Gitlab::GitalyClient::BlobService.new(repository).get_blob(oid: sha, limit: MAX_DATA_DISPLAY_SIZE)
+ repository.gitaly_blob_client.get_blob(oid: sha, limit: MAX_DATA_DISPLAY_SIZE)
else
- blob = repository.lookup(sha)
-
- new(
- id: blob.oid,
- size: blob.size,
- data: blob.content(MAX_DATA_DISPLAY_SIZE),
- binary: blob.binary?
- )
+ rugged_raw(repository, sha, limit: MAX_DATA_DISPLAY_SIZE)
end
end
end
@@ -50,10 +49,33 @@ module Gitlab
# Keep in mind that this method may allocate a lot of memory. It is up
# to the caller to limit the number of blobs and blob_size_limit.
#
- def batch(repository, blob_references, blob_size_limit: nil)
- blob_size_limit ||= MAX_DATA_DISPLAY_SIZE
- blob_references.map do |sha, path|
- find_by_rugged(repository, sha, path, limit: blob_size_limit)
+ # Gitaly migration issue: https://gitlab.com/gitlab-org/gitaly/issues/798
+ def batch(repository, blob_references, blob_size_limit: MAX_DATA_DISPLAY_SIZE)
+ Gitlab::GitalyClient.migrate(:list_blobs_by_sha_path) do |is_enabled|
+ if is_enabled
+ repository.gitaly_blob_client.get_blobs(blob_references, blob_size_limit).to_a
+ else
+ blob_references.map do |sha, path|
+ find_by_rugged(repository, sha, path, limit: blob_size_limit)
+ end
+ end
+ end
+ end
+
+ # Find LFS blobs given an array of sha ids
+ # Returns array of Gitlab::Git::Blob
+ # Does not guarantee blob data will be set
+ def batch_lfs_pointers(repository, blob_ids)
+ repository.gitaly_migrate(:batch_lfs_pointers) do |is_enabled|
+ if is_enabled
+ repository.gitaly_blob_client.batch_lfs_pointers(blob_ids.to_a)
+ else
+ blob_ids.lazy
+ .select { |sha| possible_lfs_blob?(repository, sha) }
+ .map { |sha| rugged_raw(repository, sha, limit: LFS_POINTER_MAX_SIZE) }
+ .select(&:lfs_pointer?)
+ .force
+ end
end
end
@@ -61,6 +83,10 @@ module Gitlab
EncodingHelper.detect_libgit2_binary?(data)
end
+ def size_could_be_lfs?(size)
+ size.between?(LFS_POINTER_MIN_SIZE, LFS_POINTER_MAX_SIZE)
+ end
+
private
# Recursive search of blob id by path
@@ -77,7 +103,7 @@ module Gitlab
def find_entry_by_path(repository, root_id, path)
root_tree = repository.lookup(root_id)
# Strip leading slashes
- path[/^\/*/] = ''
+ path[%r{^/*}] = ''
path_arr = path.split('/')
entry = root_tree.find do |entry|
@@ -88,6 +114,7 @@ module Gitlab
if path_arr.size > 1
return nil unless entry[:type] == :tree
+
path_arr.shift
find_entry_by_path(repository, entry[:oid], path_arr.join('/'))
else
@@ -106,13 +133,25 @@ module Gitlab
)
end
- def find_by_gitaly(repository, sha, path)
- path = path.sub(/\A\/*/, '')
+ def find_by_gitaly(repository, sha, path, limit: MAX_DATA_DISPLAY_SIZE)
+ return unless path
+
+ path = path.sub(%r{\A/*}, '')
path = '/' if path.empty?
name = File.basename(path)
- entry = Gitlab::GitalyClient::CommitService.new(repository).tree_entry(sha, path, MAX_DATA_DISPLAY_SIZE)
+
+ # Gitaly will think that setting the limit to 0 means unlimited, while
+ # the client might only need the metadata and thus set the limit to 0.
+ # In this method we'll then set the limit to 1, but clear the byte of data
+ # that we got back so for the outside world it looks like the limit was
+ # actually 0.
+ req_limit = limit == 0 ? 1 : limit
+
+ entry = Gitlab::GitalyClient::CommitService.new(repository).tree_entry(sha, path, req_limit)
return unless entry
+ entry.data = "" if limit == 0
+
case entry.type
when :COMMIT
new(
@@ -138,8 +177,10 @@ module Gitlab
end
def find_by_rugged(repository, sha, path, limit:)
- commit = repository.lookup(sha)
- root_tree = commit.tree
+ return unless path
+
+ rugged_commit = repository.lookup(sha)
+ root_tree = rugged_commit.tree
blob_entry = find_entry_by_path(repository, root_tree.oid, path)
@@ -164,6 +205,31 @@ module Gitlab
)
end
end
+ rescue Rugged::ReferenceError
+ nil
+ end
+
+ def rugged_raw(repository, sha, limit:)
+ blob = repository.lookup(sha)
+
+ return unless blob.is_a?(Rugged::Blob)
+
+ new(
+ id: blob.oid,
+ size: blob.size,
+ data: blob.content(limit),
+ binary: blob.binary?
+ )
+ end
+
+ # Efficient lookup to determine if object size
+ # and type make it a possible LFS blob without loading
+ # blob content into memory with repository.lookup(sha)
+ def possible_lfs_blob?(repository, sha)
+ object_header = repository.rugged.read_header(sha)
+
+ object_header[:type] == :blob &&
+ size_could_be_lfs?(object_header[:len])
end
end
@@ -172,9 +238,9 @@ module Gitlab
self.__send__("#{key}=", options[key.to_sym]) # rubocop:disable GitlabSecurity/PublicSend
end
- @loaded_all_data = false
# Retain the actual size before it is encoded
@loaded_size = @data.bytesize if @data
+ @loaded_all_data = @loaded_size == size
end
def binary?
@@ -189,12 +255,17 @@ module Gitlab
# memory as a Ruby string.
def load_all_data!(repository)
return if @data == '' # don't mess with submodule blobs
- return @data if @loaded_all_data
- Gitlab::GitalyClient.migrate(:git_blob_load_all_data) do |is_enabled|
- @data = begin
+ # Even if we return early, recalculate whether this blob is binary in
+ # case a blob was initialized as text but the full data isn't
+ @binary = nil
+
+ return if @loaded_all_data
+
+ @data = Gitlab::GitalyClient.migrate(:git_blob_load_all_data) do |is_enabled|
+ begin
if is_enabled
- Gitlab::GitalyClient::BlobService.new(repository).get_blob(oid: id, limit: -1).data
+ repository.gitaly_blob_client.get_blob(oid: id, limit: -1).data
else
repository.lookup(id).content
end
@@ -203,7 +274,6 @@ module Gitlab
@loaded_all_data = true
@loaded_size = @data.bytesize
- @binary = nil
end
def name
@@ -224,7 +294,7 @@ module Gitlab
# size
# see https://github.com/github/git-lfs/blob/v1.1.0/docs/spec.md#the-pointer
def lfs_pointer?
- has_lfs_version_key? && lfs_oid.present? && lfs_size.present?
+ self.class.size_could_be_lfs?(size) && has_lfs_version_key? && lfs_oid.present? && lfs_size.present?
end
def lfs_oid
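The LFS pointer heuristic above only parses blobs whose size falls inside the valid pointer range; a standalone sketch with plain byte counts:
    LFS_POINTER_MIN_SIZE = 120
    LFS_POINTER_MAX_SIZE = 200

    def size_could_be_lfs?(size)
      size.between?(LFS_POINTER_MIN_SIZE, LFS_POINTER_MAX_SIZE)
    end

    size_could_be_lfs?(130)  # => true, worth parsing as a possible pointer
    size_could_be_lfs?(4096) # => false, too large to be an LFS pointer file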
diff --git a/lib/gitlab/git/branch.rb b/lib/gitlab/git/branch.rb
index c53882787f1..6351cfb83e3 100644
--- a/lib/gitlab/git/branch.rb
+++ b/lib/gitlab/git/branch.rb
@@ -1,11 +1,31 @@
-# Gitaly note: JV: no RPC's here.
-
module Gitlab
module Git
class Branch < Ref
+ STALE_BRANCH_THRESHOLD = 3.months
+
+ def self.find(repo, branch_name)
+ if branch_name.is_a?(Gitlab::Git::Branch)
+ branch_name
+ else
+ repo.find_branch(branch_name)
+ end
+ end
+
def initialize(repository, name, target, target_commit)
super(repository, name, target, target_commit)
end
+
+ def active?
+ self.dereferenced_target.committed_date >= STALE_BRANCH_THRESHOLD.ago
+ end
+
+ def stale?
+ !active?
+ end
+
+ def state
+ active? ? :active : :stale
+ end
end
end
end
diff --git a/lib/gitlab/git/commit.rb b/lib/gitlab/git/commit.rb
index 1957c254c28..594b6a9cbc5 100644
--- a/lib/gitlab/git/commit.rb
+++ b/lib/gitlab/git/commit.rb
@@ -6,6 +6,7 @@ module Gitlab
attr_accessor :raw_commit, :head
+ MIN_SHA_LENGTH = 7
SERIALIZE_KEYS = [
:id, :message, :parent_ids,
:authored_date, :author_name, :author_email,
@@ -14,8 +15,6 @@ module Gitlab
attr_accessor *SERIALIZE_KEYS # rubocop:disable Lint/AmbiguousOperator
- delegate :tree, to: :rugged_commit
-
def ==(other)
return false unless other.is_a?(Gitlab::Git::Commit)
@@ -72,7 +71,8 @@ module Gitlab
decorate(repo, commit) if commit
rescue Rugged::ReferenceError, Rugged::InvalidError, Rugged::ObjectError,
- Gitlab::Git::CommandError, Gitlab::Git::Repository::NoRepository
+ Gitlab::Git::CommandError, Gitlab::Git::Repository::NoRepository,
+ Rugged::OdbError, Rugged::TreeError, ArgumentError
nil
end
@@ -212,14 +212,90 @@ module Gitlab
end
def shas_with_signatures(repository, shas)
- shas.select do |sha|
- begin
- Rugged::Commit.extract_signature(repository.rugged, sha)
- rescue Rugged::OdbError
- false
+ GitalyClient.migrate(:filter_shas_with_signatures) do |is_enabled|
+ if is_enabled
+ Gitlab::GitalyClient::CommitService.new(repository).filter_shas_with_signatures(shas)
+ else
+ shas.select do |sha|
+ begin
+ Rugged::Commit.extract_signature(repository.rugged, sha)
+ rescue Rugged::OdbError
+ false
+ end
+ end
+ end
+ end
+ end
+
+ # Only to be used when the object ids will not necessarily have a
+ # relation to each other. The last 10 commits for a branch, for example,
+ # should go through .where
+ def batch_by_oid(repo, oids)
+ repo.gitaly_migrate(:list_commits_by_oid) do |is_enabled|
+ if is_enabled
+ repo.gitaly_commit_client.list_commits_by_oid(oids)
+ else
+ oids.map { |oid| find(repo, oid) }.compact
end
end
end
+
+ def extract_signature(repository, commit_id)
+ repository.gitaly_migrate(:extract_commit_signature) do |is_enabled|
+ if is_enabled
+ repository.gitaly_commit_client.extract_signature(commit_id)
+ else
+ rugged_extract_signature(repository, commit_id)
+ end
+ end
+ end
+
+ def extract_signature_lazily(repository, commit_id)
+ BatchLoader.for({ repository: repository, commit_id: commit_id }).batch do |items, loader|
+ items_by_repo = items.group_by { |i| i[:repository] }
+
+ items_by_repo.each do |repo, items|
+ commit_ids = items.map { |i| i[:commit_id] }
+
+ signatures = batch_signature_extraction(repository, commit_ids)
+
+ signatures.each do |commit_sha, signature_data|
+ loader.call({ repository: repository, commit_id: commit_sha }, signature_data)
+ end
+ end
+ end
+ end
+
+ def batch_signature_extraction(repository, commit_ids)
+ repository.gitaly_migrate(:extract_commit_signature_in_batch) do |is_enabled|
+ if is_enabled
+ gitaly_batch_signature_extraction(repository, commit_ids)
+ else
+ rugged_batch_signature_extraction(repository, commit_ids)
+ end
+ end
+ end
+
+ def gitaly_batch_signature_extraction(repository, commit_ids)
+ repository.gitaly_commit_client.get_commit_signatures(commit_ids)
+ end
+
+ def rugged_batch_signature_extraction(repository, commit_ids)
+ commit_ids.each_with_object({}) do |commit_id, signatures|
+ signature_data = rugged_extract_signature(repository, commit_id)
+ next unless signature_data
+
+ signatures[commit_id] = signature_data
+ end
+ end
+
+ def rugged_extract_signature(repository, commit_id)
+ begin
+ Rugged::Commit.extract_signature(repository.rugged, commit_id)
+ rescue Rugged::OdbError
+ nil
+ end
+ end
end
def initialize(repository, raw_commit, head = nil)
@@ -365,15 +441,6 @@ module Gitlab
end
end
- # Get a collection of Rugged::Reference objects for this commit.
- #
- # Ex.
- # commit.ref(repo)
- #
- def refs(repo)
- repo.refs_hash[id]
- end
-
# Get ref names collection
#
# Ex.
@@ -381,7 +448,7 @@ module Gitlab
#
def ref_names(repo)
refs(repo).map do |ref|
- ref.name.sub(%r{^refs/(heads|remotes|tags)/}, "")
+ ref.sub(%r{^refs/(heads|remotes|tags)/}, "")
end
end
@@ -417,6 +484,30 @@ module Gitlab
parent_ids.size > 1
end
+ def tree_entry(path)
+ @repository.gitaly_migrate(:commit_tree_entry) do |is_migrated|
+ if is_migrated
+ gitaly_tree_entry(path)
+ else
+ rugged_tree_entry(path)
+ end
+ end
+ end
+
+ def to_gitaly_commit
+ return raw_commit if raw_commit.is_a?(Gitaly::GitCommit)
+
+ message_split = raw_commit.message.split("\n", 2)
+ Gitaly::GitCommit.new(
+ id: raw_commit.oid,
+ subject: message_split[0] ? message_split[0].chomp.b : "",
+ body: raw_commit.message.b,
+ parent_ids: raw_commit.parent_ids,
+ author: gitaly_commit_author_from_rugged(raw_commit.author),
+ committer: gitaly_commit_author_from_rugged(raw_commit.committer)
+ )
+ end
+
private
def init_from_hash(hash)
@@ -456,12 +547,51 @@ module Gitlab
@committed_date = Time.at(commit.committer.date.seconds).utc
@committer_name = commit.committer.name.dup
@committer_email = commit.committer.email.dup
- @parent_ids = commit.parent_ids
+ @parent_ids = Array(commit.parent_ids)
end
def serialize_keys
SERIALIZE_KEYS
end
+
+ def gitaly_tree_entry(path)
+ # We're only interested in metadata, so limit actual data to 1 byte
+ # since Gitaly doesn't support "send no data" option.
+ entry = @repository.gitaly_commit_client.tree_entry(id, path, 1)
+ return unless entry
+
+ # To be compatible with the rugged format
+ entry = entry.to_h
+ entry.delete(:data)
+ entry[:name] = File.basename(path)
+ entry[:type] = entry[:type].downcase
+
+ entry
+ end
+
+ # Is this the same as Blob.find_entry_by_path ?
+ def rugged_tree_entry(path)
+ rugged_commit.tree.path(path)
+ rescue Rugged::TreeError
+ nil
+ end
+
+ def gitaly_commit_author_from_rugged(author_or_committer)
+ Gitaly::CommitAuthor.new(
+ name: author_or_committer[:name].b,
+ email: author_or_committer[:email].b,
+ date: Google::Protobuf::Timestamp.new(seconds: author_or_committer[:time].to_i)
+ )
+ end
+
+ # Get a collection of Gitlab::Git::Ref objects for this commit.
+ #
+ # Ex.
+ # commit.ref(repo)
+ #
+ def refs(repo)
+ repo.refs_hash[id]
+ end
end
end
end
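A rough Rails-console sketch of the batch APIs added to Gitlab::Git::Commit; the project and object ids are illustrative:
    repo = project.repository.raw_repository

    Gitlab::Git::Commit.batch_by_oid(repo, %w[4b825dc6 96d1c4f2])
    # => array of Gitlab::Git::Commit, in a single Gitaly call when enabled

    signature = Gitlab::Git::Commit.extract_signature_lazily(repo, '4b825dc6')
    signature.itself # BatchLoader proxy; resolving it runs the batched lookup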
diff --git a/lib/gitlab/git/commit_stats.rb b/lib/gitlab/git/commit_stats.rb
index 6bf49a0af18..8463b1eb794 100644
--- a/lib/gitlab/git/commit_stats.rb
+++ b/lib/gitlab/git/commit_stats.rb
@@ -34,13 +34,8 @@ module Gitlab
def rugged_stats(commit)
diff = commit.rugged_diff_from_parent
-
- diff.each_patch do |p|
- # TODO: Use the new Rugged convenience methods when they're released
- @additions += p.stat[0]
- @deletions += p.stat[1]
- @total += p.changes
- end
+ _files_changed, @additions, @deletions = diff.stat
+ @total = @additions + @deletions
end
end
end
diff --git a/lib/gitlab/git/conflict/file.rb b/lib/gitlab/git/conflict/file.rb
new file mode 100644
index 00000000000..2a9cf10a068
--- /dev/null
+++ b/lib/gitlab/git/conflict/file.rb
@@ -0,0 +1,88 @@
+module Gitlab
+ module Git
+ module Conflict
+ class File
+ attr_reader :their_path, :our_path, :our_mode, :repository, :commit_oid
+
+ attr_accessor :content
+
+ def initialize(repository, commit_oid, conflict, content)
+ @repository = repository
+ @commit_oid = commit_oid
+ @their_path = conflict[:theirs][:path]
+ @our_path = conflict[:ours][:path]
+ @our_mode = conflict[:ours][:mode]
+ @content = content
+ end
+
+ def lines
+ return @lines if defined?(@lines)
+
+ begin
+ @type = 'text'
+ @lines = Gitlab::Git::Conflict::Parser.parse(content,
+ our_path: our_path,
+ their_path: their_path)
+ rescue Gitlab::Git::Conflict::Parser::ParserError
+ @type = 'text-editor'
+ @lines = nil
+ end
+ end
+
+ def type
+ lines unless @type
+
+ @type.inquiry
+ end
+
+ def our_blob
+ # REFACTOR NOTE: the source of `commit_oid` used to be
+ # `merge_request.diff_refs.head_sha`. Instead of passing this value
+ # around the new lib structure, I decided to use `@commit_oid` which is
+ # equivalent to `merge_request.source_branch_head.raw.rugged_commit.oid`.
+ # That is what `merge_request.diff_refs.head_sha` is equivalent to when
+ # `merge_request` is not persisted (see `MergeRequest#diff_head_commit`).
+ # I think using the same oid is more consistent anyways, but if Conflicts
+ # start breaking, the change described above is a good place to look at.
+ @our_blob ||= repository.blob_at(@commit_oid, our_path)
+ end
+
+ def line_code(line)
+ Gitlab::Git.diff_line_code(our_path, line[:line_new], line[:line_old])
+ end
+
+ def resolve_lines(resolution)
+ section_id = nil
+
+ lines.map do |line|
+ unless line[:type]
+ section_id = nil
+ next line
+ end
+
+ section_id ||= line_code(line)
+
+ case resolution[section_id]
+ when 'head'
+ next unless line[:type] == 'new'
+ when 'origin'
+ next unless line[:type] == 'old'
+ else
+ raise Gitlab::Git::Conflict::Resolver::ResolutionError, "Missing resolution for section ID: #{section_id}"
+ end
+
+ line
+ end.compact
+ end
+
+ def resolve_content(resolution)
+ if resolution == content
+ raise Gitlab::Git::Conflict::Resolver::ResolutionError, "Resolved content has no changes for file #{our_path}"
+ end
+
+ resolution
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/conflict/parser.rb b/lib/gitlab/git/conflict/parser.rb
new file mode 100644
index 00000000000..3effa9d2d31
--- /dev/null
+++ b/lib/gitlab/git/conflict/parser.rb
@@ -0,0 +1,91 @@
+module Gitlab
+ module Git
+ module Conflict
+ class Parser
+ UnresolvableError = Class.new(StandardError)
+ UnmergeableFile = Class.new(UnresolvableError)
+ UnsupportedEncoding = Class.new(UnresolvableError)
+
+ # Recoverable errors - the conflict can be resolved in an editor, but not with
+ # sections.
+ ParserError = Class.new(StandardError)
+ UnexpectedDelimiter = Class.new(ParserError)
+ MissingEndDelimiter = Class.new(ParserError)
+
+ class << self
+ def parse(text, our_path:, their_path:, parent_file: nil)
+ validate_text!(text)
+
+ line_obj_index = 0
+ line_old = 1
+ line_new = 1
+ type = nil
+ lines = []
+ conflict_start = "<<<<<<< #{our_path}"
+ conflict_middle = '======='
+ conflict_end = ">>>>>>> #{their_path}"
+
+ text.each_line.map do |line|
+ full_line = line.delete("\n")
+
+ if full_line == conflict_start
+ validate_delimiter!(type.nil?)
+
+ type = 'new'
+ elsif full_line == conflict_middle
+ validate_delimiter!(type == 'new')
+
+ type = 'old'
+ elsif full_line == conflict_end
+ validate_delimiter!(type == 'old')
+
+ type = nil
+ elsif line[0] == '\\'
+ type = 'nonewline'
+ lines << {
+ full_line: full_line,
+ type: type,
+ line_obj_index: line_obj_index,
+ line_old: line_old,
+ line_new: line_new
+ }
+ else
+ lines << {
+ full_line: full_line,
+ type: type,
+ line_obj_index: line_obj_index,
+ line_old: line_old,
+ line_new: line_new
+ }
+
+ line_old += 1 if type != 'new'
+ line_new += 1 if type != 'old'
+
+ line_obj_index += 1
+ end
+ end
+
+ raise MissingEndDelimiter unless type.nil?
+
+ lines
+ end
+
+ private
+
+ def validate_text!(text)
+ raise UnmergeableFile if text.blank? # Typically a binary file
+ raise UnmergeableFile if text.length > 200.kilobytes
+
+ text.force_encoding('UTF-8')
+
+ raise UnsupportedEncoding unless text.valid_encoding?
+ end
+
+ def validate_delimiter!(condition)
+ raise UnexpectedDelimiter unless condition
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/conflict/resolution.rb b/lib/gitlab/git/conflict/resolution.rb
new file mode 100644
index 00000000000..ab9be683e15
--- /dev/null
+++ b/lib/gitlab/git/conflict/resolution.rb
@@ -0,0 +1,15 @@
+module Gitlab
+ module Git
+ module Conflict
+ class Resolution
+ attr_reader :user, :files, :commit_message
+
+ def initialize(user, files, commit_message)
+ @user = user
+ @files = files
+ @commit_message = commit_message
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/conflict/resolver.rb b/lib/gitlab/git/conflict/resolver.rb
new file mode 100644
index 00000000000..07b7e811a34
--- /dev/null
+++ b/lib/gitlab/git/conflict/resolver.rb
@@ -0,0 +1,118 @@
+module Gitlab
+ module Git
+ module Conflict
+ class Resolver
+ ConflictSideMissing = Class.new(StandardError)
+ ResolutionError = Class.new(StandardError)
+
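+        # Usage sketch (the repository and commit oids are assumed variables):
+        #
+        #   resolver = Gitlab::Git::Conflict::Resolver.new(target_repository, our_oid, their_oid)
+        #   resolver.conflicts # => array of Gitlab::Git::Conflict::File
+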
+ def initialize(target_repository, our_commit_oid, their_commit_oid)
+ @target_repository = target_repository
+ @our_commit_oid = our_commit_oid
+ @their_commit_oid = their_commit_oid
+ end
+
+ def conflicts
+ @conflicts ||= begin
+ @target_repository.gitaly_migrate(:conflicts_list_conflict_files) do |is_enabled|
+ if is_enabled
+ gitaly_conflicts_client(@target_repository).list_conflict_files.to_a
+ else
+ rugged_list_conflict_files
+ end
+ end
+ end
+ rescue GRPC::FailedPrecondition => e
+ raise Gitlab::Git::Conflict::Resolver::ConflictSideMissing.new(e.message)
+ rescue Rugged::OdbError, GRPC::BadStatus => e
+ raise Gitlab::Git::CommandError.new(e)
+ end
+
+ def resolve_conflicts(source_repository, resolution, source_branch:, target_branch:)
+ source_repository.gitaly_migrate(:conflicts_resolve_conflicts) do |is_enabled|
+ if is_enabled
+ gitaly_conflicts_client(source_repository).resolve_conflicts(@target_repository, resolution, source_branch, target_branch)
+ else
+ rugged_resolve_conflicts(source_repository, resolution, source_branch, target_branch)
+ end
+ end
+ end
+
+ def conflict_for_path(conflicts, old_path, new_path)
+ conflicts.find do |conflict|
+ conflict.their_path == old_path && conflict.our_path == new_path
+ end
+ end
+
+ private
+
+ def conflict_files(repository, index)
+ index.conflicts.map do |conflict|
+ raise ConflictSideMissing unless conflict[:theirs] && conflict[:ours]
+
+ Gitlab::Git::Conflict::File.new(
+ repository,
+ @our_commit_oid,
+ conflict,
+ index.merge_file(conflict[:ours][:path])[:data]
+ )
+ end
+ end
+
+ def gitaly_conflicts_client(repository)
+ repository.gitaly_conflicts_client(@our_commit_oid, @their_commit_oid)
+ end
+
+ def write_resolved_file_to_index(repository, index, file, params)
+ if params[:sections]
+ resolved_lines = file.resolve_lines(params[:sections])
+ new_file = resolved_lines.map { |line| line[:full_line] }.join("\n")
+
+ new_file << "\n" if file.our_blob.data.end_with?("\n")
+ elsif params[:content]
+ new_file = file.resolve_content(params[:content])
+ end
+
+ our_path = file.our_path
+
+ oid = repository.rugged.write(new_file, :blob)
+ index.add(path: our_path, oid: oid, mode: file.our_mode)
+ index.conflict_remove(our_path)
+ end
+
+ def rugged_list_conflict_files
+ target_index = @target_repository.rugged.merge_commits(@our_commit_oid, @their_commit_oid)
+
+ # We don't need to do `with_repo_branch_commit` here, because the target
+ # project always fetches source refs when creating merge request diffs.
+ conflict_files(@target_repository, target_index)
+ end
+
+ def rugged_resolve_conflicts(source_repository, resolution, source_branch, target_branch)
+ source_repository.with_repo_branch_commit(@target_repository, target_branch) do
+ index = source_repository.rugged.merge_commits(@our_commit_oid, @their_commit_oid)
+ conflicts = conflict_files(source_repository, index)
+
+ resolution.files.each do |file_params|
+ conflict_file = conflict_for_path(conflicts, file_params[:old_path], file_params[:new_path])
+
+ write_resolved_file_to_index(source_repository, index, conflict_file, file_params)
+ end
+
+ unless index.conflicts.empty?
+ missing_files = index.conflicts.map { |file| file[:ours][:path] }
+
+ raise ResolutionError, "Missing resolutions for the following files: #{missing_files.join(', ')}"
+ end
+
+ commit_params = {
+ message: resolution.commit_message,
+ parents: [@our_commit_oid, @their_commit_oid]
+ }
+
+ source_repository.commit_index(resolution.user, source_branch, index, commit_params)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/diff.rb b/lib/gitlab/git/diff.rb
index a23c8cf0dd1..a203587aec1 100644
--- a/lib/gitlab/git/diff.rb
+++ b/lib/gitlab/git/diff.rb
@@ -24,41 +24,13 @@ module Gitlab
SERIALIZE_KEYS = %i(diff new_path old_path a_mode b_mode new_file renamed_file deleted_file too_large).freeze
- class << self
- # The maximum size of a diff to display.
- def size_limit
- if RequestStore.active?
- RequestStore['gitlab_git_diff_size_limit'] ||= find_size_limit
- else
- find_size_limit
- end
- end
-
- # The maximum size before a diff is collapsed.
- def collapse_limit
- if RequestStore.active?
- RequestStore['gitlab_git_diff_collapse_limit'] ||= find_collapse_limit
- else
- find_collapse_limit
- end
- end
-
- def find_size_limit
- if Feature.enabled?('gitlab_git_diff_size_limit_increase')
- 200.kilobytes
- else
- 100.kilobytes
- end
- end
+ # The maximum size of a diff to display.
+ SIZE_LIMIT = 100.kilobytes
- def find_collapse_limit
- if Feature.enabled?('gitlab_git_diff_size_limit_increase')
- 100.kilobytes
- else
- 10.kilobytes
- end
- end
+ # The maximum size before a diff is collapsed.
+ COLLAPSE_LIMIT = 10.kilobytes
+ class << self
def between(repo, head, base, options = {}, *paths)
straight = options.delete(:straight) || false
@@ -72,7 +44,7 @@ module Gitlab
# branch1...branch2) From the git documentation:
# "git diff A...B" is equivalent to "git diff
# $(git-merge-base A B) B"
- repo.merge_base_commit(head, base)
+ repo.merge_base(head, base)
end
options ||= {}
@@ -172,7 +144,7 @@ module Gitlab
def too_large?
if @too_large.nil?
- @too_large = @diff.bytesize >= self.class.size_limit
+ @too_large = @diff.bytesize >= SIZE_LIMIT
else
@too_large
end
@@ -190,7 +162,7 @@ module Gitlab
def collapsed?
return @collapsed if defined?(@collapsed)
- @collapsed = !expanded && @diff.bytesize >= self.class.collapse_limit
+ @collapsed = !expanded && @diff.bytesize >= COLLAPSE_LIMIT
end
def collapse!
@@ -206,6 +178,10 @@ module Gitlab
Diff.binary_message(@old_path, @new_path)
end
+ def has_binary_notice?
+ @diff.start_with?('Binary')
+ end
+
private
def init_from_rugged(rugged)
@@ -271,14 +247,14 @@ module Gitlab
hunk.each_line do |line|
size += line.content.bytesize
- if size >= self.class.size_limit
+ if size >= SIZE_LIMIT
too_large!
return true
end
end
end
- if !expanded && size >= self.class.collapse_limit
+ if !expanded && size >= COLLAPSE_LIMIT
collapse!
return true
end
diff --git a/lib/gitlab/git/env.rb b/lib/gitlab/git/env.rb
index f80193ac553..9d0b47a1a6d 100644
--- a/lib/gitlab/git/env.rb
+++ b/lib/gitlab/git/env.rb
@@ -11,9 +11,11 @@ module Gitlab
#
# This class is thread-safe via RequestStore.
class Env
- WHITELISTED_GIT_VARIABLES = %w[
+ WHITELISTED_VARIABLES = %w[
GIT_OBJECT_DIRECTORY
+ GIT_OBJECT_DIRECTORY_RELATIVE
GIT_ALTERNATE_OBJECT_DIRECTORIES
+ GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE
].freeze
def self.set(env)
@@ -28,12 +30,23 @@ module Gitlab
RequestStore.fetch(:gitlab_git_env) { {} }
end
+ def self.to_env_hash
+ env = {}
+
+ all.compact.each do |key, value|
+ value = value.join(File::PATH_SEPARATOR) if value.is_a?(Array)
+ env[key.to_s] = value
+ end
+
+ env
+ end
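+
+      # Example (the directory value is illustrative):
+      #
+      #   Gitlab::Git::Env.to_env_hash
+      #   # => { 'GIT_OBJECT_DIRECTORY' => '/path/to/repo.git/objects' }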
+
def self.[](key)
all[key]
end
def self.whitelist_git_env(env)
- env.select { |key, _| WHITELISTED_GIT_VARIABLES.include?(key.to_s) }.with_indifferent_access
+ env.select { |key, _| WHITELISTED_VARIABLES.include?(key.to_s) }.with_indifferent_access
end
end
end
diff --git a/lib/gitlab/git/gitlab_projects.rb b/lib/gitlab/git/gitlab_projects.rb
new file mode 100644
index 00000000000..5e1e22ae65c
--- /dev/null
+++ b/lib/gitlab/git/gitlab_projects.rb
@@ -0,0 +1,285 @@
+module Gitlab
+ module Git
+ class GitlabProjects
+ include Gitlab::Git::Popen
+ include Gitlab::Utils::StrongMemoize
+
+ ShardNameNotFoundError = Class.new(StandardError)
+
+ # Absolute path to directory where repositories are stored.
+ # Example: /home/git/repositories
+ attr_reader :shard_path
+
+      # Relative path is the repository's directory name, with .git at the end.
+ # Example: gitlab-org/gitlab-test.git
+ attr_reader :repository_relative_path
+
+ # Absolute path to the repository.
+      # Example: /home/git/repositories/gitlab-org/gitlab-test.git
+ attr_reader :repository_absolute_path
+
+ # This is the path at which the gitlab-shell hooks directory can be found.
+ # It's essential for integration between git and GitLab proper. All new
+ # repositories should have their hooks directory symlinked here.
+ attr_reader :global_hooks_path
+
+ attr_reader :logger
+
+ def initialize(shard_path, repository_relative_path, global_hooks_path:, logger:)
+ @shard_path = shard_path
+ @repository_relative_path = repository_relative_path
+
+ @logger = logger
+ @global_hooks_path = global_hooks_path
+ @repository_absolute_path = File.join(shard_path, repository_relative_path)
+ @output = StringIO.new
+ end
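+
+      # Construction sketch (the paths below are assumptions for the example):
+      #
+      #   projects = Gitlab::Git::GitlabProjects.new(
+      #     '/home/git/repositories',
+      #     'gitlab-org/gitlab-test.git',
+      #     global_hooks_path: '/home/git/gitlab-shell/hooks',
+      #     logger: Rails.logger
+      #   )
+      #   projects.import_project('https://gitlab.com/gitlab-org/gitlab-test.git', 900)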
+
+ def output
+ io = @output.dup
+ io.rewind
+ io.read
+ end
+
+ # Import project via git clone --bare
+ # URL must be publicly cloneable
+ def import_project(source, timeout)
+ Gitlab::GitalyClient.migrate(:import_repository) do |is_enabled|
+ if is_enabled
+ gitaly_import_repository(source)
+ else
+ git_import_repository(source, timeout)
+ end
+ end
+ end
+
+ def fork_repository(new_shard_path, new_repository_relative_path)
+ Gitlab::GitalyClient.migrate(:fork_repository) do |is_enabled|
+ if is_enabled
+ gitaly_fork_repository(new_shard_path, new_repository_relative_path)
+ else
+ git_fork_repository(new_shard_path, new_repository_relative_path)
+ end
+ end
+ end
+
+ def fetch_remote(name, timeout, force:, tags:, ssh_key: nil, known_hosts: nil, prune: true)
+ tags_option = tags ? '--tags' : '--no-tags'
+
+ logger.info "Fetching remote #{name} for repository #{repository_absolute_path}."
+ cmd = %W(git fetch #{name} --quiet)
+ cmd << '--prune' if prune
+ cmd << '--force' if force
+ cmd << tags_option
+
+ setup_ssh_auth(ssh_key, known_hosts) do |env|
+ success = run_with_timeout(cmd, timeout, repository_absolute_path, env)
+
+ unless success
+ logger.error "Fetching remote #{name} for repository #{repository_absolute_path} failed."
+ end
+
+ success
+ end
+ end
+
+ def push_branches(remote_name, timeout, force, branch_names)
+ logger.info "Pushing branches from #{repository_absolute_path} to remote #{remote_name}: #{branch_names}"
+ cmd = %w(git push)
+ cmd << '--force' if force
+ cmd += %W(-- #{remote_name}).concat(branch_names)
+
+ success = run_with_timeout(cmd, timeout, repository_absolute_path)
+
+ unless success
+ logger.error("Pushing branches to remote #{remote_name} failed.")
+ end
+
+ success
+ end
+
+ def delete_remote_branches(remote_name, branch_names)
+ branches = branch_names.map { |branch_name| ":#{branch_name}" }
+
+ logger.info "Pushing deleted branches from #{repository_absolute_path} to remote #{remote_name}: #{branch_names}"
+ cmd = %W(git push -- #{remote_name}).concat(branches)
+
+ success = run(cmd, repository_absolute_path)
+
+ unless success
+ logger.error("Pushing deleted branches to remote #{remote_name} failed.")
+ end
+
+ success
+ end
+
+ protected
+
+ def run(*args)
+ output, exitstatus = popen(*args)
+ @output << output
+
+ exitstatus&.zero?
+ end
+
+ def run_with_timeout(*args)
+ output, exitstatus = popen_with_timeout(*args)
+ @output << output
+
+ exitstatus&.zero?
+ rescue Timeout::Error
+ @output.puts('Timed out')
+
+ false
+ end
+
+ def mask_password_in_url(url)
+ result = URI(url)
+ result.password = "*****" unless result.password.nil?
+ result.user = "*****" unless result.user.nil? # it's needed for oauth access_token
+ result
+ rescue
+ url
+ end
+
+ def remove_origin_in_repo
+ cmd = %w(git remote rm origin)
+ run(cmd, repository_absolute_path)
+ end
+
+ # Builds a small shell script that can be used to execute SSH with a set of
+ # custom options.
+ #
+ # Options are expanded as `'-oKey="Value"'`, so SSH will correctly interpret
+ # paths with spaces in them. We trust the user not to embed single or double
+ # quotes in the key or value.
+ def custom_ssh_script(options = {})
+ args = options.map { |k, v| %Q{'-o#{k}="#{v}"'} }.join(' ')
+
+ [
+ "#!/bin/sh",
+ "exec ssh #{args} \"$@\""
+ ].join("\n")
+ end
+
+ # Known hosts data and private keys can be passed to gitlab-shell in the
+ # environment. If present, this method puts them into temporary files, writes
+ # a script that can substitute as `ssh`, setting the options to respect those
+ # files, and yields: { "GIT_SSH" => "/tmp/myScript" }
+ def setup_ssh_auth(key, known_hosts)
+ options = {}
+
+ if key
+ key_file = Tempfile.new('gitlab-shell-key-file')
+ key_file.chmod(0o400)
+ key_file.write(key)
+ key_file.close
+
+ options['IdentityFile'] = key_file.path
+ options['IdentitiesOnly'] = 'yes'
+ end
+
+ if known_hosts
+ known_hosts_file = Tempfile.new('gitlab-shell-known-hosts')
+ known_hosts_file.chmod(0o400)
+ known_hosts_file.write(known_hosts)
+ known_hosts_file.close
+
+ options['StrictHostKeyChecking'] = 'yes'
+ options['UserKnownHostsFile'] = known_hosts_file.path
+ end
+
+ return yield({}) if options.empty?
+
+ script = Tempfile.new('gitlab-shell-ssh-wrapper')
+ script.chmod(0o755)
+ script.write(custom_ssh_script(options))
+ script.close
+
+ yield('GIT_SSH' => script.path)
+ ensure
+ key_file&.close!
+ known_hosts_file&.close!
+ script&.close!
+ end
+
+ private
+
+ def shard_name
+ strong_memoize(:shard_name) do
+ shard_name_from_shard_path(shard_path)
+ end
+ end
+
+ def shard_name_from_shard_path(shard_path)
+ Gitlab.config.repositories.storages.find { |_, info| info['path'] == shard_path }&.first ||
+ raise(ShardNameNotFoundError, "no shard found for path '#{shard_path}'")
+ end
+
+ def git_import_repository(source, timeout)
+ # Skip import if repo already exists
+ return false if File.exist?(repository_absolute_path)
+
+ masked_source = mask_password_in_url(source)
+
+ logger.info "Importing project from <#{masked_source}> to <#{repository_absolute_path}>."
+ cmd = %W(git clone --bare -- #{source} #{repository_absolute_path})
+
+ success = run_with_timeout(cmd, timeout, nil)
+
+ unless success
+ logger.error("Importing project from <#{masked_source}> to <#{repository_absolute_path}> failed.")
+ FileUtils.rm_rf(repository_absolute_path)
+ return false
+ end
+
+ Gitlab::Git::Repository.create_hooks(repository_absolute_path, global_hooks_path)
+
+ # The project was imported successfully.
+      # Remove the origin URL since it may contain a password.
+ remove_origin_in_repo
+
+ true
+ end
+
+ def gitaly_import_repository(source)
+ raw_repository = Gitlab::Git::Repository.new(shard_name, repository_relative_path, nil)
+
+ Gitlab::GitalyClient::RepositoryService.new(raw_repository).import_repository(source)
+ true
+ rescue GRPC::BadStatus => e
+ @output << e.message
+ false
+ end
+
+ def git_fork_repository(new_shard_path, new_repository_relative_path)
+ from_path = repository_absolute_path
+ to_path = File.join(new_shard_path, new_repository_relative_path)
+
+ # The repository cannot already exist
+ if File.exist?(to_path)
+ logger.error "fork-repository failed: destination repository <#{to_path}> already exists."
+ return false
+ end
+
+      # Ensure the namespace / hashed storage directory exists
+ FileUtils.mkdir_p(File.dirname(to_path), mode: 0770)
+
+ logger.info "Forking repository from <#{from_path}> to <#{to_path}>."
+ cmd = %W(git clone --bare --no-local -- #{from_path} #{to_path})
+
+ run(cmd, nil) && Gitlab::Git::Repository.create_hooks(to_path, global_hooks_path)
+ end
+
+ def gitaly_fork_repository(new_shard_path, new_repository_relative_path)
+ target_repository = Gitlab::Git::Repository.new(shard_name_from_shard_path(new_shard_path), new_repository_relative_path, nil)
+ raw_repository = Gitlab::Git::Repository.new(shard_name, repository_relative_path, nil)
+
+ Gitlab::GitalyClient::RepositoryService.new(target_repository).fork_repository(raw_repository)
+ rescue GRPC::BadStatus => e
+ logger.error "fork-repository failed: #{e.message}"
+ false
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/hook.rb b/lib/gitlab/git/hook.rb
index 208e4bbaf60..24f027d8da4 100644
--- a/lib/gitlab/git/hook.rb
+++ b/lib/gitlab/git/hook.rb
@@ -22,22 +22,22 @@ module Gitlab
File.exist?(path)
end
- def trigger(gl_id, oldrev, newrev, ref)
+ def trigger(gl_id, gl_username, oldrev, newrev, ref)
return [true, nil] unless exists?
Bundler.with_clean_env do
case name
when "pre-receive", "post-receive"
- call_receive_hook(gl_id, oldrev, newrev, ref)
+ call_receive_hook(gl_id, gl_username, oldrev, newrev, ref)
when "update"
- call_update_hook(gl_id, oldrev, newrev, ref)
+ call_update_hook(gl_id, gl_username, oldrev, newrev, ref)
end
end
end
private
- def call_receive_hook(gl_id, oldrev, newrev, ref)
+ def call_receive_hook(gl_id, gl_username, oldrev, newrev, ref)
changes = [oldrev, newrev, ref].join(" ")
exit_status = false
@@ -45,6 +45,7 @@ module Gitlab
vars = {
'GL_ID' => gl_id,
+ 'GL_USERNAME' => gl_username,
'PWD' => repo_path,
'GL_PROTOCOL' => GL_PROTOCOL,
'GL_REPOSITORY' => repository.gl_repository
@@ -80,11 +81,21 @@ module Gitlab
[exit_status, exit_message]
end
- def call_update_hook(gl_id, oldrev, newrev, ref)
- Dir.chdir(repo_path) do
- stdout, stderr, status = Open3.capture3({ 'GL_ID' => gl_id }, path, ref, oldrev, newrev)
- [status.success?, (stderr.presence || stdout).gsub(/\R/, "<br>").html_safe]
- end
+ def call_update_hook(gl_id, gl_username, oldrev, newrev, ref)
+ env = {
+ 'GL_ID' => gl_id,
+ 'GL_USERNAME' => gl_username,
+ 'PWD' => repo_path
+ }
+
+ options = {
+ chdir: repo_path
+ }
+
+ args = [ref, oldrev, newrev]
+
+ stdout, stderr, status = Open3.capture3(env, path, *args, options)
+ [status.success?, (stderr.presence || stdout).gsub(/\R/, "<br>").html_safe]
end
def retrieve_error_message(stderr, stdout)
diff --git a/lib/gitlab/git/hooks_service.rb b/lib/gitlab/git/hooks_service.rb
index ea8a87a1290..f302b852b35 100644
--- a/lib/gitlab/git/hooks_service.rb
+++ b/lib/gitlab/git/hooks_service.rb
@@ -5,12 +5,13 @@ module Gitlab
attr_accessor :oldrev, :newrev, :ref
- def execute(committer, repository, oldrev, newrev, ref)
- @repository = repository
- @gl_id = committer.gl_id
- @oldrev = oldrev
- @newrev = newrev
- @ref = ref
+ def execute(pusher, repository, oldrev, newrev, ref)
+ @repository = repository
+ @gl_id = pusher.gl_id
+ @gl_username = pusher.username
+ @oldrev = oldrev
+ @newrev = newrev
+ @ref = ref
%w(pre-receive update).each do |hook_name|
status, message = run_hook(hook_name)
@@ -29,7 +30,7 @@ module Gitlab
def run_hook(name)
hook = Gitlab::Git::Hook.new(name, @repository)
- hook.trigger(@gl_id, oldrev, newrev, ref)
+ hook.trigger(@gl_id, @gl_username, oldrev, newrev, ref)
end
end
end
diff --git a/lib/gitlab/git/index.rb b/lib/gitlab/git/index.rb
index db532600d1b..d94082a3e30 100644
--- a/lib/gitlab/git/index.rb
+++ b/lib/gitlab/git/index.rb
@@ -10,6 +10,7 @@ module Gitlab
DEFAULT_MODE = 0o100644
ACTIONS = %w(create create_dir update move delete).freeze
+ ACTION_OPTIONS = %i(file_path previous_path content encoding).freeze
attr_reader :repository, :raw_index
@@ -20,6 +21,11 @@ module Gitlab
delegate :read_tree, :get, to: :raw_index
+ def apply(action, options)
+ validate_action!(action)
+ public_send(action, options.slice(*ACTION_OPTIONS)) # rubocop:disable GitlabSecurity/PublicSend
+ end
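+
+      # Usage sketch (file_path and content are assumptions for the example):
+      #
+      #   index.apply(:create, file_path: 'README.md', content: 'Hello world')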
+
def write_tree
raw_index.write_tree(repository.rugged)
end
@@ -140,6 +146,12 @@ module Gitlab
rescue Rugged::IndexError => e
raise IndexError, e.message
end
+
+ def validate_action!(action)
+ unless ACTIONS.include?(action.to_s)
+ raise ArgumentError, "Unknown action '#{action}'"
+ end
+ end
end
end
end
diff --git a/lib/gitlab/git/info_attributes.rb b/lib/gitlab/git/info_attributes.rb
new file mode 100644
index 00000000000..e79a440950b
--- /dev/null
+++ b/lib/gitlab/git/info_attributes.rb
@@ -0,0 +1,49 @@
+# Gitaly note: JV: not sure what to make of this class. Why does it use
+# the full disk path of the repository to look up attributes? This is
+# problematic in Gitaly, because Gitaly hides the full disk path to the
+# repository from gitlab-ce.
+
+module Gitlab
+ module Git
+ # Parses gitattributes at `$GIT_DIR/info/attributes`
+ #
+    # Unlike Rugged, this parser only needs a single IO call (a call to `open`),
+    # vastly reducing the time spent extracting attributes.
+ #
+ # This class _only_ supports parsing the attributes file located at
+ # `$GIT_DIR/info/attributes` as GitLab doesn't use any other files
+ # (`.gitattributes` is copied to this particular path).
+ #
+ # Basic usage:
+ #
+ # attributes = Gitlab::Git::InfoAttributes.new(some_repo.path)
+ #
+    #     attributes.attributes('README.md') # => { "eol" => "lf" }
+ class InfoAttributes
+ delegate :attributes, :patterns, to: :parser
+
+ # path - The path to the Git repository.
+ def initialize(path)
+ @repo_path = File.expand_path(path)
+ end
+
+ def parser
+ @parser ||= begin
+ if File.exist?(attributes_path)
+ File.open(attributes_path, 'r') do |file_handle|
+ AttributesParser.new(file_handle)
+ end
+ else
+ AttributesParser.new("")
+ end
+ end
+ end
+
+ private
+
+ def attributes_path
+ @attributes_path ||= File.join(@repo_path, 'info/attributes')
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/lfs_changes.rb b/lib/gitlab/git/lfs_changes.rb
new file mode 100644
index 00000000000..b9e5cf258f4
--- /dev/null
+++ b/lib/gitlab/git/lfs_changes.rb
@@ -0,0 +1,52 @@
+module Gitlab
+ module Git
+ class LfsChanges
+ def initialize(repository, newrev)
+ @repository = repository
+ @newrev = newrev
+ end
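+
+      # Usage sketch (repository and newrev are assumed variables):
+      #
+      #   changes = Gitlab::Git::LfsChanges.new(repository, newrev)
+      #   changes.new_pointers(object_limit: 100) # => array of LFS pointer blobs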
+
+ def new_pointers(object_limit: nil, not_in: nil)
+ @repository.gitaly_migrate(:blob_get_new_lfs_pointers) do |is_enabled|
+ if is_enabled
+ @repository.gitaly_blob_client.get_new_lfs_pointers(@newrev, object_limit, not_in)
+ else
+ git_new_pointers(object_limit, not_in)
+ end
+ end
+ end
+
+ def all_pointers
+ @repository.gitaly_migrate(:blob_get_all_lfs_pointers) do |is_enabled|
+ if is_enabled
+ @repository.gitaly_blob_client.get_all_lfs_pointers(@newrev)
+ else
+ git_all_pointers
+ end
+ end
+ end
+
+ private
+
+ def git_new_pointers(object_limit, not_in)
+ @new_pointers ||= begin
+ rev_list.new_objects(not_in: not_in, require_path: true) do |object_ids|
+ object_ids = object_ids.take(object_limit) if object_limit
+
+ Gitlab::Git::Blob.batch_lfs_pointers(@repository, object_ids)
+ end
+ end
+ end
+
+ def git_all_pointers
+ rev_list.all_objects(require_path: true) do |object_ids|
+ Gitlab::Git::Blob.batch_lfs_pointers(@repository, object_ids)
+ end
+ end
+
+ def rev_list
+ Gitlab::Git::RevList.new(@repository, newrev: @newrev)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/lfs_pointer_file.rb b/lib/gitlab/git/lfs_pointer_file.rb
new file mode 100644
index 00000000000..da12ed7d125
--- /dev/null
+++ b/lib/gitlab/git/lfs_pointer_file.rb
@@ -0,0 +1,25 @@
+module Gitlab
+ module Git
+ class LfsPointerFile
+ def initialize(data)
+ @data = data
+ end
+
+ def pointer
+ @pointer ||= <<~FILE
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:#{sha256}
+ size #{size}
+ FILE
+ end
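+
+      # Example (the data is illustrative):
+      #
+      #   Gitlab::Git::LfsPointerFile.new("file contents").pointer
+      #   # => "version https://git-lfs.github.com/spec/v1\noid sha256:...\nsize 13\n"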
+
+ def size
+ @size ||= @data.bytesize
+ end
+
+ def sha256
+ @sha256 ||= Digest::SHA256.hexdigest(@data)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/operation_service.rb b/lib/gitlab/git/operation_service.rb
index 786e2e7e8dc..280def182d5 100644
--- a/lib/gitlab/git/operation_service.rb
+++ b/lib/gitlab/git/operation_service.rb
@@ -3,9 +3,17 @@ module Gitlab
class OperationService
include Gitlab::Git::Popen
- WithBranchResult = Struct.new(:newrev, :repo_created, :branch_created) do
+ BranchUpdate = Struct.new(:newrev, :repo_created, :branch_created) do
alias_method :repo_created?, :repo_created
alias_method :branch_created?, :branch_created
+
+ def self.from_gitaly(branch_update)
+ new(
+ branch_update.commit_id,
+ branch_update.repo_created,
+ branch_update.branch_created
+ )
+ end
end
attr_reader :user, :repository
@@ -64,7 +72,7 @@ module Gitlab
# Whenever `start_branch_name` is passed, if `branch_name` doesn't exist,
# it would be created from `start_branch_name`.
- # If `start_project` is passed, and the branch doesn't exist,
+ # If `start_repository` is passed, and the branch doesn't exist,
# it would try to find the commits from it instead of current repository.
def with_branch(
branch_name,
@@ -72,15 +80,13 @@ module Gitlab
start_repository: repository,
&block)
- # Refactoring aid
- unless start_repository.is_a?(Gitlab::Git::Repository)
- raise "expected a Gitlab::Git::Repository, got #{start_repository}"
- end
+ Gitlab::Git.check_namespace!(start_repository)
+ start_repository = RemoteRepository.new(start_repository) unless start_repository.is_a?(RemoteRepository)
- start_branch_name = nil if start_repository.empty_repo?
+ start_branch_name = nil if start_repository.empty?
if start_branch_name && !start_repository.branch_exists?(start_branch_name)
- raise ArgumentError, "Cannot find branch #{start_branch_name} in #{start_repository.full_path}"
+ raise ArgumentError, "Cannot find branch #{start_branch_name} in #{start_repository.relative_path}"
end
update_branch_with_hooks(branch_name) do
@@ -91,6 +97,11 @@ module Gitlab
end
end
+ def update_branch(branch_name, newrev, oldrev)
+ ref = Gitlab::Git::BRANCH_REF_PREFIX + branch_name
+ update_ref_in_hooks(ref, newrev, oldrev)
+ end
+
private
# Returns [newrev, should_run_after_create, should_run_after_create_branch]
@@ -112,7 +123,7 @@ module Gitlab
ref = Gitlab::Git::BRANCH_REF_PREFIX + branch_name
update_ref_in_hooks(ref, newrev, oldrev)
- WithBranchResult.new(newrev, was_empty, was_empty || Gitlab::Git.blank_ref?(oldrev))
+ BranchUpdate.new(newrev, was_empty, was_empty || Gitlab::Git.blank_ref?(oldrev))
end
def find_oldrev_from_branch(newrev, branch)
@@ -120,7 +131,10 @@ module Gitlab
oldrev = branch.target
- if oldrev == repository.rugged.merge_base(newrev, branch.target)
+ merge_base = repository.merge_base(newrev, branch.target)
+ raise Gitlab::Git::Repository::InvalidRef unless merge_base
+
+ if oldrev == merge_base
oldrev
else
raise Gitlab::Git::CommitError.new('Branch diverged')
@@ -152,13 +166,15 @@ module Gitlab
# (and have!) accidentally reset the ref to an earlier state, clobbering
# commits. See also https://github.com/libgit2/libgit2/issues/1534.
command = %W[#{Gitlab.config.git.bin_path} update-ref --stdin -z]
- _, status = popen(
+
+ output, status = popen(
command,
repository.path) do |stdin|
stdin.write("update #{ref}\x00#{newrev}\x00#{oldrev}\x00")
end
unless status.zero?
+ Gitlab::GitLogger.error("'git update-ref' in #{repository.path}: #{output}")
raise Gitlab::Git::CommitError.new(
"Could not update branch #{Gitlab::Git.branch_name(ref)}." \
" Please refresh and try again.")
diff --git a/lib/gitlab/git/path_helper.rb b/lib/gitlab/git/path_helper.rb
index 42c80aabd0a..155cf52f050 100644
--- a/lib/gitlab/git/path_helper.rb
+++ b/lib/gitlab/git/path_helper.rb
@@ -6,7 +6,7 @@ module Gitlab
class << self
def normalize_path(filename)
# Strip all leading slashes so that //foo -> foo
- filename[/^\/*/] = ''
+ filename[%r{^/*}] = ''
# Expand relative paths (e.g. foo/../bar)
filename = Pathname.new(filename)
diff --git a/lib/gitlab/git/popen.rb b/lib/gitlab/git/popen.rb
index 3d2fc471d28..c1767046ff0 100644
--- a/lib/gitlab/git/popen.rb
+++ b/lib/gitlab/git/popen.rb
@@ -5,7 +5,9 @@ require 'open3'
module Gitlab
module Git
module Popen
- def popen(cmd, path, vars = {})
+ FAST_GIT_PROCESS_TIMEOUT = 15.seconds
+
+ def popen(cmd, path, vars = {}, lazy_block: nil)
unless cmd.is_a?(Array)
raise "System commands must be given as an array of strings"
end
@@ -14,18 +16,86 @@ module Gitlab
vars['PWD'] = path
options = { chdir: path }
- @cmd_output = ""
- @cmd_status = 0
+ cmd_output = ""
+ cmd_status = 0
Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|
+ stdout.set_encoding(Encoding::ASCII_8BIT)
+
yield(stdin) if block_given?
stdin.close
- @cmd_output << stdout.read
- @cmd_output << stderr.read
- @cmd_status = wait_thr.value.exitstatus
+ if lazy_block
+ return [lazy_block.call(stdout.lazy), 0]
+ else
+ cmd_output << stdout.read
+ end
+
+ cmd_output << stderr.read
+ cmd_status = wait_thr.value.exitstatus
+ end
+
+ [cmd_output, cmd_status]
+ end
+
+ def popen_with_timeout(cmd, timeout, path, vars = {})
+ unless cmd.is_a?(Array)
+ raise "System commands must be given as an array of strings"
+ end
+
+ path ||= Dir.pwd
+ vars['PWD'] = path
+
+ unless File.directory?(path)
+ FileUtils.mkdir_p(path)
+ end
+
+ rout, wout = IO.pipe
+ rerr, werr = IO.pipe
+
+ pid = Process.spawn(vars, *cmd, out: wout, err: werr, chdir: path, pgroup: true)
+
+ begin
+ status = process_wait_with_timeout(pid, timeout)
+
+ # close write ends so we could read them
+ wout.close
+ werr.close
+
+ cmd_output = rout.readlines.join
+ cmd_output << rerr.readlines.join # Copying the behaviour of `popen` which merges stderr into output
+
+ [cmd_output, status.exitstatus]
+ rescue Timeout::Error => e
+ kill_process_group_for_pid(pid)
+
+ raise e
+ ensure
+ wout.close unless wout.closed?
+ werr.close unless werr.closed?
+
+ rout.close
+ rerr.close
end
+ end
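+
+      # Usage sketch from an including class (command, timeout and path are
+      # assumptions for the example):
+      #
+      #   output, status = popen_with_timeout(%w(git gc), 30, repository_path)
+      #   status # => exit status; Timeout::Error is raised when the deadline passes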
+
+ def process_wait_with_timeout(pid, timeout)
+ deadline = timeout.seconds.from_now
+ wait_time = 0.01
+
+ while deadline > Time.now
+ sleep(wait_time)
+ _, status = Process.wait2(pid, Process::WNOHANG)
+
+ return status unless status.nil?
+ end
+
+ raise Timeout::Error, "Timeout waiting for process ##{pid}"
+ end
- [@cmd_output, @cmd_status]
+ def kill_process_group_for_pid(pid)
+ Process.kill("KILL", -pid)
+ Process.wait(pid)
+ rescue Errno::ESRCH
end
end
end
diff --git a/lib/gitlab/git/ref.rb b/lib/gitlab/git/ref.rb
index 372ce005b94..fa71a4e7ea7 100644
--- a/lib/gitlab/git/ref.rb
+++ b/lib/gitlab/git/ref.rb
@@ -23,7 +23,7 @@ module Gitlab
# Ex.
# Ref.extract_branch_name('refs/heads/master') #=> 'master'
def self.extract_branch_name(str)
- str.gsub(/\Arefs\/heads\//, '')
+ str.gsub(%r{\Arefs/heads/}, '')
end
# Gitaly: this method will probably be migrated indirectly via its call sites.
@@ -33,9 +33,9 @@ module Gitlab
object
end
- def initialize(repository, name, target, derefenced_target)
+ def initialize(repository, name, target, dereferenced_target)
@name = Gitlab::Git.ref_name(name)
- @dereferenced_target = derefenced_target
+ @dereferenced_target = dereferenced_target
@target = if target.respond_to?(:oid)
target.oid
elsif target.respond_to?(:name)
diff --git a/lib/gitlab/git/remote_mirror.rb b/lib/gitlab/git/remote_mirror.rb
new file mode 100644
index 00000000000..ebe46722890
--- /dev/null
+++ b/lib/gitlab/git/remote_mirror.rb
@@ -0,0 +1,89 @@
+module Gitlab
+ module Git
+ class RemoteMirror
+ def initialize(repository, ref_name)
+ @repository = repository
+ @ref_name = ref_name
+ end
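+
+      # Usage sketch (the remote name and branch list are assumptions for the example):
+      #
+      #   mirror = Gitlab::Git::RemoteMirror.new(repository, 'my-remote-mirror')
+      #   mirror.update(only_branches_matching: %w(master develop))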
+
+ def update(only_branches_matching: [])
+ @repository.gitaly_migrate(:remote_update_remote_mirror) do |is_enabled|
+ if is_enabled
+ gitaly_update(only_branches_matching)
+ else
+ rugged_update(only_branches_matching)
+ end
+ end
+ end
+
+ private
+
+ def gitaly_update(only_branches_matching)
+ @repository.gitaly_remote_client.update_remote_mirror(@ref_name, only_branches_matching)
+ end
+
+ def rugged_update(only_branches_matching)
+ local_branches = refs_obj(@repository.local_branches, only_refs_matching: only_branches_matching)
+ remote_branches = refs_obj(@repository.remote_branches(@ref_name), only_refs_matching: only_branches_matching)
+
+ updated_branches = changed_refs(local_branches, remote_branches)
+ push_branches(updated_branches.keys) if updated_branches.present?
+
+ delete_refs(local_branches, remote_branches)
+
+ local_tags = refs_obj(@repository.tags)
+ remote_tags = refs_obj(@repository.remote_tags(@ref_name))
+
+ updated_tags = changed_refs(local_tags, remote_tags)
+ @repository.push_remote_branches(@ref_name, updated_tags.keys) if updated_tags.present?
+
+ delete_refs(local_tags, remote_tags)
+ end
+
+ def refs_obj(refs, only_refs_matching: [])
+ refs.each_with_object({}) do |ref, refs|
+ next if only_refs_matching.present? && !only_refs_matching.include?(ref.name)
+
+ refs[ref.name] = ref
+ end
+ end
+
+ def changed_refs(local_refs, remote_refs)
+ local_refs.select do |ref_name, ref|
+ remote_ref = remote_refs[ref_name]
+
+ remote_ref.nil? || ref.dereferenced_target != remote_ref.dereferenced_target
+ end
+ end
+
+ def push_branches(branches)
+ default_branch, branches = branches.partition do |branch|
+ @repository.root_ref == branch
+ end
+
+ # Push the default branch first so it works fine when remote mirror is empty.
+ branches.unshift(*default_branch)
+
+ @repository.push_remote_branches(@ref_name, branches)
+ end
+
+ def delete_refs(local_refs, remote_refs)
+ refs = refs_to_delete(local_refs, remote_refs)
+
+ @repository.delete_remote_branches(@ref_name, refs.keys) if refs.present?
+ end
+
+ def refs_to_delete(local_refs, remote_refs)
+ default_branch_id = @repository.commit.id
+
+ remote_refs.select do |remote_ref_name, remote_ref|
+          next false if local_refs[remote_ref_name] # skip if the branch or tag exists in the local repo
+
+ remote_ref_id = remote_ref.dereferenced_target.try(:id)
+
+ remote_ref_id && @repository.rugged_is_ancestor?(remote_ref_id, default_branch_id)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/remote_repository.rb b/lib/gitlab/git/remote_repository.rb
new file mode 100644
index 00000000000..6bd6e58feeb
--- /dev/null
+++ b/lib/gitlab/git/remote_repository.rb
@@ -0,0 +1,84 @@
+module Gitlab
+ module Git
+ #
+ # When a Gitaly call involves two repositories instead of one we cannot
+ # assume that both repositories are on the same Gitaly server. In this
+ # case we need to make a distinction between the repository that the
+ # call is being made on (a Repository instance), and the "other"
+ # repository (a RemoteRepository instance). This is the reason why we
+ # have the RemoteRepository class in Gitlab::Git.
+ #
+ # When you make changes, be aware that gitaly-ruby sub-classes this
+ # class.
+ #
+ class RemoteRepository
+ attr_reader :path, :relative_path, :gitaly_repository
+
+ def initialize(repository)
+ @relative_path = repository.relative_path
+ @gitaly_repository = repository.gitaly_repository
+
+ # These instance variables will not be available in gitaly-ruby, where
+ # we have no disk access to this repository.
+ @repository = repository
+ @path = repository.path
+ end
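+
+      # Usage sketch: wrap a Gitlab::Git::Repository before handing it to a call
+      # that may be served by another Gitaly server (start_repository is an
+      # assumed variable):
+      #
+      #   remote = Gitlab::Git::RemoteRepository.new(start_repository)
+      #   remote.branch_exists?('master') # => true or false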
+
+ def empty?
+ # We will override this implementation in gitaly-ruby because we cannot
+ # use '@repository' there.
+ #
+        # Caches and memoization are used on the Rails side.
+ !@repository.exists? || @repository.empty?
+ end
+
+ def commit_id(revision)
+ # We will override this implementation in gitaly-ruby because we cannot
+ # use '@repository' there.
+ @repository.commit(revision)&.sha
+ end
+
+ def branch_exists?(name)
+ # We will override this implementation in gitaly-ruby because we cannot
+ # use '@repository' there.
+ @repository.branch_exists?(name)
+ end
+
+ # Compares self to a Gitlab::Git::Repository. This implementation uses
+ # 'self.gitaly_repository' so that it will also work in the
+ # GitalyRemoteRepository subclass defined in gitaly-ruby.
+ def same_repository?(other_repository)
+ gitaly_repository.storage_name == other_repository.storage &&
+ gitaly_repository.relative_path == other_repository.relative_path
+ end
+
+ def fetch_env
+ gitaly_ssh = File.absolute_path(File.join(Gitlab.config.gitaly.client_path, 'gitaly-ssh'))
+ gitaly_address = gitaly_client.address(storage)
+ gitaly_token = gitaly_client.token(storage)
+
+ request = Gitaly::SSHUploadPackRequest.new(repository: gitaly_repository)
+ env = {
+ 'GITALY_ADDRESS' => gitaly_address,
+ 'GITALY_PAYLOAD' => request.to_json,
+ 'GITALY_WD' => Dir.pwd,
+ 'GIT_SSH_COMMAND' => "#{gitaly_ssh} upload-pack"
+ }
+ env['GITALY_TOKEN'] = gitaly_token if gitaly_token.present?
+
+ env
+ end
+
+ private
+
+ # Must return an object that responds to 'address' and 'storage'.
+ def gitaly_client
+ Gitlab::GitalyClient
+ end
+
+ def storage
+ gitaly_repository.storage_name
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index 0be35034d24..21c79a7a550 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -6,13 +6,22 @@ require "rubygems/package"
module Gitlab
module Git
class Repository
+ include Gitlab::Git::RepositoryMirroring
include Gitlab::Git::Popen
ALLOWED_OBJECT_DIRECTORIES_VARIABLES = %w[
GIT_OBJECT_DIRECTORY
GIT_ALTERNATE_OBJECT_DIRECTORIES
].freeze
+ ALLOWED_OBJECT_RELATIVE_DIRECTORIES_VARIABLES = %w[
+ GIT_OBJECT_DIRECTORY_RELATIVE
+ GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE
+ ].freeze
SEARCH_CONTEXT_LINES = 3
+ REBASE_WORKTREE_PREFIX = 'rebase'.freeze
+ SQUASH_WORKTREE_PREFIX = 'squash'.freeze
+ GITALY_INTERNAL_URL = 'ssh://gitaly/internal.git'.freeze
+ GITLAB_PROJECTS_TIMEOUT = Gitlab.config.gitlab_shell.git_timeout
NoRepository = Class.new(StandardError)
InvalidBlobName = Class.new(StandardError)
@@ -20,23 +29,43 @@ module Gitlab
GitError = Class.new(StandardError)
DeleteBranchError = Class.new(StandardError)
CreateTreeError = Class.new(StandardError)
+ TagExistsError = Class.new(StandardError)
class << self
- # Unlike `new`, `create` takes the storage path, not the storage name
- def create(storage_path, name, bare: true, symlink_hooks_to: nil)
- repo_path = File.join(storage_path, name)
- repo_path += '.git' unless repo_path.end_with?('.git')
-
+ # Unlike `new`, `create` takes the repository path
+ def create(repo_path, bare: true, symlink_hooks_to: nil)
FileUtils.mkdir_p(repo_path, mode: 0770)
# Equivalent to `git --git-path=#{repo_path} init [--bare]`
repo = Rugged::Repository.init_at(repo_path, bare)
repo.close
- if symlink_hooks_to.present?
- hooks_path = File.join(repo_path, 'hooks')
- FileUtils.rm_rf(hooks_path)
- FileUtils.ln_s(symlink_hooks_to, hooks_path)
+ create_hooks(repo_path, symlink_hooks_to) if symlink_hooks_to.present?
+
+ true
+ end
+
+ def create_hooks(repo_path, global_hooks_path)
+ local_hooks_path = File.join(repo_path, 'hooks')
+ real_local_hooks_path = :not_found
+
+ begin
+ real_local_hooks_path = File.realpath(local_hooks_path)
+ rescue Errno::ENOENT
+ # real_local_hooks_path == :not_found
+ end
+
+ # Do nothing if hooks already exist
+ unless real_local_hooks_path == File.realpath(global_hooks_path)
+ if File.exist?(local_hooks_path)
+ # Move the existing hooks somewhere safe
+ FileUtils.mv(
+ local_hooks_path,
+ "#{local_hooks_path}.old.#{Time.now.to_i}")
+ end
+
+ # Create the hooks symlink
+ FileUtils.ln_sf(global_hooks_path, local_hooks_path)
end
true
@@ -55,26 +84,27 @@ module Gitlab
# Rugged repo object
attr_reader :rugged
- attr_reader :storage, :gl_repository, :relative_path
+ attr_reader :gitlab_projects, :storage, :gl_repository, :relative_path
- # 'path' must be the path to a _bare_ git repository, e.g.
- # /path/to/my-repo.git
+ # This initializer method is only used on the client side (gitlab-ce).
+ # Gitaly-ruby uses a different initializer.
def initialize(storage, relative_path, gl_repository)
@storage = storage
@relative_path = relative_path
@gl_repository = gl_repository
storage_path = Gitlab.config.repositories.storages[@storage]['path']
+ @gitlab_projects = Gitlab::Git::GitlabProjects.new(
+ storage_path,
+ relative_path,
+ global_hooks_path: Gitlab.config.gitlab_shell.hooks_path,
+ logger: Rails.logger
+ )
@path = File.join(storage_path, @relative_path)
@name = @relative_path.split("/").last
- @attributes = Gitlab::Git::Attributes.new(path)
+ @attributes = Gitlab::Git::InfoAttributes.new(path)
end
- delegate :empty?,
- to: :rugged
-
- delegate :exists?, to: :gitaly_repository_client
-
def ==(other)
path == other.path
end
@@ -98,10 +128,26 @@ module Gitlab
raise NoRepository.new('no repository for such path')
end
+ def cleanup
+ @rugged&.close
+ end
+
def circuit_breaker
@circuit_breaker ||= Gitlab::Git::Storage::CircuitBreaker.for_storage(storage)
end
+ def exists?
+ Gitlab::GitalyClient.migrate(:repository_exists, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
+ if enabled
+ gitaly_repository_client.exists?
+ else
+ circuit_breaker.perform do
+ File.exist?(File.join(@path, 'refs'))
+ end
+ end
+ end
+ end
+
# Returns an Array of branch names
# sorted by name ASC
def branch_names
@@ -181,6 +227,35 @@ module Gitlab
end
end
+ def has_local_branches?
+ gitaly_migrate(:has_local_branches) do |is_enabled|
+ if is_enabled
+ gitaly_repository_client.has_local_branches?
+ else
+ has_local_branches_rugged?
+ end
+ end
+ end
+
+      # A Git repository can contain some hidden refs, such as:
+      #   /refs/notes/*
+      #   /refs/git-as-svn/*
+      #   /refs/pulls/*
+      # By default these refs are not visible on the project page and are not cloned to the client side.
+ alias_method :has_visible_content?, :has_local_branches?
+
+ def has_local_branches_rugged?
+ rugged.branches.each(:local).any? do |ref|
+ begin
+ ref.name && ref.target # ensures the branch is valid
+
+ true
+ rescue Rugged::ReferenceError
+ false
+ end
+ end
+ end
+
# Returns the number of valid tags
def tag_count
gitaly_migrate(:tag_names) do |is_enabled|
@@ -255,13 +330,27 @@ module Gitlab
end
end
+ def batch_existence(object_ids, existing: true)
+ filter_method = existing ? :select : :reject
+
+ object_ids.public_send(filter_method) do |oid| # rubocop:disable GitlabSecurity/PublicSend
+ rugged.exists?(oid)
+ end
+ end
+
# Returns an Array of branch and tag names
def ref_names
branch_names + tag_names
end
def delete_all_refs_except(prefixes)
- delete_refs(*all_ref_names_except(prefixes))
+ gitaly_migrate(:ref_delete_refs) do |is_enabled|
+ if is_enabled
+ gitaly_ref_client.delete_refs(except_with_prefixes: prefixes)
+ else
+ delete_refs(*all_ref_names_except(prefixes))
+ end
+ end
end
# Returns an Array of all ref names, except when it's matching pattern
@@ -377,41 +466,52 @@ module Gitlab
path: nil,
follow: false,
skip_merges: false,
- disable_walk: false,
after: nil,
- before: nil
+ before: nil,
+ all: false
}
options = default_options.merge(options)
- options[:limit] ||= 0
options[:offset] ||= 0
- raw_log(options).map { |c| Commit.decorate(self, c) }
+ limit = options[:limit]
+ if limit == 0 || !limit.is_a?(Integer)
+ raise ArgumentError.new("invalid Repository#log limit: #{limit.inspect}")
+ end
+
+ gitaly_migrate(:find_commits) do |is_enabled|
+ if is_enabled
+ gitaly_commit_client.find_commits(options)
+ else
+ raw_log(options).map { |c| Commit.decorate(self, c) }
+ end
+ end
end
# Used in gitaly-ruby
def raw_log(options)
- actual_ref = options[:ref] || root_ref
- begin
- sha = sha_from_ref(actual_ref)
- rescue Rugged::OdbError, Rugged::InvalidError, Rugged::ReferenceError
- # Return an empty array if the ref wasn't found
- return []
- end
+ sha =
+ unless options[:all]
+ actual_ref = options[:ref] || root_ref
+ begin
+ sha_from_ref(actual_ref)
+ rescue Rugged::OdbError, Rugged::InvalidError, Rugged::ReferenceError
+ # Return an empty array if the ref wasn't found
+ return []
+ end
+ end
- if log_using_shell?(options)
- log_by_shell(sha, options)
- else
- log_by_walk(sha, options)
- end
+ log_by_shell(sha, options)
end
def count_commits(options)
+ count_commits_options = process_count_commits_options(options)
+
gitaly_migrate(:count_commits) do |is_enabled|
if is_enabled
- count_commits_by_gitaly(options)
+ count_commits_by_gitaly(count_commits_options)
else
- count_commits_by_shelling_out(options)
+ count_commits_by_shelling_out(count_commits_options)
end
end
end
@@ -449,25 +549,57 @@ module Gitlab
end
# Counts the amount of commits between `from` and `to`.
- def count_commits_between(from, to)
- Commit.between(self, from, to).size
+ def count_commits_between(from, to, options = {})
+ count_commits(from: from, to: to, **options)
end
# Returns the SHA of the most recent common ancestor of +from+ and +to+
- def merge_base_commit(from, to)
- rugged.merge_base(from, to)
+ def merge_base(from, to)
+ gitaly_migrate(:merge_base) do |is_enabled|
+ if is_enabled
+ gitaly_repository_client.find_merge_base(from, to)
+ else
+ rugged_merge_base(from, to)
+ end
+ end
end
# Gitaly note: JV: check gitlab-ee before removing this method.
def rugged_is_ancestor?(ancestor_id, descendant_id)
return false if ancestor_id.nil? || descendant_id.nil?
- merge_base_commit(ancestor_id, descendant_id) == ancestor_id
+ rugged_merge_base(ancestor_id, descendant_id) == ancestor_id
+ rescue Rugged::OdbError
+ false
end
# Returns true if +from+ is a direct ancestor of +to+, otherwise false
def ancestor?(from, to)
- gitaly_commit_client.ancestor?(from, to)
+ Gitlab::GitalyClient.migrate(:is_ancestor) do |is_enabled|
+ if is_enabled
+ gitaly_commit_client.ancestor?(from, to)
+ else
+ rugged_is_ancestor?(from, to)
+ end
+ end
+ end
+
+ def merged_branch_names(branch_names = [])
+ return [] unless root_ref
+
+ root_sha = find_branch(root_ref)&.target
+
+ return [] unless root_sha
+
+ branches = gitaly_migrate(:merged_branch_names) do |is_enabled|
+ if is_enabled
+ gitaly_merged_branch_names(branch_names, root_sha)
+ else
+ git_merged_branch_names(branch_names, root_sha)
+ end
+ end
+
+ Set.new(branches)
end
# Return an array of Diff objects that represent the diff
@@ -475,7 +607,15 @@ module Gitlab
# diff options. The +options+ hash can also include :break_rewrites to
# split larger rewrites into delete/add pairs.
def diff(from, to, options = {}, *paths)
- Gitlab::Git::DiffCollection.new(diff_patches(from, to, options, *paths), options)
+ iterator = gitaly_migrate(:diff_between) do |is_enabled|
+ if is_enabled
+ gitaly_commit_client.diff(from, to, options.merge(paths: paths))
+ else
+ diff_patches(from, to, options, *paths)
+ end
+ end
+
+ Gitlab::Git::DiffCollection.new(iterator, options)
end
# Returns a RefName for a given SHA
@@ -486,62 +626,29 @@ module Gitlab
if is_enabled
gitaly_ref_client.find_ref_name(sha, ref_path)
else
- args = %W(#{Gitlab.config.git.bin_path} for-each-ref --count=1 #{ref_path} --contains #{sha})
+ args = %W(for-each-ref --count=1 #{ref_path} --contains #{sha})
# Not found -> ["", 0]
# Found -> ["b8d95eb4969eefacb0a58f6a28f6803f8070e7b9 commit\trefs/environments/production/77\n", 0]
- popen(args, @path).first.split.last
+ run_git(args).first.split.last
end
end
end
- # Returns branch names collection that contains the special commit(SHA1
- # or name)
- #
- # Ex.
- # repo.branch_names_contains('master')
- #
- def branch_names_contains(commit)
- branches_contains(commit).map { |c| c.name }
- end
-
- # Returns branch collection that contains the special commit(SHA1 or name)
- #
- # Ex.
- # repo.branch_names_contains('master')
- #
- def branches_contains(commit)
- commit_obj = rugged.rev_parse(commit)
- parent = commit_obj.parents.first unless commit_obj.parents.empty?
+      # Get refs hash whose key is the commit id
+      # and whose value is an array of the names of the tags and branches pointing at it.
+      # Note that Gitlab::Git::Tag and Gitlab::Git::Branch both inherit from Gitlab::Git::Ref.
+ def refs_hash
+ return @refs_hash if @refs_hash
- walker = Rugged::Walker.new(rugged)
+ @refs_hash = Hash.new { |h, k| h[k] = [] }
- rugged.branches.select do |branch|
- walker.push(branch.target_id)
- walker.hide(parent) if parent
- result = walker.any? { |c| c.oid == commit_obj.oid }
- walker.reset
+ (tags + branches).each do |ref|
+ next unless ref.target && ref.name
- result
+ @refs_hash[ref.dereferenced_target.id] << ref.name
end
- end
- # Get refs hash which key is SHA1
- # and value is a Rugged::Reference
- def refs_hash
- # Initialize only when first call
- if @refs_hash.nil?
- @refs_hash = Hash.new { |h, k| h[k] = [] }
-
- rugged.references.each do |r|
- # Symbolic/remote references may not have an OID; skip over them
- target_oid = r.target.try(:oid)
- if target_oid
- sha = rev_parse_target(target_oid).oid
- @refs_hash[sha] << r
- end
- end
- end
@refs_hash
end
@@ -578,11 +685,7 @@ module Gitlab
if is_enabled
gitaly_commit_client.commit_count(ref)
else
- walker = Rugged::Walker.new(rugged)
- walker.sorting(Rugged::SORT_TOPO | Rugged::SORT_REVERSE)
- oid = rugged.rev_parse_oid(ref)
- walker.push(oid)
- walker.count
+ rugged_commit_count(ref)
end
end
end
@@ -612,49 +715,60 @@ module Gitlab
end
def add_branch(branch_name, user:, target:)
- target_object = Ref.dereference_object(lookup(target))
- raise InvalidRef.new("target not found: #{target}") unless target_object
-
- OperationService.new(user, self).add_branch(branch_name, target_object.oid)
- find_branch(branch_name)
- rescue Rugged::ReferenceError => ex
- raise InvalidRef, ex
+ gitaly_migrate(:operation_user_create_branch) do |is_enabled|
+ if is_enabled
+ gitaly_add_branch(branch_name, user, target)
+ else
+ rugged_add_branch(branch_name, user, target)
+ end
+ end
end
def add_tag(tag_name, user:, target:, message: nil)
- target_object = Ref.dereference_object(lookup(target))
- raise InvalidRef.new("target not found: #{target}") unless target_object
-
- user = Gitlab::Git::User.from_gitlab(user) unless user.respond_to?(:gl_id)
-
- options = nil # Use nil, not the empty hash. Rugged cares about this.
- if message
- options = {
- message: message,
- tagger: Gitlab::Git.committer_hash(email: user.email, name: user.name)
- }
+ gitaly_migrate(:operation_user_add_tag) do |is_enabled|
+ if is_enabled
+ gitaly_add_tag(tag_name, user: user, target: target, message: message)
+ else
+ rugged_add_tag(tag_name, user: user, target: target, message: message)
+ end
end
-
- OperationService.new(user, self).add_tag(tag_name, target_object.oid, options)
-
- find_tag(tag_name)
- rescue Rugged::ReferenceError => ex
- raise InvalidRef, ex
end
def rm_branch(branch_name, user:)
- OperationService.new(user, self).rm_branch(find_branch(branch_name))
+ gitaly_migrate(:operation_user_delete_branch) do |is_enabled|
+ if is_enabled
+ gitaly_operations_client.user_delete_branch(branch_name, user)
+ else
+ OperationService.new(user, self).rm_branch(find_branch(branch_name))
+ end
+ end
end
def rm_tag(tag_name, user:)
- OperationService.new(user, self).rm_tag(find_tag(tag_name))
+ gitaly_migrate(:operation_user_delete_tag) do |is_enabled|
+ if is_enabled
+ gitaly_operations_client.rm_tag(tag_name, user)
+ else
+ Gitlab::Git::OperationService.new(user, self).rm_tag(find_tag(tag_name))
+ end
+ end
end
def find_tag(name)
tags.find { |tag| tag.name == name }
end
- def merge(user, source_sha, target_branch, message)
+ def merge(user, source_sha, target_branch, message, &block)
+ gitaly_migrate(:operation_user_merge_branch) do |is_enabled|
+ if is_enabled
+ gitaly_operation_client.user_merge_branch(user, source_sha, target_branch, message, &block)
+ else
+ rugged_merge(user, source_sha, target_branch, message, &block)
+ end
+ end
+ end
+
+ def rugged_merge(user, source_sha, target_branch, message)
committer = Gitlab::Git.committer_hash(email: user.email, name: user.name)
OperationService.new(user, self).with_branch(target_branch) do |start_commit|
@@ -685,25 +799,32 @@ module Gitlab
nil
end
- def revert(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
- OperationService.new(user, self).with_branch(
- branch_name,
- start_branch_name: start_branch_name,
- start_repository: start_repository
- ) do |start_commit|
-
- Gitlab::Git.check_namespace!(commit, start_repository)
-
- revert_tree_id = check_revert_content(commit, start_commit.sha)
- raise CreateTreeError unless revert_tree_id
+ def ff_merge(user, source_sha, target_branch)
+ gitaly_migrate(:operation_user_ff_branch) do |is_enabled|
+ if is_enabled
+ gitaly_ff_merge(user, source_sha, target_branch)
+ else
+ rugged_ff_merge(user, source_sha, target_branch)
+ end
+ end
+ end
- committer = user_to_committer(user)
+ def revert(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
+ gitaly_migrate(:revert) do |is_enabled|
+ args = {
+ user: user,
+ commit: commit,
+ branch_name: branch_name,
+ message: message,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository
+ }
- create_commit(message: message,
- author: committer,
- committer: committer,
- tree: revert_tree_id,
- parents: [start_commit.sha])
+ if is_enabled
+ gitaly_operations_client.user_revert(args)
+ else
+ rugged_revert(args)
+ end
end
end
@@ -721,44 +842,24 @@ module Gitlab
end
def cherry_pick(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
- OperationService.new(user, self).with_branch(
- branch_name,
- start_branch_name: start_branch_name,
- start_repository: start_repository
- ) do |start_commit|
-
- Gitlab::Git.check_namespace!(commit, start_repository)
-
- cherry_pick_tree_id = check_cherry_pick_content(commit, start_commit.sha)
- raise CreateTreeError unless cherry_pick_tree_id
-
- committer = user_to_committer(user)
+ gitaly_migrate(:cherry_pick) do |is_enabled|
+ args = {
+ user: user,
+ commit: commit,
+ branch_name: branch_name,
+ message: message,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository
+ }
- create_commit(message: message,
- author: {
- email: commit.author_email,
- name: commit.author_name,
- time: commit.authored_date
- },
- committer: committer,
- tree: cherry_pick_tree_id,
- parents: [start_commit.sha])
+ if is_enabled
+ gitaly_operations_client.user_cherry_pick(args)
+ else
+ rugged_cherry_pick(args)
+ end
end
end
- def check_cherry_pick_content(target_commit, source_sha)
- args = [target_commit.sha, source_sha]
- args << 1 if target_commit.merge_commit?
-
- cherry_pick_index = rugged.cherrypick_commit(*args)
- return false if cherry_pick_index.conflicts?
-
- tree_id = cherry_pick_index.write_tree(rugged)
- return false unless diff_exists?(source_sha, tree_id)
-
- tree_id
- end
-
def diff_exists?(sha1, sha2)
rugged.diff(sha1, sha2).size > 0
end
@@ -787,17 +888,12 @@ module Gitlab
end
def delete_refs(*ref_names)
- instructions = ref_names.map do |ref|
- "delete #{ref}\x00\x00"
- end
-
- command = %W[#{Gitlab.config.git.bin_path} update-ref --stdin -z]
- message, status = popen(command, path) do |stdin|
- stdin.write(instructions.join)
- end
-
- unless status.zero?
- raise GitError.new("Could not delete refs #{ref_names}: #{message}")
+ gitaly_migrate(:delete_refs) do |is_enabled|
+ if is_enabled
+ gitaly_delete_refs(*ref_names)
+ else
+ git_delete_refs(*ref_names)
+ end
end
end
@@ -816,16 +912,25 @@ module Gitlab
end
end
- # Delete the specified remote from this repository.
- def remote_delete(remote_name)
- rugged.remotes.delete(remote_name)
- nil
+ # If `mirror_refmap` is present the remote is set as mirror with that mapping
+ def add_remote(remote_name, url, mirror_refmap: nil)
+ gitaly_migrate(:remote_add_remote) do |is_enabled|
+ if is_enabled
+ gitaly_remote_client.add_remote(remote_name, url, mirror_refmap)
+ else
+ rugged_add_remote(remote_name, url, mirror_refmap)
+ end
+ end
end
- # Add a new remote to this repository.
- def remote_add(remote_name, url)
- rugged.remotes.create(remote_name, url)
- nil
+ def remove_remote(remote_name)
+ gitaly_migrate(:remote_remove_remote) do |is_enabled|
+ if is_enabled
+ gitaly_remote_client.remove_remote(remote_name)
+ else
+ rugged_remove_remote(remote_name)
+ end
+ end
end
# Update the specified remote using the values in the +options+ hash
@@ -888,6 +993,22 @@ module Gitlab
@attributes.attributes(path)
end
+ def gitattribute(path, name)
+ attributes(path)[name]
+ end
+
+ # Check .gitattributes for a given ref
+ #
+      # This only checks the root .gitattributes file;
+      # it does not traverse subfolders to find additional .gitattributes files
+ #
+ # This method is around 30 times slower than `attributes`,
+ # which uses `$GIT_DIR/info/attributes`
+ def attributes_at(ref, file_path)
+ parser = AttributesAtRefParser.new(self, ref)
+ parser.attributes(file_path)
+ end
+
def languages(ref = nil)
Gitlab::GitalyClient.migrate(:commit_languages) do |is_enabled|
if is_enabled
@@ -915,21 +1036,39 @@ module Gitlab
end
end
+ def license_short_name
+ gitaly_migrate(:license_short_name) do |is_enabled|
+ if is_enabled
+ gitaly_repository_client.license_short_name
+ else
+ begin
+ # The licensee gem creates a Rugged object from the path:
+ # https://github.com/benbalter/licensee/blob/v8.7.0/lib/licensee/projects/git_project.rb
+ Licensee.license(path).try(:key)
+ rescue Rugged::Error
+ end
+ end
+ end
+ end
+
def with_repo_branch_commit(start_repository, start_branch_name)
Gitlab::Git.check_namespace!(start_repository)
+ start_repository = RemoteRepository.new(start_repository) unless start_repository.is_a?(RemoteRepository)
- return yield nil if start_repository.empty_repo?
+ return yield nil if start_repository.empty?
- if start_repository == self
+ if start_repository.same_repository?(self)
yield commit(start_branch_name)
else
- sha = start_repository.commit(start_branch_name).sha
+ start_commit_id = start_repository.commit_id(start_branch_name)
+
+ return yield nil unless start_commit_id
- if branch_commit = commit(sha)
+ if branch_commit = commit(start_commit_id)
yield branch_commit
else
with_repo_tmp_commit(
- start_repository, start_branch_name, sha) do |tmp_commit|
+ start_repository, start_branch_name, start_commit_id) do |tmp_commit|
yield tmp_commit
end
end
@@ -937,10 +1076,16 @@ module Gitlab
end
def with_repo_tmp_commit(start_repository, start_branch_name, sha)
+ source_ref = start_branch_name
+
+ unless Gitlab::Git.branch_ref?(source_ref)
+ source_ref = "#{Gitlab::Git::BRANCH_REF_PREFIX}#{source_ref}"
+ end
+
tmp_ref = fetch_ref(
- start_repository.path,
- "#{Gitlab::Git::BRANCH_REF_PREFIX}#{start_branch_name}",
- "refs/tmp/#{SecureRandom.hex}/head"
+ start_repository,
+ source_ref: source_ref,
+ target_ref: "refs/tmp/#{SecureRandom.hex}"
)
yield commit(sha)
@@ -948,12 +1093,12 @@ module Gitlab
delete_refs(tmp_ref) if tmp_ref
end
- def fetch_source_branch(source_repository, source_branch, local_ref)
- with_repo_branch_commit(source_repository, source_branch) do |commit|
- if commit
- write_ref(local_ref, commit.sha)
+ def fetch_source_branch!(source_repository, source_branch, local_ref)
+ Gitlab::GitalyClient.migrate(:fetch_source_branch) do |is_enabled|
+ if is_enabled
+ gitaly_repository_client.fetch_source_branch(source_repository, source_branch, local_ref)
else
- raise Rugged::ReferenceError, 'source repository is empty'
+ rugged_fetch_source_branch(source_repository, source_branch, local_ref)
end
end
end
@@ -971,13 +1116,29 @@ module Gitlab
end
end
- def write_ref(ref_path, sha)
- rugged.references.create(ref_path, sha, force: true)
+ def write_ref(ref_path, ref, old_ref: nil, shell: true)
+ ref_path = "#{Gitlab::Git::BRANCH_REF_PREFIX}#{ref_path}" unless ref_path.start_with?("refs/") || ref_path == "HEAD"
+
+ gitaly_migrate(:write_ref) do |is_enabled|
+ if is_enabled
+ gitaly_repository_client.write_ref(ref_path, ref, old_ref, shell)
+ else
+ local_write_ref(ref_path, ref, old_ref: old_ref, shell: shell)
+ end
+ end
end
- def fetch_ref(source_path, source_ref, target_ref)
- args = %W(fetch --no-tags -f #{source_path} #{source_ref}:#{target_ref})
- message, status = run_git(args)
+ def fetch_ref(source_repository, source_ref:, target_ref:)
+ Gitlab::Git.check_namespace!(source_repository)
+ source_repository = RemoteRepository.new(source_repository) unless source_repository.is_a?(RemoteRepository)
+
+ message, status = GitalyClient.migrate(:fetch_ref) do |is_enabled|
+ if is_enabled
+ gitaly_fetch_ref(source_repository, source_ref: source_ref, target_ref: target_ref)
+ else
+ local_fetch_ref(source_repository.path, source_ref: source_ref, target_ref: target_ref)
+ end
+ end
# Make sure ref was created, and raise Rugged::ReferenceError when not
raise Rugged::ReferenceError, message if status != 0
@@ -986,37 +1147,188 @@ module Gitlab
end
# Refactoring aid; allows us to copy code from app/models/repository.rb
- def run_git(args)
- circuit_breaker.perform do
- popen([Gitlab.config.git.bin_path, *args], path)
+ def commit(ref = 'HEAD')
+ Gitlab::Git::Commit.find(self, ref)
+ end
+
+ def empty?
+ !has_visible_content?
+ end
+
+ def fetch_repository_as_mirror(repository)
+ gitaly_migrate(:remote_fetch_internal_remote) do |is_enabled|
+ if is_enabled
+ gitaly_remote_client.fetch_internal_remote(repository)
+ else
+ rugged_fetch_repository_as_mirror(repository)
+ end
end
end
- # Refactoring aid; allows us to copy code from app/models/repository.rb
- def commit(ref = 'HEAD')
- Gitlab::Git::Commit.find(self, ref)
+ def blob_at(sha, path)
+ Gitlab::Git::Blob.find(self, sha, path) unless Gitlab::Git.blank_ref?(sha)
end
- # Refactoring aid; allows us to copy code from app/models/repository.rb
- def empty_repo?
- !exists? || !has_visible_content?
+ # Items should be of format [[commit_id, path], [commit_id1, path1]]
+ def batch_blobs(items, blob_size_limit: Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE)
+ Gitlab::Git::Blob.batch(self, items, blob_size_limit: blob_size_limit)
end
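+ # e.g. (sketch, hypothetical sha): batch_blobs([[head_sha, 'README.md'], [head_sha, 'VERSION']])
+ # returns one Gitlab::Git::Blob per pair, with data capped at blob_size_limit bytes.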
- #
- # Git repository can contains some hidden refs like:
- # /refs/notes/*
- # /refs/git-as-svn/*
- # /refs/pulls/*
- # This refs by default not visible in project page and not cloned to client side.
- #
- # This method return true if repository contains some content visible in project page.
- #
- def has_visible_content?
- branch_count > 0
+ def commit_index(user, branch_name, index, options)
+ committer = user_to_committer(user)
+
+ OperationService.new(user, self).with_branch(branch_name) do
+ commit_params = options.merge(
+ tree: index.write_tree(rugged),
+ author: committer,
+ committer: committer
+ )
+
+ create_commit(commit_params)
+ end
+ end
+
+ def fsck
+ gitaly_migrate(:git_fsck) do |is_enabled|
+ msg, status = if is_enabled
+ gitaly_fsck
+ else
+ shell_fsck
+ end
+
+ raise GitError.new("Could not fsck repository: #{msg}") unless status.zero?
+ end
+ end
+
+ def create_from_bundle(bundle_path)
+ gitaly_migrate(:create_repo_from_bundle) do |is_enabled|
+ if is_enabled
+ gitaly_repository_client.create_from_bundle(bundle_path)
+ else
+ run_git!(%W(clone --bare -- #{bundle_path} #{path}), chdir: nil)
+ self.class.create_hooks(path, File.expand_path(Gitlab.config.gitlab_shell.hooks_path))
+ end
+ end
+
+ true
+ end
+
+ def rebase(user, rebase_id, branch:, branch_sha:, remote_repository:, remote_branch:)
+ gitaly_migrate(:rebase) do |is_enabled|
+ if is_enabled
+ gitaly_rebase(user, rebase_id,
+ branch: branch,
+ branch_sha: branch_sha,
+ remote_repository: remote_repository,
+ remote_branch: remote_branch)
+ else
+ git_rebase(user, rebase_id,
+ branch: branch,
+ branch_sha: branch_sha,
+ remote_repository: remote_repository,
+ remote_branch: remote_branch)
+ end
+ end
+ end
+
+ def rebase_in_progress?(rebase_id)
+ gitaly_migrate(:rebase_in_progress) do |is_enabled|
+ if is_enabled
+ gitaly_repository_client.rebase_in_progress?(rebase_id)
+ else
+ fresh_worktree?(worktree_path(REBASE_WORKTREE_PREFIX, rebase_id))
+ end
+ end
+ end
+
+ def squash(user, squash_id, branch:, start_sha:, end_sha:, author:, message:)
+ gitaly_migrate(:squash) do |is_enabled|
+ if is_enabled
+ gitaly_operation_client.user_squash(user, squash_id, branch,
+ start_sha, end_sha, author, message)
+ else
+ git_squash(user, squash_id, branch, start_sha, end_sha, author, message)
+ end
+ end
+ end
+
+ def squash_in_progress?(squash_id)
+ gitaly_migrate(:squash_in_progress) do |is_enabled|
+ if is_enabled
+ gitaly_repository_client.squash_in_progress?(squash_id)
+ else
+ fresh_worktree?(worktree_path(SQUASH_WORKTREE_PREFIX, squash_id))
+ end
+ end
+ end
+
+ def push_remote_branches(remote_name, branch_names, forced: true)
+ success = @gitlab_projects.push_branches(remote_name, GITLAB_PROJECTS_TIMEOUT, forced, branch_names)
+
+ success || gitlab_projects_error
+ end
+
+ def delete_remote_branches(remote_name, branch_names)
+ success = @gitlab_projects.delete_remote_branches(remote_name, branch_names)
+
+ success || gitlab_projects_error
+ end
+
+ def bundle_to_disk(save_path)
+ gitaly_migrate(:bundle_to_disk) do |is_enabled|
+ if is_enabled
+ gitaly_repository_client.create_bundle(save_path)
+ else
+ run_git!(%W(bundle create #{save_path} --all))
+ end
+ end
+
+ true
+ end
+
+ # rubocop:disable Metrics/ParameterLists
+ def multi_action(
+ user, branch_name:, message:, actions:,
+ author_email: nil, author_name: nil,
+ start_branch_name: nil, start_repository: self)
+
+ gitaly_migrate(:operation_user_commit_files) do |is_enabled|
+ if is_enabled
+ gitaly_operation_client.user_commit_files(user, branch_name,
+ message, actions, author_email, author_name,
+ start_branch_name, start_repository)
+ else
+ rugged_multi_action(user, branch_name, message, actions,
+ author_email, author_name, start_branch_name, start_repository)
+ end
+ end
+ end
+ # rubocop:enable Metrics/ParameterLists
+
+ def write_config(full_path:)
+ return unless full_path.present?
+
+ gitaly_migrate(:write_config) do |is_enabled|
+ if is_enabled
+ gitaly_repository_client.write_config(full_path: full_path)
+ else
+ rugged_write_config(full_path: full_path)
+ end
+ end
end
def gitaly_repository
- Gitlab::GitalyClient::Util.repository(@storage, @relative_path)
+ Gitlab::GitalyClient::Util.repository(@storage, @relative_path, @gl_repository)
+ end
+
+ def gitaly_operations_client
+ @gitaly_operations_client ||= Gitlab::GitalyClient::OperationService.new(self)
end
def gitaly_ref_client
@@ -1031,49 +1343,360 @@ module Gitlab
@gitaly_repository_client ||= Gitlab::GitalyClient::RepositoryService.new(self)
end
+ def gitaly_operation_client
+ @gitaly_operation_client ||= Gitlab::GitalyClient::OperationService.new(self)
+ end
+
+ def gitaly_remote_client
+ @gitaly_remote_client ||= Gitlab::GitalyClient::RemoteService.new(self)
+ end
+
+ def gitaly_blob_client
+ @gitaly_blob_client ||= Gitlab::GitalyClient::BlobService.new(self)
+ end
+
+ def gitaly_conflicts_client(our_commit_oid, their_commit_oid)
+ Gitlab::GitalyClient::ConflictsService.new(self, our_commit_oid, their_commit_oid)
+ end
+
def gitaly_migrate(method, status: Gitlab::GitalyClient::MigrationStatus::OPT_IN, &block)
Gitlab::GitalyClient.migrate(method, status: status, &block)
rescue GRPC::NotFound => e
raise NoRepository.new(e)
+ rescue GRPC::InvalidArgument => e
+ raise ArgumentError.new(e)
rescue GRPC::BadStatus => e
raise CommandError.new(e)
end
+ def branch_names_contains_sha(sha)
+ gitaly_migrate(:branch_names_contains_sha) do |is_enabled|
+ if is_enabled
+ gitaly_ref_client.branch_names_contains_sha(sha)
+ else
+ refs_contains_sha('refs/heads/', sha)
+ end
+ end
+ end
+
+ def tag_names_contains_sha(sha)
+ gitaly_migrate(:tag_names_contains_sha) do |is_enabled|
+ if is_enabled
+ gitaly_ref_client.tag_names_contains_sha(sha)
+ else
+ refs_contains_sha('refs/tags/', sha)
+ end
+ end
+ end
+
+ def search_files_by_content(query, ref)
+ return [] if empty? || query.blank?
+
+ offset = 2
+ args = %W(grep -i -I -n -z --before-context #{offset} --after-context #{offset} -E -e #{Regexp.escape(query)} #{ref || root_ref})
+
+ run_git(args).first.scrub.split(/^--$/)
+ end
+
+ def can_be_merged?(source_sha, target_branch)
+ gitaly_migrate(:can_be_merged) do |is_enabled|
+ if is_enabled
+ gitaly_can_be_merged?(source_sha, find_branch(target_branch, true).target)
+ else
+ rugged_can_be_merged?(source_sha, target_branch)
+ end
+ end
+ end
+
+ def search_files_by_name(query, ref)
+ safe_query = Regexp.escape(query.sub(%r{^/*}, ""))
+
+ return [] if empty? || safe_query.blank?
+
+ args = %W(ls-tree --full-tree -r #{ref || root_ref} --name-status | #{safe_query})
+
+ run_git(args).first.lines.map(&:strip)
+ end
+
+ def find_commits_by_message(query, ref, path, limit, offset)
+ gitaly_migrate(:commits_by_message) do |is_enabled|
+ if is_enabled
+ find_commits_by_message_by_gitaly(query, ref, path, limit, offset)
+ else
+ find_commits_by_message_by_shelling_out(query, ref, path, limit, offset)
+ end
+ end
+ end
+
+ def shell_blame(sha, path)
+ output, _status = run_git(%W(blame -p #{sha} -- #{path}))
+ output
+ end
+
+ def last_commit_id_for_path(sha, path)
+ gitaly_migrate(:last_commit_for_path) do |is_enabled|
+ if is_enabled
+ last_commit_for_path_by_gitaly(sha, path).id
+ else
+ last_commit_id_for_path_by_shelling_out(sha, path)
+ end
+ end
+ end
+
+ def rev_list(including: [], excluding: [], objects: false, &block)
+ args = ['rev-list']
+
+ args.push(*rev_list_param(including))
+
+ exclude_param = *rev_list_param(excluding)
+ if exclude_param.any?
+ args.push('--not')
+ args.push(*exclude_param)
+ end
+
+ args.push('--objects') if objects
+
+ run_git!(args, lazy_block: block)
+ end
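+ # e.g. (sketch): rev_list(including: newrev, excluding: :all, objects: true)
+ # runs `git rev-list <newrev> --not --all --objects`; when a block is given the
+ # output is handed to it lazily instead of being returned in one string.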
+
+ def missed_ref(oldrev, newrev)
+ run_git!(['rev-list', '--max-count=1', oldrev, "^#{newrev}"])
+ end
+
private
+ def local_write_ref(ref_path, ref, old_ref: nil, shell: true)
+ if shell
+ shell_write_ref(ref_path, ref, old_ref)
+ else
+ rugged_write_ref(ref_path, ref)
+ end
+ end
+
+ def refs_contains_sha(refs_prefix, sha)
+ refs_prefix << "/" unless refs_prefix.ends_with?('/')
+
+ # By forcing the output to %(refname), each line with a ref will start with
+ # the ref prefix. All other lines can be discarded.
+ args = %W(for-each-ref --contains=#{sha} --format=%(refname) #{refs_prefix})
+ names, code = run_git(args)
+
+ return [] unless code.zero?
+
+ refs = []
+ left_slice_count = refs_prefix.length
+ names.lines.each do |line|
+ next unless line.start_with?(refs_prefix)
+
+ refs << line.rstrip[left_slice_count..-1]
+ end
+
+ refs
+ end
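+ # e.g. (sketch): refs_contains_sha('refs/heads/', sha) strips the prefix from
+ # every matching line and returns bare names such as ['master', 'feature/x'].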
+
+ def rugged_write_config(full_path:)
+ rugged.config['gitlab.fullpath'] = full_path
+ end
+
+ def shell_write_ref(ref_path, ref, old_ref)
+ raise ArgumentError, "invalid ref_path #{ref_path.inspect}" if ref_path.include?(' ')
+ raise ArgumentError, "invalid ref #{ref.inspect}" if ref.include?("\x00")
+ raise ArgumentError, "invalid old_ref #{old_ref.inspect}" if !old_ref.nil? && old_ref.include?("\x00")
+
+ input = "update #{ref_path}\x00#{ref}\x00#{old_ref}\x00"
+ run_git!(%w[update-ref --stdin -z]) { |stdin| stdin.write(input) }
+ end
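+ # e.g. (sketch): shell_write_ref('refs/heads/master', new_sha, old_sha) feeds
+ # "update refs/heads/master\x00<new>\x00<old>\x00" to `git update-ref --stdin -z`,
+ # so git verifies the ref still points at old_sha before moving it.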
+
+ def rugged_write_ref(ref_path, ref)
+ rugged.references.create(ref_path, ref, force: true)
+ rescue Rugged::ReferenceError => ex
+ Rails.logger.error "Unable to create #{ref_path} reference for repository #{path}: #{ex}"
+ rescue Rugged::OSError => ex
+ raise unless ex.message =~ /Failed to create locked file/ && ex.message =~ /File exists/
+
+ Rails.logger.error "Unable to create #{ref_path} reference for repository #{path}: #{ex}"
+ end
+
+ def run_git(args, chdir: path, env: {}, nice: false, lazy_block: nil, &block)
+ cmd = [Gitlab.config.git.bin_path, *args]
+ cmd.unshift("nice") if nice
+
+ object_directories = alternate_object_directories
+ if object_directories.any?
+ env['GIT_ALTERNATE_OBJECT_DIRECTORIES'] = object_directories.join(File::PATH_SEPARATOR)
+ end
+
+ circuit_breaker.perform do
+ popen(cmd, chdir, env, lazy_block: lazy_block, &block)
+ end
+ end
+
+ def run_git!(args, chdir: path, env: {}, nice: false, lazy_block: nil, &block)
+ output, status = run_git(args, chdir: chdir, env: env, nice: nice, lazy_block: lazy_block, &block)
+
+ raise GitError, output unless status.zero?
+
+ output
+ end
+
+ def run_git_with_timeout(args, timeout, env: {})
+ circuit_breaker.perform do
+ popen_with_timeout([Gitlab.config.git.bin_path, *args], timeout, path, env)
+ end
+ end
+
+ def fresh_worktree?(path)
+ File.exist?(path) && !clean_stuck_worktree(path)
+ end
+
+ def with_worktree(worktree_path, branch, sparse_checkout_files: nil, env:)
+ base_args = %w(worktree add --detach)
+
+ # Note that we _don't_ want to test for `.present?` here: If the caller
+ # passes a non-nil empty value it means it still wants a sparse checkout,
+ # just one that isn't interested in any file, perhaps because it wants to
+ # check out the files changed by a changeset that only adds files.
+ if sparse_checkout_files
+ # Create worktree without checking out
+ run_git!(base_args + ['--no-checkout', worktree_path], env: env)
+ worktree_git_path = run_git!(%w(rev-parse --git-dir), chdir: worktree_path).chomp
+
+ configure_sparse_checkout(worktree_git_path, sparse_checkout_files)
+
+ # After sparse checkout configuration, checkout `branch` in worktree
+ run_git!(%W(checkout --detach #{branch}), chdir: worktree_path, env: env)
+ else
+ # Create worktree and checkout `branch` in it
+ run_git!(base_args + [worktree_path, branch], env: env)
+ end
+
+ yield
+ ensure
+ FileUtils.rm_rf(worktree_path) if File.exist?(worktree_path)
+ FileUtils.rm_rf(worktree_git_path) if worktree_git_path && File.exist?(worktree_git_path)
+ end
+
+ def clean_stuck_worktree(path)
+ return false unless File.mtime(path) < 15.minutes.ago
+
+ FileUtils.rm_rf(path)
+ true
+ end
+
+ # Adding a worktree means checking out the repository. For large repos,
+ # this can be very expensive, so set up sparse checkout for the worktree
+ # to only check out the files we're interested in.
+ def configure_sparse_checkout(worktree_git_path, files)
+ run_git!(%w(config core.sparseCheckout true))
+
+ return if files.empty?
+
+ worktree_info_path = File.join(worktree_git_path, 'info')
+ FileUtils.mkdir_p(worktree_info_path)
+ File.write(File.join(worktree_info_path, 'sparse-checkout'), files)
+ end
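+ # e.g. (sketch): configure_sparse_checkout(worktree_git_path, "VERSION\n") turns
+ # on core.sparseCheckout and writes the list to <worktree_git_dir>/info/sparse-checkout,
+ # so the subsequent checkout only materialises the listed paths.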
+
+ def gitaly_fsck
+ gitaly_repository_client.fsck
+ end
+
+ def shell_fsck
+ run_git(%W[--git-dir=#{path} fsck], nice: true)
+ end
+
+ def rugged_fetch_source_branch(source_repository, source_branch, local_ref)
+ with_repo_branch_commit(source_repository, source_branch) do |commit|
+ if commit
+ write_ref(local_ref, commit.sha)
+ true
+ else
+ false
+ end
+ end
+ end
+
+ def worktree_path(prefix, id)
+ id = id.to_s
+ raise ArgumentError, "worktree id can't be empty" unless id.present?
+ raise ArgumentError, "worktree id can't contain slashes " if id.include?("/")
+
+ File.join(path, 'gitlab-worktree', "#{prefix}-#{id}")
+ end
+
+ def git_env_for_user(user)
+ {
+ 'GIT_COMMITTER_NAME' => user.name,
+ 'GIT_COMMITTER_EMAIL' => user.email,
+ 'GL_ID' => Gitlab::GlId.gl_id(user),
+ 'GL_PROTOCOL' => Gitlab::Git::Hook::GL_PROTOCOL,
+ 'GL_REPOSITORY' => gl_repository
+ }
+ end
+
# Gitaly note: JV: Trying to get rid of the 'filter' option so we can implement this with 'git'.
def branches_filter(filter: nil, sort_by: nil)
- # n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/37464
- branches = Gitlab::GitalyClient.allow_n_plus_1_calls do
- rugged.branches.each(filter).map do |rugged_ref|
- begin
- target_commit = Gitlab::Git::Commit.find(self, rugged_ref.target)
- Gitlab::Git::Branch.new(self, rugged_ref.name, rugged_ref.target, target_commit)
- rescue Rugged::ReferenceError
- # Omit invalid branch
- end
- end.compact
- end
+ branches = rugged.branches.each(filter).map do |rugged_ref|
+ begin
+ target_commit = Gitlab::Git::Commit.find(self, rugged_ref.target)
+ Gitlab::Git::Branch.new(self, rugged_ref.name, rugged_ref.target, target_commit)
+ rescue Rugged::ReferenceError
+ # Omit invalid branch
+ end
+ end.compact
sort_branches(branches, sort_by)
end
- def log_using_shell?(options)
- options[:path].present? ||
- options[:disable_walk] ||
- options[:skip_merges] ||
- options[:after] ||
- options[:before]
+ def git_merged_branch_names(branch_names, root_sha)
+ git_arguments =
+ %W[branch --merged #{root_sha}
+ --format=%(refname:short)\ %(objectname)] + branch_names
+
+ lines = run_git(git_arguments).first.lines
+
+ lines.each_with_object([]) do |line, branches|
+ name, sha = line.strip.split(' ', 2)
+
+ branches << name if sha != root_sha
+ end
end
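+ # e.g. (sketch): git_merged_branch_names(%w[feature-a feature-b], root_sha)
+ # returns the names whose tips are already reachable from root_sha, skipping
+ # the branch whose tip equals root_sha itself.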
- def log_by_walk(sha, options)
- walk_options = {
- show: sha,
- sort: Rugged::SORT_NONE,
- limit: options[:limit],
- offset: options[:offset]
- }
- Rugged::Walker.walk(rugged, walk_options).to_a
+ def gitaly_merged_branch_names(branch_names, root_sha)
+ qualified_branch_names = branch_names.map { |b| "refs/heads/#{b}" }
+
+ gitaly_ref_client.merged_branches(qualified_branch_names)
+ .reject { |b| b.target == root_sha }
+ .map(&:name)
+ end
+
+ def process_count_commits_options(options)
+ if options[:from] || options[:to]
+ ref =
+ if options[:left_right] # Compare with merge-base for left-right
+ "#{options[:from]}...#{options[:to]}"
+ else
+ "#{options[:from]}..#{options[:to]}"
+ end
+
+ options.merge(ref: ref)
+
+ elsif options[:ref] && options[:left_right]
+ from, to = options[:ref].match(/\A([^\.]*)\.{2,3}([^\.]*)\z/)[1..2]
+
+ options.merge(from: from, to: to)
+ else
+ options
+ end
end
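+ # e.g. (sketch): process_count_commits_options(from: 'v1.0.0', to: 'master', left_right: true)
+ # merges in ref: "v1.0.0...master", i.e. the symmetric-difference range counted
+ # from the merge base of the two revisions.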
# Gitaly note: JV: although #log_by_shell shells out to Git I think the
@@ -1089,7 +1712,7 @@ module Gitlab
offset_in_ruby = use_follow_flag && options[:offset].present?
limit += offset if offset_in_ruby
- cmd = %W[#{Gitlab.config.git.bin_path} --git-dir=#{path} log]
+ cmd = %w[log]
cmd << "--max-count=#{limit}"
cmd << '--format=%H'
cmd << "--skip=#{offset}" unless offset_in_ruby
@@ -1097,7 +1720,12 @@ module Gitlab
cmd << '--no-merges' if options[:skip_merges]
cmd << "--after=#{options[:after].iso8601}" if options[:after]
cmd << "--before=#{options[:before].iso8601}" if options[:before]
- cmd << sha
+
+ if options[:all]
+ cmd += %w[--all --reverse]
+ else
+ cmd << sha
+ end
# :path can be a string or an array of strings
if options[:path].present?
@@ -1105,7 +1733,7 @@ module Gitlab
cmd += Array(options[:path])
end
- raw_output = IO.popen(cmd) { |io| io.read }
+ raw_output, _status = run_git(cmd)
lines = offset_in_ruby ? raw_output.lines.drop(offset) : raw_output.lines
lines.map! { |c| Rugged::Commit.new(rugged, c.strip) }
@@ -1143,7 +1771,21 @@ module Gitlab
end
def alternate_object_directories
- Gitlab::Git::Env.all.values_at(*ALLOWED_OBJECT_DIRECTORIES_VARIABLES).compact
+ relative_paths = relative_object_directories
+
+ if relative_paths.any?
+ relative_paths.map { |d| File.join(path, d) }
+ else
+ absolute_object_directories.flat_map { |d| d.split(File::PATH_SEPARATOR) }
+ end
+ end
+
+ def relative_object_directories
+ Gitlab::Git::Env.all.values_at(*ALLOWED_OBJECT_RELATIVE_DIRECTORIES_VARIABLES).flatten.compact
+ end
+
+ def absolute_object_directories
+ Gitlab::Git::Env.all.values_at(*ALLOWED_OBJECT_DIRECTORIES_VARIABLES).flatten.compact
end
# Get the content of a blob for a given commit. If the blob is a commit
@@ -1200,6 +1842,7 @@ module Gitlab
end
return nil unless tmp_entry.type == :tree
+
tmp_entry = tmp_entry[dir]
end
end
@@ -1270,19 +1913,68 @@ module Gitlab
end
def count_commits_by_gitaly(options)
- gitaly_commit_client.commit_count(options[:ref], options)
+ if options[:left_right]
+ from = options[:from]
+ to = options[:to]
+
+ right_count = gitaly_commit_client
+ .commit_count("#{from}..#{to}", options)
+ left_count = gitaly_commit_client
+ .commit_count("#{to}..#{from}", options)
+
+ [left_count, right_count]
+ else
+ gitaly_commit_client.commit_count(options[:ref], options)
+ end
end
def count_commits_by_shelling_out(options)
- cmd = %W[#{Gitlab.config.git.bin_path} --git-dir=#{path} rev-list]
+ cmd = count_commits_shelling_command(options)
+
+ raw_output, _status = run_git(cmd)
+
+ process_count_commits_raw_output(raw_output, options)
+ end
+
+ def count_commits_shelling_command(options)
+ cmd = %w[rev-list]
cmd << "--after=#{options[:after].iso8601}" if options[:after]
cmd << "--before=#{options[:before].iso8601}" if options[:before]
- cmd += %W[--count #{options[:ref]}]
+ cmd << "--max-count=#{options[:max_count]}" if options[:max_count]
+ cmd << "--left-right" if options[:left_right]
+ cmd << '--count'
+
+ cmd << if options[:all]
+ '--all'
+ elsif options[:ref]
+ options[:ref]
+ else
+ raise ArgumentError, "Please specify a valid ref or set the 'all' attribute to true"
+ end
+
cmd += %W[-- #{options[:path]}] if options[:path].present?
+ cmd
+ end
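+ # e.g. (sketch): count_commits_shelling_command(ref: 'master', max_count: 100)
+ # builds %w[rev-list --max-count=100 --count master]; passing neither :ref nor
+ # :all raises an ArgumentError.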
+
+ def process_count_commits_raw_output(raw_output, options)
+ if options[:left_right]
+ result = raw_output.scan(/\d+/).map(&:to_i)
- raw_output = IO.popen(cmd) { |io| io.read }
+ if result.sum != options[:max_count]
+ result
+ else # Reaching max count, right is not accurate
+ right_option =
+ process_count_commits_options(options
+ .except(:left_right, :from, :to)
+ .merge(ref: options[:to]))
- raw_output.to_i
+ right = count_commits_by_shelling_out(right_option)
+
+ [result.first, right] # left should be accurate in the first call
+ end
+ else
+ raw_output.to_i
+ end
end
def gitaly_ls_files(ref)
@@ -1299,20 +1991,17 @@ module Gitlab
return []
end
- cmd = %W(#{Gitlab.config.git.bin_path} --git-dir=#{path} ls-tree)
- cmd += %w(-r)
- cmd += %w(--full-tree)
- cmd += %w(--full-name)
- cmd += %W(-- #{actual_ref})
+ cmd = %W(ls-tree -r --full-tree --full-name -- #{actual_ref})
+ raw_output, _status = run_git(cmd)
- raw_output = IO.popen(cmd, &:read).split("\n").map do |f|
+ lines = raw_output.split("\n").map do |f|
stuff, path = f.split("\t")
_mode, type, _sha = stuff.split(" ")
path if type == "blob"
# Contain only blob type
end
- raw_output.compact
+ lines.compact
end
# Returns true if the given ref name exists
@@ -1320,6 +2009,7 @@ module Gitlab
# Ref names must start with `refs/`.
def rugged_ref_exists?(ref_name)
raise ArgumentError, 'invalid refname' unless ref_name.start_with?('refs/')
+
rugged.references.exist?(ref_name)
rescue Rugged::ReferenceError
false
@@ -1353,12 +2043,40 @@ module Gitlab
false
end
+ def gitaly_add_tag(tag_name, user:, target:, message: nil)
+ gitaly_operations_client.add_tag(tag_name, user, target, message)
+ end
+
+ def rugged_add_tag(tag_name, user:, target:, message: nil)
+ target_object = Ref.dereference_object(lookup(target))
+ raise InvalidRef.new("target not found: #{target}") unless target_object
+
+ user = Gitlab::Git::User.from_gitlab(user) unless user.respond_to?(:gl_id)
+
+ options = nil # Use nil, not the empty hash. Rugged cares about this.
+ if message
+ options = {
+ message: message,
+ tagger: Gitlab::Git.committer_hash(email: user.email, name: user.name)
+ }
+ end
+
+ Gitlab::Git::OperationService.new(user, self).add_tag(tag_name, target_object.oid, options)
+
+ find_tag(tag_name)
+ rescue Rugged::ReferenceError => ex
+ raise InvalidRef, ex
+ rescue Rugged::TagError
+ raise TagExistsError
+ end
+
def rugged_create_branch(ref, start_point)
rugged_ref = rugged.branches.create(ref, start_point)
target_commit = Gitlab::Git::Commit.find(self, rugged_ref.target)
Gitlab::Git::Branch.new(self, rugged_ref.name, rugged_ref.target, target_commit)
rescue Rugged::ReferenceError => e
- raise InvalidRef.new("Branch #{ref} already exists") if e.to_s =~ /'refs\/heads\/#{ref}'/
+ raise InvalidRef.new("Branch #{ref} already exists") if e.to_s =~ %r{'refs/heads/#{ref}'}
+
raise InvalidRef.new("Invalid reference #{start_point}")
end
@@ -1395,6 +2113,333 @@ module Gitlab
file.write(gitattributes_content)
end
end
+
+ def rugged_revert(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
+ OperationService.new(user, self).with_branch(
+ branch_name,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository
+ ) do |start_commit|
+
+ Gitlab::Git.check_namespace!(commit, start_repository)
+
+ revert_tree_id = check_revert_content(commit, start_commit.sha)
+ raise CreateTreeError unless revert_tree_id
+
+ committer = user_to_committer(user)
+
+ create_commit(message: message,
+ author: committer,
+ committer: committer,
+ tree: revert_tree_id,
+ parents: [start_commit.sha])
+ end
+ end
+
+ def gitaly_add_branch(branch_name, user, target)
+ gitaly_operation_client.user_create_branch(branch_name, user, target)
+ rescue GRPC::FailedPrecondition => ex
+ raise InvalidRef, ex
+ end
+
+ def rugged_add_branch(branch_name, user, target)
+ target_object = Ref.dereference_object(lookup(target))
+ raise InvalidRef.new("target not found: #{target}") unless target_object
+
+ OperationService.new(user, self).add_branch(branch_name, target_object.oid)
+ find_branch(branch_name)
+ rescue Rugged::ReferenceError => ex
+ raise InvalidRef, ex
+ end
+
+ def rugged_cherry_pick(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
+ OperationService.new(user, self).with_branch(
+ branch_name,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository
+ ) do |start_commit|
+
+ Gitlab::Git.check_namespace!(commit, start_repository)
+
+ cherry_pick_tree_id = check_cherry_pick_content(commit, start_commit.sha)
+ raise CreateTreeError unless cherry_pick_tree_id
+
+ committer = user_to_committer(user)
+
+ create_commit(message: message,
+ author: {
+ email: commit.author_email,
+ name: commit.author_name,
+ time: commit.authored_date
+ },
+ committer: committer,
+ tree: cherry_pick_tree_id,
+ parents: [start_commit.sha])
+ end
+ end
+
+ def check_cherry_pick_content(target_commit, source_sha)
+ args = [target_commit.sha, source_sha]
+ args << 1 if target_commit.merge_commit?
+
+ cherry_pick_index = rugged.cherrypick_commit(*args)
+ return false if cherry_pick_index.conflicts?
+
+ tree_id = cherry_pick_index.write_tree(rugged)
+ return false unless diff_exists?(source_sha, tree_id)
+
+ tree_id
+ end
+
+ def gitaly_rebase(user, rebase_id, branch:, branch_sha:, remote_repository:, remote_branch:)
+ gitaly_operation_client.user_rebase(user, rebase_id,
+ branch: branch,
+ branch_sha: branch_sha,
+ remote_repository: remote_repository,
+ remote_branch: remote_branch)
+ end
+
+ def git_rebase(user, rebase_id, branch:, branch_sha:, remote_repository:, remote_branch:)
+ rebase_path = worktree_path(REBASE_WORKTREE_PREFIX, rebase_id)
+ env = git_env_for_user(user)
+
+ if remote_repository.is_a?(RemoteRepository)
+ env.merge!(remote_repository.fetch_env)
+ remote_repo_path = GITALY_INTERNAL_URL
+ else
+ remote_repo_path = remote_repository.path
+ end
+
+ with_worktree(rebase_path, branch, env: env) do
+ run_git!(
+ %W(pull --rebase #{remote_repo_path} #{remote_branch}),
+ chdir: rebase_path, env: env
+ )
+
+ rebase_sha = run_git!(%w(rev-parse HEAD), chdir: rebase_path, env: env).strip
+
+ Gitlab::Git::OperationService.new(user, self)
+ .update_branch(branch, rebase_sha, branch_sha)
+
+ rebase_sha
+ end
+ end
+
+ def git_squash(user, squash_id, branch, start_sha, end_sha, author, message)
+ squash_path = worktree_path(SQUASH_WORKTREE_PREFIX, squash_id)
+ env = git_env_for_user(user).merge(
+ 'GIT_AUTHOR_NAME' => author.name,
+ 'GIT_AUTHOR_EMAIL' => author.email
+ )
+ diff_range = "#{start_sha}...#{end_sha}"
+ diff_files = run_git!(
+ %W(diff --name-only --diff-filter=ar --binary #{diff_range})
+ ).chomp
+
+ with_worktree(squash_path, branch, sparse_checkout_files: diff_files, env: env) do
+ # Apply diff of the `diff_range` to the worktree
+ diff = run_git!(%W(diff --binary #{diff_range}))
+ run_git!(%w(apply --index --whitespace=nowarn), chdir: squash_path, env: env) do |stdin|
+ stdin.binmode
+ stdin.write(diff)
+ end
+
+ # Commit the `diff_range` diff
+ run_git!(%W(commit --no-verify --message #{message}), chdir: squash_path, env: env)
+
+ # Return the squash sha. May print a warning for ambiguous refs, but
+ # we can ignore that with `--quiet` and just take the SHA, if present.
+ # HEAD here always refers to the current HEAD commit, even if there is
+ # another ref called HEAD.
+ run_git!(
+ %w(rev-parse --quiet --verify HEAD), chdir: squash_path, env: env
+ ).chomp
+ end
+ end
+
+ def local_fetch_ref(source_path, source_ref:, target_ref:)
+ args = %W(fetch --no-tags -f #{source_path} #{source_ref}:#{target_ref})
+ run_git(args)
+ end
+
+ def gitaly_fetch_ref(source_repository, source_ref:, target_ref:)
+ args = %W(fetch --no-tags -f #{GITALY_INTERNAL_URL} #{source_ref}:#{target_ref})
+
+ run_git(args, env: source_repository.fetch_env)
+ end
+
+ def gitaly_ff_merge(user, source_sha, target_branch)
+ gitaly_operations_client.user_ff_branch(user, source_sha, target_branch)
+ rescue GRPC::FailedPrecondition => e
+ raise CommitError, e
+ end
+
+ def rugged_ff_merge(user, source_sha, target_branch)
+ OperationService.new(user, self).with_branch(target_branch) do |our_commit|
+ raise ArgumentError, 'Invalid merge target' unless our_commit
+
+ source_sha
+ end
+ rescue Rugged::ReferenceError, InvalidRef
+ raise ArgumentError, 'Invalid merge source'
+ end
+
+ def rugged_add_remote(remote_name, url, mirror_refmap)
+ rugged.remotes.create(remote_name, url)
+
+ set_remote_as_mirror(remote_name, refmap: mirror_refmap) if mirror_refmap
+ rescue Rugged::ConfigError
+ remote_update(remote_name, url: url)
+ end
+
+ def git_delete_refs(*ref_names)
+ instructions = ref_names.map do |ref|
+ "delete #{ref}\x00\x00"
+ end
+
+ message, status = run_git(%w[update-ref --stdin -z]) do |stdin|
+ stdin.write(instructions.join)
+ end
+
+ unless status.zero?
+ raise GitError.new("Could not delete refs #{ref_names}: #{message}")
+ end
+ end
+
+ def gitaly_delete_refs(*ref_names)
+ gitaly_ref_client.delete_refs(refs: ref_names)
+ end
+
+ def rugged_remove_remote(remote_name)
+ # When a remote is deleted all its remote refs are deleted too, but in
+ # the case of mirrors we map its refs (which would usually go under
+ # [remote_name]/) to the top level namespace. We clean the mapping so
+ # those don't get deleted.
+ if rugged.config["remote.#{remote_name}.mirror"]
+ rugged.config.delete("remote.#{remote_name}.fetch")
+ end
+
+ rugged.remotes.delete(remote_name)
+ true
+ rescue Rugged::ConfigError
+ false
+ end
+
+ def rugged_fetch_repository_as_mirror(repository)
+ remote_name = "tmp-#{SecureRandom.hex}"
+ repository = RemoteRepository.new(repository) unless repository.is_a?(RemoteRepository)
+
+ add_remote(remote_name, GITALY_INTERNAL_URL, mirror_refmap: :all_refs)
+ fetch_remote(remote_name, env: repository.fetch_env)
+ ensure
+ remove_remote(remote_name)
+ end
+
+ def rugged_multi_action(
+ user, branch_name, message, actions, author_email, author_name,
+ start_branch_name, start_repository)
+
+ OperationService.new(user, self).with_branch(
+ branch_name,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository
+ ) do |start_commit|
+ index = Gitlab::Git::Index.new(self)
+ parents = []
+
+ if start_commit
+ index.read_tree(start_commit.rugged_commit.tree)
+ parents = [start_commit.sha]
+ end
+
+ actions.each { |opts| index.apply(opts.delete(:action), opts) }
+
+ committer = user_to_committer(user)
+ author = Gitlab::Git.committer_hash(email: author_email, name: author_name) || committer
+ options = {
+ tree: index.write_tree,
+ message: message,
+ parents: parents,
+ author: author,
+ committer: committer
+ }
+
+ create_commit(options)
+ end
+ end
+
+ def fetch_remote(remote_name = 'origin', env: nil)
+ run_git(['fetch', remote_name], env: env).last.zero?
+ end
+
+ def gitaly_can_be_merged?(their_commit, our_commit)
+ !gitaly_conflicts_client(our_commit, their_commit).conflicts?
+ end
+
+ def rugged_can_be_merged?(their_commit, our_commit)
+ !rugged.merge_commits(our_commit, their_commit).conflicts?
+ end
+
+ def gitlab_projects_error
+ raise CommandError, @gitlab_projects.output
+ end
+
+ def find_commits_by_message_by_shelling_out(query, ref, path, limit, offset)
+ ref ||= root_ref
+
+ args = %W(
+ log #{ref} --pretty=%H --skip #{offset}
+ --max-count #{limit} --grep=#{query} --regexp-ignore-case
+ )
+ args = args.concat(%W(-- #{path})) if path.present?
+
+ git_log_results = run_git(args).first.lines
+
+ git_log_results.map { |c| commit(c.chomp) }.compact
+ end
+
+ def find_commits_by_message_by_gitaly(query, ref, path, limit, offset)
+ gitaly_commit_client
+ .commits_by_message(query, revision: ref, path: path, limit: limit, offset: offset)
+ .map { |c| commit(c) }
+ end
+
+ def last_commit_for_path_by_gitaly(sha, path)
+ gitaly_commit_client.last_commit_for_path(sha, path)
+ end
+
+ def last_commit_id_for_path_by_shelling_out(sha, path)
+ args = %W(rev-list --max-count=1 #{sha} -- #{path})
+ run_git_with_timeout(args, Gitlab::Git::Popen::FAST_GIT_PROCESS_TIMEOUT).first.strip
+ end
+
+ def rugged_merge_base(from, to)
+ rugged.merge_base(from, to)
+ rescue Rugged::ReferenceError
+ nil
+ end
+
+ def rugged_commit_count(ref)
+ walker = Rugged::Walker.new(rugged)
+ walker.sorting(Rugged::SORT_TOPO | Rugged::SORT_REVERSE)
+ oid = rugged.rev_parse_oid(ref)
+ walker.push(oid)
+ walker.count
+ rescue Rugged::ReferenceError
+ 0
+ end
+
+ def rev_list_param(spec)
+ spec == :all ? ['--all'] : spec
+ end
end
end
end
diff --git a/lib/gitlab/git/repository_mirroring.rb b/lib/gitlab/git/repository_mirroring.rb
new file mode 100644
index 00000000000..dc424a433fb
--- /dev/null
+++ b/lib/gitlab/git/repository_mirroring.rb
@@ -0,0 +1,91 @@
+module Gitlab
+ module Git
+ module RepositoryMirroring
+ REFMAPS = {
+ # With `:all_refs`, the repository is equivalent to the result of `git clone --mirror`
+ all_refs: '+refs/*:refs/*',
+ heads: '+refs/heads/*:refs/heads/*',
+ tags: '+refs/tags/*:refs/tags/*'
+ }.freeze
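+ # For example (a sketch): set_remote_as_mirror('upstream', refmap: :heads)
+ # configures only +refs/heads/*:refs/heads/*, leaving tags and other refs out
+ # of the mirror fetch.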
+
+ RemoteError = Class.new(StandardError)
+
+ def set_remote_as_mirror(remote_name, refmap: :all_refs)
+ set_remote_refmap(remote_name, refmap)
+
+ rugged.config["remote.#{remote_name}.mirror"] = true
+ rugged.config["remote.#{remote_name}.prune"] = true
+ end
+
+ def remote_tags(remote)
+ # Each line has this format: "dc872e9fa6963f8f03da6c8f6f264d0845d6b092\trefs/tags/v1.10.0\n"
+ # We want to convert it to: [{ 'v1.10.0' => 'dc872e9fa6963f8f03da6c8f6f264d0845d6b092' }, ...]
+ list_remote_tags(remote).map do |line|
+ target, path = line.strip.split("\t")
+
+ # When the remote repo does not have tags.
+ if target.nil? || path.nil?
+ Rails.logger.info "Empty or invalid list of tags for remote: #{remote}. Line: #{line}"
+ return []
+ end
+
+ name = path.split('/', 3).last
+ # We're only interested in tag references
+ # See: http://stackoverflow.com/questions/15472107/when-listing-git-ls-remote-why-theres-after-the-tag-name
+ next if name =~ /\^\{\}\Z/
+
+ target_commit = Gitlab::Git::Commit.find(self, target)
+ Gitlab::Git::Tag.new(self, name, target, target_commit)
+ end.compact
+ end
+
+ def remote_branches(remote_name)
+ branches = []
+
+ rugged.references.each("refs/remotes/#{remote_name}/*").map do |ref|
+ name = ref.name.sub(%r{\Arefs/remotes/#{remote_name}/}, '')
+
+ begin
+ target_commit = Gitlab::Git::Commit.find(self, ref.target)
+ branches << Gitlab::Git::Branch.new(self, name, ref.target, target_commit)
+ rescue Rugged::ReferenceError
+ # Omit invalid branch
+ end
+ end
+
+ branches
+ end
+
+ private
+
+ def set_remote_refmap(remote_name, refmap)
+ Array(refmap).each_with_index do |refspec, i|
+ refspec = REFMAPS[refspec] || refspec
+
+ # We need multiple `fetch` entries, but Rugged only allows replacing a config, not adding to it.
+ # To make sure we start from scratch, we set the first using rugged, and use `git` for any others
+ if i == 0
+ rugged.config["remote.#{remote_name}.fetch"] = refspec
+ else
+ run_git(%W[config --add remote.#{remote_name}.fetch #{refspec}])
+ end
+ end
+ end
+
+ def list_remote_tags(remote)
+ tag_list, exit_code, error = nil
+ cmd = %W(#{Gitlab.config.git.bin_path} --git-dir=#{path} ls-remote --tags #{remote})
+
+ Open3.popen3(*cmd) do |stdin, stdout, stderr, wait_thr|
+ tag_list = stdout.read
+ error = stderr.read
+ exit_code = wait_thr.value.exitstatus
+ end
+
+ raise RemoteError, error unless exit_code.zero?
+
+ tag_list.split("\n")
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/rev_list.rb b/lib/gitlab/git/rev_list.rb
index e0943d3a3eb..38c3a55f96f 100644
--- a/lib/gitlab/git/rev_list.rb
+++ b/lib/gitlab/git/rev_list.rb
@@ -5,44 +5,73 @@ module Gitlab
class RevList
include Gitlab::Git::Popen
- attr_reader :oldrev, :newrev, :path_to_repo
+ attr_reader :oldrev, :newrev, :repository
- def initialize(path_to_repo:, newrev:, oldrev: nil)
+ def initialize(repository, newrev:, oldrev: nil)
@oldrev = oldrev
@newrev = newrev
- @path_to_repo = path_to_repo
+ @repository = repository
end
- # This method returns an array of new references
+ # This method returns an array of new commit references
def new_refs
- execute([*base_args, newrev, '--not', '--all'])
+ repository.rev_list(including: newrev, excluding: :all).split("\n")
+ end
+
+ # Finds newly added objects
+ # Returns an array of shas
+ #
+ # Can skip objects which do not have a path by passing require_path: true
+ # This skips commit objects and root trees, which might not be needed when
+ # looking for blobs
+ #
+ # When given a block it will yield objects as a lazy enumerator so
+ # the caller can limit work done instead of processing megabytes of data
+ def new_objects(require_path: nil, not_in: nil, &lazy_block)
+ opts = {
+ including: newrev,
+ excluding: not_in.nil? ? :all : not_in,
+ require_path: require_path
+ }
+
+ get_objects(opts, &lazy_block)
+ end
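+ # e.g. (sketch): new_objects(require_path: true) { |objects| objects.each { |sha| process(sha) } }
+ # yields only object ids that carry a path (blobs and nested trees), lazily,
+ # so the caller can stop early; `process` is a hypothetical consumer.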
+
+ def all_objects(require_path: nil, &lazy_block)
+ get_objects(including: :all, require_path: require_path, &lazy_block)
end
# This method returns an array of missed references
#
# Should become obsolete after https://gitlab.com/gitlab-org/gitaly/issues/348.
def missed_ref
- execute([*base_args, '--max-count=1', oldrev, "^#{newrev}"])
+ repository.missed_ref(oldrev, newrev).split("\n")
end
private
def execute(args)
- output, status = popen(args, nil, Gitlab::Git::Env.all.stringify_keys)
+ repository.rev_list(args).split("\n")
+ end
- unless status.zero?
- raise "Got a non-zero exit code while calling out `#{args.join(' ')}`."
- end
+ def get_objects(including: [], excluding: [], require_path: nil)
+ opts = { including: including, excluding: excluding, objects: true }
+
+ repository.rev_list(opts) do |lazy_output|
+ objects = objects_from_output(lazy_output, require_path: require_path)
- output.split("\n")
+ yield(objects)
+ end
end
- def base_args
- [
- Gitlab.config.git.bin_path,
- "--git-dir=#{path_to_repo}",
- 'rev-list'
- ]
+ def objects_from_output(object_output, require_path: nil)
+ object_output.map do |output_line|
+ sha, path = output_line.split(' ', 2)
+
+ next if require_path && path.to_s.empty?
+
+ sha
+ end.reject(&:nil?)
end
end
end
diff --git a/lib/gitlab/git/storage.rb b/lib/gitlab/git/storage.rb
index e28be4b8a38..5933312b0b5 100644
--- a/lib/gitlab/git/storage.rb
+++ b/lib/gitlab/git/storage.rb
@@ -11,8 +11,11 @@ module Gitlab
end
CircuitOpen = Class.new(Inaccessible)
+ Misconfiguration = Class.new(Inaccessible)
+ Failing = Class.new(Inaccessible)
REDIS_KEY_PREFIX = 'storage_accessible:'.freeze
+ REDIS_KNOWN_KEYS = "#{REDIS_KEY_PREFIX}known_keys_set".freeze
def self.redis
Gitlab::Redis::SharedState
diff --git a/lib/gitlab/git/storage/checker.rb b/lib/gitlab/git/storage/checker.rb
new file mode 100644
index 00000000000..d3c37f82101
--- /dev/null
+++ b/lib/gitlab/git/storage/checker.rb
@@ -0,0 +1,120 @@
+module Gitlab
+ module Git
+ module Storage
+ class Checker
+ include CircuitBreakerSettings
+
+ attr_reader :storage_path, :storage, :hostname, :logger
+ METRICS_MUTEX = Mutex.new
+ STORAGE_TIMING_BUCKETS = [0.1, 0.15, 0.25, 0.33, 0.5, 1, 1.5, 2.5, 5, 10, 15].freeze
+
+ def self.check_all(logger = Rails.logger)
+ threads = Gitlab.config.repositories.storages.keys.map do |storage_name|
+ Thread.new do
+ Thread.current[:result] = new(storage_name, logger).check_with_lease
+ end
+ end
+
+ threads.map do |thread|
+ thread.join
+ thread[:result]
+ end
+ end
+
+ def self.check_histogram
+ @check_histogram ||=
+ METRICS_MUTEX.synchronize do
+ @check_histogram || Gitlab::Metrics.histogram(:circuitbreaker_storage_check_duration_seconds,
+ 'Storage check time in seconds',
+ {},
+ STORAGE_TIMING_BUCKETS
+ )
+ end
+ end
+
+ def initialize(storage, logger = Rails.logger)
+ @storage = storage
+ config = Gitlab.config.repositories.storages[@storage]
+ @storage_path = config['path']
+ @logger = logger
+
+ @hostname = Gitlab::Environment.hostname
+ end
+
+ def check_with_lease
+ lease_key = "storage_check:#{cache_key}"
+ lease = Gitlab::ExclusiveLease.new(lease_key, timeout: storage_timeout)
+ result = { storage: storage, success: nil }
+
+ if uuid = lease.try_obtain
+ result[:success] = check
+
+ Gitlab::ExclusiveLease.cancel(lease_key, uuid)
+ else
+ logger.warn("#{hostname}: #{storage}: Skipping check, previous check still running")
+ end
+
+ result
+ end
+
+ def check
+ if perform_access_check
+ track_storage_accessible
+ true
+ else
+ track_storage_inaccessible
+ logger.error("#{hostname}: #{storage}: Not accessible.")
+ false
+ end
+ end
+
+ private
+
+ def perform_access_check
+ start_time = Gitlab::Metrics::System.monotonic_time
+
+ Gitlab::Git::Storage::ForkedStorageCheck.storage_available?(storage_path, storage_timeout, access_retries)
+ ensure
+ execution_time = Gitlab::Metrics::System.monotonic_time - start_time
+ self.class.check_histogram.observe({ storage: storage }, execution_time)
+ end
+
+ def track_storage_inaccessible
+ first_failure = current_failure_info.first_failure || Time.now
+ last_failure = Time.now
+
+ Gitlab::Git::Storage.redis.with do |redis|
+ redis.pipelined do
+ redis.hset(cache_key, :first_failure, first_failure.to_i)
+ redis.hset(cache_key, :last_failure, last_failure.to_i)
+ redis.hincrby(cache_key, :failure_count, 1)
+ redis.expire(cache_key, failure_reset_time)
+ maintain_known_keys(redis)
+ end
+ end
+ end
+
+ def track_storage_accessible
+ Gitlab::Git::Storage.redis.with do |redis|
+ redis.pipelined do
+ redis.hset(cache_key, :first_failure, nil)
+ redis.hset(cache_key, :last_failure, nil)
+ redis.hset(cache_key, :failure_count, 0)
+ maintain_known_keys(redis)
+ end
+ end
+ end
+
+ def maintain_known_keys(redis)
+ expire_time = Time.now.to_i + failure_reset_time
+ redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, expire_time, cache_key)
+ redis.zremrangebyscore(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, '-inf', Time.now.to_i)
+ end
+
+ def current_failure_info
+ FailureInfo.load(cache_key)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/storage/circuit_breaker.rb b/lib/gitlab/git/storage/circuit_breaker.rb
index 9ea9367d4b7..898bb1b65be 100644
--- a/lib/gitlab/git/storage/circuit_breaker.rb
+++ b/lib/gitlab/git/storage/circuit_breaker.rb
@@ -2,53 +2,43 @@ module Gitlab
module Git
module Storage
class CircuitBreaker
- FailureInfo = Struct.new(:last_failure, :failure_count)
+ include CircuitBreakerSettings
attr_reader :storage,
- :hostname,
- :storage_path,
- :failure_count_threshold,
- :failure_wait_time,
- :failure_reset_time,
- :storage_timeout
+ :hostname
- delegate :last_failure, :failure_count, to: :failure_info
-
- def self.reset_all!
- pattern = "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}*"
-
- Gitlab::Git::Storage.redis.with do |redis|
- all_storage_keys = redis.keys(pattern)
- redis.del(*all_storage_keys) unless all_storage_keys.empty?
- end
-
- RequestStore.delete(:circuitbreaker_cache)
- end
+ delegate :last_failure, :failure_count, :no_failures?,
+ to: :failure_info
def self.for_storage(storage)
cached_circuitbreakers = RequestStore.fetch(:circuitbreaker_cache) do
Hash.new do |hash, storage_name|
- hash[storage_name] = new(storage_name)
+ hash[storage_name] = build(storage_name)
end
end
cached_circuitbreakers[storage]
end
- def initialize(storage, hostname = Gitlab::Environment.hostname)
+ def self.build(storage, hostname = Gitlab::Environment.hostname)
+ config = Gitlab.config.repositories.storages[storage]
+
+ if !config.present?
+ NullCircuitBreaker.new(storage, hostname, error: Misconfiguration.new("Storage '#{storage}' is not configured"))
+ elsif !config['path'].present?
+ NullCircuitBreaker.new(storage, hostname, error: Misconfiguration.new("Path for storage '#{storage}' is not configured"))
+ else
+ new(storage, hostname)
+ end
+ end
+
+ def initialize(storage, hostname)
@storage = storage
@hostname = hostname
-
- config = Gitlab.config.repositories.storages[@storage]
- @storage_path = config['path']
- @failure_count_threshold = config['failure_count_threshold']
- @failure_wait_time = config['failure_wait_time']
- @failure_reset_time = config['failure_reset_time']
- @storage_timeout = config['storage_timeout']
end
def perform
- return yield unless Feature.enabled?('git_storage_circuit_breaker')
+ return yield unless enabled?
check_storage_accessible!
@@ -58,85 +48,28 @@ module Gitlab
def circuit_broken?
return false if no_failures?
- recent_failure = last_failure > failure_wait_time.seconds.ago
- too_many_failures = failure_count > failure_count_threshold
-
- recent_failure || too_many_failures
- end
-
- # Memoizing the `storage_available` call means we only do it once per
- # request when the storage is available.
- #
- # When the storage appears not available, and the memoized value is `false`
- # we might want to try again.
- def storage_available?
- return @storage_available if @storage_available
-
- if @storage_available = Gitlab::Git::Storage::ForkedStorageCheck
- .storage_available?(storage_path, storage_timeout)
- track_storage_accessible
- else
- track_storage_inaccessible
- end
-
- @storage_available
- end
-
- def check_storage_accessible!
- if circuit_broken?
- raise Gitlab::Git::Storage::CircuitOpen.new("Circuit for #{storage} is broken", failure_wait_time)
- end
-
- unless storage_available?
- raise Gitlab::Git::Storage::Inaccessible.new("#{storage} not accessible", failure_wait_time)
- end
- end
-
- def no_failures?
- last_failure.blank? && failure_count == 0
- end
-
- def track_storage_inaccessible
- @failure_info = FailureInfo.new(Time.now, failure_count + 1)
-
- Gitlab::Git::Storage.redis.with do |redis|
- redis.pipelined do
- redis.hset(cache_key, :last_failure, last_failure.to_i)
- redis.hincrby(cache_key, :failure_count, 1)
- redis.expire(cache_key, failure_reset_time)
- end
- end
+ failure_count > failure_count_threshold
end
- def track_storage_accessible
- return if no_failures?
+ private
- @failure_info = FailureInfo.new(nil, 0)
-
- Gitlab::Git::Storage.redis.with do |redis|
- redis.pipelined do
- redis.hset(cache_key, :last_failure, nil)
- redis.hset(cache_key, :failure_count, 0)
- end
- end
+ # The circuitbreaker can be enabled for the entire fleet using a Feature
+ # flag.
+ #
+ # Enabling it for a single host can be done by setting the
+ # `GIT_STORAGE_CIRCUIT_BREAKER` environment variable.
+ def enabled?
+ ENV['GIT_STORAGE_CIRCUIT_BREAKER'].present? || Feature.enabled?('git_storage_circuit_breaker')
end
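+ # e.g. (sketch): exporting GIT_STORAGE_CIRCUIT_BREAKER=1 before starting a node
+ # turns the breaker on for that host even while the feature flag is disabled.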
def failure_info
- @failure_info ||= get_failure_info
+ @failure_info ||= FailureInfo.load(cache_key)
end
- def get_failure_info
- last_failure, failure_count = Gitlab::Git::Storage.redis.with do |redis|
- redis.hmget(cache_key, :last_failure, :failure_count)
+ def check_storage_accessible!
+ if circuit_broken?
+ raise Gitlab::Git::Storage::CircuitOpen.new("Circuit for #{storage} is broken", failure_reset_time)
end
-
- last_failure = Time.at(last_failure.to_i) if last_failure.present?
-
- FailureInfo.new(last_failure, failure_count.to_i)
- end
-
- def cache_key
- @cache_key ||= "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}#{storage}:#{hostname}"
end
end
end
diff --git a/lib/gitlab/git/storage/circuit_breaker_settings.rb b/lib/gitlab/git/storage/circuit_breaker_settings.rb
new file mode 100644
index 00000000000..c9e225f187d
--- /dev/null
+++ b/lib/gitlab/git/storage/circuit_breaker_settings.rb
@@ -0,0 +1,37 @@
+module Gitlab
+ module Git
+ module Storage
+ module CircuitBreakerSettings
+ def failure_count_threshold
+ application_settings.circuitbreaker_failure_count_threshold
+ end
+
+ def failure_reset_time
+ application_settings.circuitbreaker_failure_reset_time
+ end
+
+ def storage_timeout
+ application_settings.circuitbreaker_storage_timeout
+ end
+
+ def access_retries
+ application_settings.circuitbreaker_access_retries
+ end
+
+ def check_interval
+ application_settings.circuitbreaker_check_interval
+ end
+
+ def cache_key
+ @cache_key ||= "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}#{storage}:#{hostname}"
+ end
+
+ private
+
+ def application_settings
+ Gitlab::CurrentSettings.current_application_settings
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/storage/failure_info.rb b/lib/gitlab/git/storage/failure_info.rb
new file mode 100644
index 00000000000..387279c110d
--- /dev/null
+++ b/lib/gitlab/git/storage/failure_info.rb
@@ -0,0 +1,39 @@
+module Gitlab
+ module Git
+ module Storage
+ class FailureInfo
+ attr_accessor :first_failure, :last_failure, :failure_count
+
+ def self.reset_all!
+ Gitlab::Git::Storage.redis.with do |redis|
+ all_storage_keys = redis.zrange(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, -1)
+ redis.del(*all_storage_keys) unless all_storage_keys.empty?
+ end
+
+ RequestStore.delete(:circuitbreaker_cache)
+ end
+
+ def self.load(cache_key)
+ first_failure, last_failure, failure_count = Gitlab::Git::Storage.redis.with do |redis|
+ redis.hmget(cache_key, :first_failure, :last_failure, :failure_count)
+ end
+
+ last_failure = Time.at(last_failure.to_i) if last_failure.present?
+ first_failure = Time.at(first_failure.to_i) if first_failure.present?
+
+ new(first_failure, last_failure, failure_count.to_i)
+ end
+
+ def initialize(first_failure, last_failure, failure_count)
+ @first_failure = first_failure
+ @last_failure = last_failure
+ @failure_count = failure_count
+ end
+
+ def no_failures?
+ first_failure.blank? && last_failure.blank? && failure_count == 0
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/storage/forked_storage_check.rb b/lib/gitlab/git/storage/forked_storage_check.rb
index 91d8241f17b..0a4e557b59b 100644
--- a/lib/gitlab/git/storage/forked_storage_check.rb
+++ b/lib/gitlab/git/storage/forked_storage_check.rb
@@ -4,8 +4,17 @@ module Gitlab
module ForkedStorageCheck
extend self
- def storage_available?(path, timeout_seconds = 5)
- status = timeout_check(path, timeout_seconds)
+ def storage_available?(path, timeout_seconds = 5, retries = 1)
+ partial_timeout = timeout_seconds / retries
+ status = timeout_check(path, partial_timeout)
+
+ # If the status check did not succeed the first time, we retry a few
+ # more times to avoid one-off failures
+ current_attempts = 1
+ while current_attempts < retries && !status.success?
+ status = timeout_check(path, partial_timeout)
+ current_attempts += 1
+ end
status.success?
end
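+ # e.g. (sketch, hypothetical path): storage_available?('/var/opt/gitlab/git-data', 30, 3)
+ # gives each of the up-to-3 attempts a 10-second budget (timeout_seconds / retries).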
@@ -18,6 +27,7 @@ module Gitlab
status = nil
while status.nil?
+
if deadline > Time.now.utc
sleep(wait_time)
_pid, status = Process.wait2(filesystem_check_pid, Process::WNOHANG)
diff --git a/lib/gitlab/git/storage/health.rb b/lib/gitlab/git/storage/health.rb
index 2d723147f4f..90bbe85fd37 100644
--- a/lib/gitlab/git/storage/health.rb
+++ b/lib/gitlab/git/storage/health.rb
@@ -4,8 +4,8 @@ module Gitlab
class Health
attr_reader :storage_name, :info
- def self.pattern_for_storage(storage_name)
- "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}#{storage_name}:*"
+ def self.prefix_for_storage(storage_name)
+ "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}#{storage_name}:"
end
def self.for_all_storages
@@ -23,26 +23,25 @@ module Gitlab
end
end
- def self.all_keys_for_storages(storage_names, redis)
+ private_class_method def self.all_keys_for_storages(storage_names, redis)
keys_per_storage = {}
+ all_keys = redis.zrange(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, -1)
- redis.pipelined do
- storage_names.each do |storage_name|
- pattern = pattern_for_storage(storage_name)
+ storage_names.each do |storage_name|
+ prefix = prefix_for_storage(storage_name)
- keys_per_storage[storage_name] = redis.keys(pattern)
- end
+ keys_per_storage[storage_name] = all_keys.select { |key| key.starts_with?(prefix) }
end
keys_per_storage
end
- def self.load_for_keys(keys_per_storage, redis)
+ private_class_method def self.load_for_keys(keys_per_storage, redis)
info_for_keys = {}
redis.pipelined do
keys_per_storage.each do |storage_name, keys_future|
- info_for_storage = keys_future.value.map do |key|
+ info_for_storage = keys_future.map do |key|
{ name: key, failure_count: redis.hget(key, :failure_count) }
end
@@ -78,7 +77,7 @@ module Gitlab
def failing_circuit_breakers
@failing_circuit_breakers ||= failing_on_hosts.map do |hostname|
- CircuitBreaker.new(storage_name, hostname)
+ CircuitBreaker.build(storage_name, hostname)
end
end
diff --git a/lib/gitlab/git/storage/null_circuit_breaker.rb b/lib/gitlab/git/storage/null_circuit_breaker.rb
new file mode 100644
index 00000000000..261c936c689
--- /dev/null
+++ b/lib/gitlab/git/storage/null_circuit_breaker.rb
@@ -0,0 +1,50 @@
+module Gitlab
+ module Git
+ module Storage
+ class NullCircuitBreaker
+ include CircuitBreakerSettings
+
+ # These will have actual values
+ attr_reader :storage,
+ :hostname
+
+ # These will always have nil values
+ attr_reader :storage_path
+
+ delegate :last_failure, :failure_count, :no_failures?,
+ to: :failure_info
+
+ def initialize(storage, hostname, error: nil)
+ @storage = storage
+ @hostname = hostname
+ @error = error
+ end
+
+ def perform
+ @error ? raise(@error) : yield
+ end
+
+ def circuit_broken?
+ !!@error
+ end
+
+ def backing_off?
+ false
+ end
+
+ def failure_info
+ @failure_info ||=
+ if circuit_broken?
+ Gitlab::Git::Storage::FailureInfo.new(Time.now,
+ Time.now,
+ failure_count_threshold)
+ else
+ Gitlab::Git::Storage::FailureInfo.new(nil,
+ nil,
+ 0)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/tag.rb b/lib/gitlab/git/tag.rb
index bc4e160dce9..8a8f7b051ed 100644
--- a/lib/gitlab/git/tag.rb
+++ b/lib/gitlab/git/tag.rb
@@ -1,5 +1,3 @@
-# Gitaly note: JV: no RPC's here.
-#
module Gitlab
module Git
class Tag < Ref
diff --git a/lib/gitlab/git/tree.rb b/lib/gitlab/git/tree.rb
index 5cf336af3c6..b6ceb542dd1 100644
--- a/lib/gitlab/git/tree.rb
+++ b/lib/gitlab/git/tree.rb
@@ -14,14 +14,14 @@ module Gitlab
# Uses rugged for raw objects
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/320
- def where(repository, sha, path = nil)
+ def where(repository, sha, path = nil, recursive = false)
path = nil if path == '' || path == '/'
Gitlab::GitalyClient.migrate(:tree_entries) do |is_enabled|
if is_enabled
- repository.gitaly_commit_client.tree_entries(repository, sha, path)
+ repository.gitaly_commit_client.tree_entries(repository, sha, path, recursive)
else
- tree_entries_from_rugged(repository, sha, path)
+ tree_entries_from_rugged(repository, sha, path, recursive)
end
end
end
@@ -57,7 +57,22 @@ module Gitlab
end
end
- def tree_entries_from_rugged(repository, sha, path)
+ def tree_entries_from_rugged(repository, sha, path, recursive)
+ current_path_entries = get_tree_entries_from_rugged(repository, sha, path)
+ ordered_entries = []
+
+ current_path_entries.each do |entry|
+ ordered_entries << entry
+
+ if recursive && entry.dir?
+ ordered_entries.concat(tree_entries_from_rugged(repository, sha, entry.path, true))
+ end
+ end
+
+ ordered_entries
+ end
+
+ def get_tree_entries_from_rugged(repository, sha, path)
commit = repository.lookup(sha)
root_tree = commit.tree
@@ -83,6 +98,8 @@ module Gitlab
commit_id: sha
)
end
+ rescue Rugged::ReferenceError
+ []
end
end
diff --git a/lib/gitlab/git/user.rb b/lib/gitlab/git/user.rb
index ea634d39668..e573cd0e143 100644
--- a/lib/gitlab/git/user.rb
+++ b/lib/gitlab/git/user.rb
@@ -1,24 +1,34 @@
module Gitlab
module Git
class User
- attr_reader :name, :email, :gl_id
+ attr_reader :username, :name, :email, :gl_id
def self.from_gitlab(gitlab_user)
- new(gitlab_user.name, gitlab_user.email, Gitlab::GlId.gl_id(gitlab_user))
+ new(gitlab_user.username, gitlab_user.name, gitlab_user.email, Gitlab::GlId.gl_id(gitlab_user))
end
def self.from_gitaly(gitaly_user)
- new(gitaly_user.name, gitaly_user.email, gitaly_user.gl_id)
+ new(
+ gitaly_user.gl_username,
+ Gitlab::EncodingHelper.encode!(gitaly_user.name),
+ Gitlab::EncodingHelper.encode!(gitaly_user.email),
+ gitaly_user.gl_id
+ )
end
- def initialize(name, email, gl_id)
+ def initialize(username, name, email, gl_id)
+ @username = username
@name = name
@email = email
@gl_id = gl_id
end
def ==(other)
- [name, email, gl_id] == [other.name, other.email, other.gl_id]
+ [username, name, email, gl_id] == [other.username, other.name, other.email, other.gl_id]
+ end
+
+ def to_gitaly
+ Gitaly::User.new(gl_username: username, gl_id: gl_id, name: name.b, email: email.b)
end
end
end
diff --git a/lib/gitlab/git/wiki.rb b/lib/gitlab/git/wiki.rb
new file mode 100644
index 00000000000..52b44b9b3c5
--- /dev/null
+++ b/lib/gitlab/git/wiki.rb
@@ -0,0 +1,296 @@
+module Gitlab
+ module Git
+ class Wiki
+ DuplicatePageError = Class.new(StandardError)
+
+ CommitDetails = Struct.new(:name, :email, :message) do
+ def to_h
+ { name: name, email: email, message: message }
+ end
+ end
+ PageBlob = Struct.new(:name)
+
+ attr_reader :repository
+
+ def self.default_ref
+ 'master'
+ end
+
+ # Initialize with a Gitlab::Git::Repository instance
+ def initialize(repository)
+ @repository = repository
+ end
+
+ def repository_exists?
+ @repository.exists?
+ end
+
+ def write_page(name, format, content, commit_details)
+ @repository.gitaly_migrate(:wiki_write_page) do |is_enabled|
+ if is_enabled
+ gitaly_write_page(name, format, content, commit_details)
+ gollum_wiki.clear_cache
+ else
+ gollum_write_page(name, format, content, commit_details)
+ end
+ end
+ end
+
+ def delete_page(page_path, commit_details)
+ @repository.gitaly_migrate(:wiki_delete_page) do |is_enabled|
+ if is_enabled
+ gitaly_delete_page(page_path, commit_details)
+ gollum_wiki.clear_cache
+ else
+ gollum_delete_page(page_path, commit_details)
+ end
+ end
+ end
+
+ def update_page(page_path, title, format, content, commit_details)
+ @repository.gitaly_migrate(:wiki_update_page) do |is_enabled|
+ if is_enabled
+ gitaly_update_page(page_path, title, format, content, commit_details)
+ gollum_wiki.clear_cache
+ else
+ gollum_update_page(page_path, title, format, content, commit_details)
+ end
+ end
+ end
+
+ def pages(limit: nil)
+ @repository.gitaly_migrate(:wiki_get_all_pages) do |is_enabled|
+ if is_enabled
+ gitaly_get_all_pages
+ else
+ gollum_get_all_pages(limit: limit)
+ end
+ end
+ end
+
+ def page(title:, version: nil, dir: nil)
+ @repository.gitaly_migrate(:wiki_find_page) do |is_enabled|
+ if is_enabled
+ gitaly_find_page(title: title, version: version, dir: dir)
+ else
+ gollum_find_page(title: title, version: version, dir: dir)
+ end
+ end
+ end
+
+ def file(name, version)
+ @repository.gitaly_migrate(:wiki_find_file) do |is_enabled|
+ if is_enabled
+ gitaly_find_file(name, version)
+ else
+ gollum_find_file(name, version)
+ end
+ end
+ end
+
+ # options:
+ # :page - The Integer page number.
+ # :per_page - The number of items per page.
+ # :limit - Total number of items to return.
+ def page_versions(page_path, options = {})
+ @repository.gitaly_migrate(:wiki_page_versions) do |is_enabled|
+ if is_enabled
+ versions = gitaly_wiki_client.page_versions(page_path, options)
+
+ # Gitaly uses gollum-lib to get the versions. Gollum defaults to 20 items
+ # per page and still fetches 20 even when `limit` or `per_page` is smaller.
+ # Slicing trims the result to the expected number of items.
+ slice_bound = options[:limit] || options[:per_page] || Gollum::Page.per_page
+ versions[0..slice_bound]
+ else
+ current_page = gollum_page_by_path(page_path)
+
+ commits_from_page(current_page, options).map do |gitlab_git_commit|
+ gollum_page = gollum_wiki.page(current_page.title, gitlab_git_commit.id)
+ Gitlab::Git::WikiPageVersion.new(gitlab_git_commit, gollum_page&.format)
+ end
+ end
+ end
+ end
+
+ def count_page_versions(page_path)
+ @repository.count_commits(ref: 'HEAD', path: page_path)
+ end
+
+ def preview_slug(title, format)
+ # Adapted from gollum gem (Gollum::Wiki#preview_page) to avoid
+ # using Rugged through a Gollum::Wiki instance
+ page_class = Gollum::Page
+ page = page_class.new(nil)
+ ext = page_class.format_to_ext(format.to_sym)
+ name = page_class.cname(title) + '.' + ext
+ blob = PageBlob.new(name)
+ page.populate(blob)
+ page.url_path
+ end
+
+ def page_formatted_data(title:, dir: nil, version: nil)
+ version = version&.id
+
+ @repository.gitaly_migrate(:wiki_page_formatted_data) do |is_enabled|
+ if is_enabled
+ gitaly_wiki_client.get_formatted_data(title: title, dir: dir, version: version)
+ else
+ # We don't use #page because if wiki_find_page feature is enabled, we would
+ # get a page without formatted_data.
+ gollum_find_page(title: title, dir: dir, version: version)&.formatted_data
+ end
+ end
+ end
+
+ private
+
+ # options:
+ # :page - The Integer page number.
+ # :per_page - The number of items per page.
+ # :limit - Total number of items to return.
+ def commits_from_page(gollum_page, options = {})
+ unless options[:limit]
+ options[:offset] = ([1, options.delete(:page).to_i].max - 1) * Gollum::Page.per_page
+ options[:limit] = (options.delete(:per_page) || Gollum::Page.per_page).to_i
+ end
+
+ @repository.log(ref: gollum_page.last_version.id,
+ path: gollum_page.path,
+ limit: options[:limit],
+ offset: options[:offset])
+ end
+
+ def gollum_wiki
+ @gollum_wiki ||= Gollum::Wiki.new(@repository.path)
+ end
+
+ def gollum_page_by_path(page_path)
+ page_name = Gollum::Page.canonicalize_filename(page_path)
+ page_dir = File.split(page_path).first
+
+ gollum_wiki.paged(page_name, page_dir)
+ end
+
+ def new_page(gollum_page)
+ Gitlab::Git::WikiPage.new(gollum_page, new_version(gollum_page, gollum_page.version.id))
+ end
+
+ def new_version(gollum_page, commit_id)
+ Gitlab::Git::WikiPageVersion.new(version(commit_id), gollum_page&.format)
+ end
+
+ def version(commit_id)
+ commit_find_proc = -> { Gitlab::Git::Commit.find(@repository, commit_id) }
+
+ if RequestStore.active?
+ RequestStore.fetch([:wiki_version_commit, commit_id]) { commit_find_proc.call }
+ else
+ commit_find_proc.call
+ end
+ end
+
+ def assert_type!(object, klass)
+ unless object.is_a?(klass)
+ raise ArgumentError, "expected a #{klass}, got #{object.inspect}"
+ end
+ end
+
+ def gitaly_wiki_client
+ @gitaly_wiki_client ||= Gitlab::GitalyClient::WikiService.new(@repository)
+ end
+
+ def gollum_write_page(name, format, content, commit_details)
+ assert_type!(format, Symbol)
+ assert_type!(commit_details, CommitDetails)
+
+ filename = File.basename(name)
+ dir = (tmp_dir = File.dirname(name)) == '.' ? '' : tmp_dir
+
+ gollum_wiki.write_page(filename, format, content, commit_details.to_h, dir)
+
+ nil
+ rescue Gollum::DuplicatePageError => e
+ raise Gitlab::Git::Wiki::DuplicatePageError, e.message
+ end
+
+ def gollum_delete_page(page_path, commit_details)
+ assert_type!(commit_details, CommitDetails)
+
+ gollum_wiki.delete_page(gollum_page_by_path(page_path), commit_details.to_h)
+ nil
+ end
+
+ def gollum_update_page(page_path, title, format, content, commit_details)
+ assert_type!(format, Symbol)
+ assert_type!(commit_details, CommitDetails)
+
+ page = gollum_page_by_path(page_path)
+ committer = Gollum::Committer.new(page.wiki, commit_details.to_h)
+
+ # Instead of performing two renames if the title has changed,
+ # update_page only updates the format and content, while
+ # rename_page handles everything related to moving/renaming
+ gollum_wiki.update_page(page, page.name, format, content, committer: committer)
+ gollum_wiki.rename_page(page, title, committer: committer)
+ committer.commit
+ nil
+ end
+
+ def gollum_find_page(title:, version: nil, dir: nil)
+ if version
+ version = Gitlab::Git::Commit.find(@repository, version).id
+ end
+
+ gollum_page = gollum_wiki.page(title, version, dir)
+ return unless gollum_page
+
+ new_page(gollum_page)
+ end
+
+ def gollum_find_file(name, version)
+ version ||= self.class.default_ref
+ gollum_file = gollum_wiki.file(name, version)
+ return unless gollum_file
+
+ Gitlab::Git::WikiFile.new(gollum_file)
+ end
+
+ def gollum_get_all_pages(limit: nil)
+ gollum_wiki.pages(limit: limit).map { |gollum_page| new_page(gollum_page) }
+ end
+
+ def gitaly_write_page(name, format, content, commit_details)
+ gitaly_wiki_client.write_page(name, format, content, commit_details)
+ end
+
+ def gitaly_update_page(page_path, title, format, content, commit_details)
+ gitaly_wiki_client.update_page(page_path, title, format, content, commit_details)
+ end
+
+ def gitaly_delete_page(page_path, commit_details)
+ gitaly_wiki_client.delete_page(page_path, commit_details)
+ end
+
+ def gitaly_find_page(title:, version: nil, dir: nil)
+ wiki_page, version = gitaly_wiki_client.find_page(title: title, version: version, dir: dir)
+ return unless wiki_page
+
+ Gitlab::Git::WikiPage.new(wiki_page, version)
+ end
+
+ def gitaly_find_file(name, version)
+ wiki_file = gitaly_wiki_client.find_file(name, version)
+ return unless wiki_file
+
+ Gitlab::Git::WikiFile.new(wiki_file)
+ end
+
+ def gitaly_get_all_pages
+ gitaly_wiki_client.get_all_pages.map do |wiki_page, version|
+ Gitlab::Git::WikiPage.new(wiki_page, version)
+ end
+ end
+ end
+ end
+end
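A worked example of the pagination arithmetic used by commits_from_page above, assuming Gollum::Page.per_page is 20 (gollum-lib's default):

    options = { page: 3, per_page: 10 }
    offset  = ([1, options.delete(:page).to_i].max - 1) * 20  # => 40
    limit   = (options.delete(:per_page) || 20).to_i          # => 10
    # Page 3 of a wiki page's history therefore becomes repository.log(offset: 40, limit: 10).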
diff --git a/lib/gitlab/git/wiki_file.rb b/lib/gitlab/git/wiki_file.rb
new file mode 100644
index 00000000000..84335aca4bc
--- /dev/null
+++ b/lib/gitlab/git/wiki_file.rb
@@ -0,0 +1,20 @@
+module Gitlab
+ module Git
+ class WikiFile
+ attr_reader :mime_type, :raw_data, :name, :path
+
+ # This class is meant to be serializable so that it can be constructed
+ # by Gitaly and sent over the network to GitLab.
+ #
+ # Because Gollum::File is not serializable we must get all the data from
+ # 'gollum_file' during initialization, and NOT store it in an instance
+ # variable.
+ def initialize(gollum_file)
+ @mime_type = gollum_file.mime_type
+ @raw_data = gollum_file.raw_data
+ @name = gollum_file.name
+ @path = gollum_file.path
+ end
+ end
+ end
+end
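A hedged illustration of the serializability constraint described in the comment above: because every attribute is copied out of the Gollum object at construction time, a WikiFile survives a Marshal round trip, which a live Gollum::File would not; 'gollum_file' is a stand-in for an object obtained from Gollum.

    wiki_file = Gitlab::Git::WikiFile.new(gollum_file)
    copy = Marshal.load(Marshal.dump(wiki_file))
    copy.name == wiki_file.name # => true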
diff --git a/lib/gitlab/git/wiki_page.rb b/lib/gitlab/git/wiki_page.rb
new file mode 100644
index 00000000000..669ae11a423
--- /dev/null
+++ b/lib/gitlab/git/wiki_page.rb
@@ -0,0 +1,40 @@
+module Gitlab
+ module Git
+ class WikiPage
+ attr_reader :url_path, :title, :format, :path, :version, :raw_data, :name, :text_data, :historical, :formatted_data
+
+ # This class is meant to be serializable so that it can be constructed
+ # by Gitaly and sent over the network to GitLab.
+ #
+ # Because Gollum::Page is not serializable we must get all the data from
+ # 'gollum_page' during initialization, and NOT store it in an instance
+ # variable.
+ #
+ # Note that 'version' is a WikiPageVersion instance which is itself
+ # serializable. That means it's OK to store 'version' in an instance
+ # variable.
+ def initialize(gollum_page, version)
+ @url_path = gollum_page.url_path
+ @title = gollum_page.title
+ @format = gollum_page.format
+ @path = gollum_page.path
+ @raw_data = gollum_page.raw_data
+ @name = gollum_page.name
+ @historical = gollum_page.historical?
+ @formatted_data = gollum_page.formatted_data if gollum_page.is_a?(Gollum::Page)
+
+ @version = version
+ end
+
+ def historical?
+ @historical
+ end
+
+ def text_data
+ return @text_data if defined?(@text_data)
+
+ @text_data = @raw_data && Gitlab::EncodingHelper.encode!(@raw_data.dup)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/wiki_page_version.rb b/lib/gitlab/git/wiki_page_version.rb
new file mode 100644
index 00000000000..55f1afedcab
--- /dev/null
+++ b/lib/gitlab/git/wiki_page_version.rb
@@ -0,0 +1,19 @@
+module Gitlab
+ module Git
+ class WikiPageVersion
+ attr_reader :commit, :format
+
+ # This class is meant to be serializable so that it can be constructed
+ # by Gitaly and sent over the network to GitLab.
+ #
+ # Both 'commit' (a Gitlab::Git::Commit) and 'format' (a string) are
+ # serializable.
+ def initialize(commit, format)
+ @commit = commit
+ @format = format
+ end
+
+ delegate :message, :sha, :id, :author_name, :authored_date, to: :commit
+ end
+ end
+end
diff --git a/lib/gitlab/git_access.rb b/lib/gitlab/git_access.rb
index 62d1ecae676..6400089a22f 100644
--- a/lib/gitlab/git_access.rb
+++ b/lib/gitlab/git_access.rb
@@ -2,45 +2,58 @@
# class return an instance of `GitlabAccessStatus`
module Gitlab
class GitAccess
+ include Gitlab::Utils::StrongMemoize
+
UnauthorizedError = Class.new(StandardError)
NotFoundError = Class.new(StandardError)
+ ProjectCreationError = Class.new(StandardError)
ProjectMovedError = Class.new(NotFoundError)
ERROR_MESSAGES = {
upload: 'You are not allowed to upload code for this project.',
download: 'You are not allowed to download code from this project.',
- deploy_key_upload:
- 'This deploy key does not have write access to this project.',
+ auth_upload: 'You are not allowed to upload code.',
+ auth_download: 'You are not allowed to download code.',
+ deploy_key_upload: 'This deploy key does not have write access to this project.',
no_repo: 'A repository for this project does not exist yet.',
project_not_found: 'The project you were looking for could not be found.',
account_blocked: 'Your account has been blocked.',
command_not_allowed: "The command you're trying to execute is not allowed.",
upload_pack_disabled_over_http: 'Pulling over HTTP is not allowed.',
- receive_pack_disabled_over_http: 'Pushing over HTTP is not allowed.'
+ receive_pack_disabled_over_http: 'Pushing over HTTP is not allowed.',
+ read_only: 'The repository is temporarily read-only. Please try again later.',
+ cannot_push_to_read_only: "You can't push code to a read-only GitLab instance."
}.freeze
DOWNLOAD_COMMANDS = %w{ git-upload-pack git-upload-archive }.freeze
PUSH_COMMANDS = %w{ git-receive-pack }.freeze
ALL_COMMANDS = DOWNLOAD_COMMANDS + PUSH_COMMANDS
- attr_reader :actor, :project, :protocol, :authentication_abilities, :redirected_path
+ attr_reader :actor, :project, :protocol, :authentication_abilities, :namespace_path, :project_path, :redirected_path
- def initialize(actor, project, protocol, authentication_abilities:, redirected_path: nil)
+ def initialize(actor, project, protocol, authentication_abilities:, namespace_path: nil, project_path: nil, redirected_path: nil)
@actor = actor
@project = project
@protocol = protocol
- @redirected_path = redirected_path
@authentication_abilities = authentication_abilities
+ @namespace_path = namespace_path
+ @project_path = project_path
+ @redirected_path = redirected_path
end
def check(cmd, changes)
check_protocol!
check_valid_actor!
check_active_user!
- check_project_accessibility!
- check_project_moved!
+ check_authentication_abilities!(cmd)
check_command_disabled!(cmd)
check_command_existence!(cmd)
+ check_db_accessibility!(cmd)
+
+ ensure_project_on_push!(cmd, changes)
+
+ check_project_accessibility!
+ check_project_moved!
check_repository_existence!
case cmd
@@ -93,6 +106,19 @@ module Gitlab
end
end
+ def check_authentication_abilities!(cmd)
+ case cmd
+ when *DOWNLOAD_COMMANDS
+ unless authentication_abilities.include?(:download_code) || authentication_abilities.include?(:build_download_code)
+ raise UnauthorizedError, ERROR_MESSAGES[:auth_download]
+ end
+ when *PUSH_COMMANDS
+ unless authentication_abilities.include?(:push_code)
+ raise UnauthorizedError, ERROR_MESSAGES[:auth_upload]
+ end
+ end
+ end
+
def check_project_accessibility!
if project.blank? || !can_read_project?
raise NotFoundError, ERROR_MESSAGES[:project_not_found]
@@ -100,18 +126,15 @@ module Gitlab
end
def check_project_moved!
- return unless redirected_path
-
- url = protocol == 'ssh' ? project.ssh_url_to_repo : project.http_url_to_repo
- message = <<-MESSAGE.strip_heredoc
- Project '#{redirected_path}' was moved to '#{project.full_path}'.
+ return if redirected_path.nil?
- Please update your Git remote and try again:
+ project_moved = Checks::ProjectMoved.new(project, user, protocol, redirected_path)
- git remote set-url origin #{url}
- MESSAGE
-
- raise ProjectMovedError, message
+ if project_moved.permanent_redirect?
+ project_moved.add_message
+ else
+ raise ProjectMovedError, project_moved.message(rejected: true)
+ end
end
def check_command_disabled!(cmd)
@@ -140,16 +163,49 @@ module Gitlab
end
end
+ def check_db_accessibility!(cmd)
+ return unless receive_pack?(cmd)
+
+ if Gitlab::Database.read_only?
+ raise UnauthorizedError, push_to_read_only_message
+ end
+ end
+
+ def ensure_project_on_push!(cmd, changes)
+ return if project || deploy_key?
+ return unless receive_pack?(cmd) && changes == '_any' && authentication_abilities.include?(:push_code)
+
+ namespace = Namespace.find_by_full_path(namespace_path)
+
+ return unless user&.can?(:create_projects, namespace)
+
+ project_params = {
+ path: project_path,
+ namespace_id: namespace.id,
+ visibility_level: Gitlab::VisibilityLevel::PRIVATE
+ }
+
+ project = Projects::CreateService.new(user, project_params).execute
+
+ unless project.saved?
+ raise ProjectCreationError, "Could not create project: #{project.errors.full_messages.join(', ')}"
+ end
+
+ @project = project
+ user_access.project = @project
+
+ Checks::ProjectCreated.new(project, user, protocol).add_message
+ end
+
def check_repository_existence!
- unless project.repository.exists?
- raise UnauthorizedError, ERROR_MESSAGES[:no_repo]
+ unless repository.exists?
+ raise NotFoundError, ERROR_MESSAGES[:no_repo]
end
end
def check_download_access!
- return if deploy_key?
-
- passed = user_can_download_code? ||
+ passed = deploy_key? ||
+ user_can_download_code? ||
build_can_download_code? ||
guest_can_download_code?
@@ -159,56 +215,49 @@ module Gitlab
end
def check_push_access!(changes)
+ if project.repository_read_only?
+ raise UnauthorizedError, ERROR_MESSAGES[:read_only]
+ end
+
if deploy_key
- check_deploy_key_push_access!
+ unless deploy_key.can_push_to?(project)
+ raise UnauthorizedError, ERROR_MESSAGES[:deploy_key_upload]
+ end
elsif user
- check_user_push_access!
+ # User access is verified in check_change_access!
else
raise UnauthorizedError, ERROR_MESSAGES[:upload]
end
- return if changes.blank? # Allow access.
+ return if changes.blank? # Allow access; this is needed for EE.
check_change_access!(changes)
end
- def check_user_push_access!
- unless authentication_abilities.include?(:push_code)
- raise UnauthorizedError, ERROR_MESSAGES[:upload]
- end
- end
-
- def check_deploy_key_push_access!
- unless deploy_key.can_push_to?(project)
- raise UnauthorizedError, ERROR_MESSAGES[:deploy_key_upload]
- end
- end
-
def check_change_access!(changes)
changes_list = Gitlab::ChangesList.new(changes)
# Iterate over all changes to find if user allowed all of them to be applied
- changes_list.each do |change|
+ changes_list.each.with_index do |change, index|
+ first_change = index == 0
+
# If the user does not have access to make at least one change, cancel the
# whole push by allowing the exception to bubble up
- check_single_change_access(change)
+ check_single_change_access(change, skip_lfs_integrity_check: !first_change)
end
end
- def check_single_change_access(change)
+ def check_single_change_access(change, skip_lfs_integrity_check: false)
Checks::ChangeAccess.new(
change,
user_access: user_access,
project: project,
skip_authorization: deploy_key?,
+ skip_lfs_integrity_check: skip_lfs_integrity_check,
protocol: protocol
).exec
end
- def matching_merge_request?(newrev, branch_name)
- Checks::MatchingMergeRequest.new(newrev, branch_name, project).match?
- end
-
def deploy_key
actor if deploy_key?
end
@@ -274,5 +323,13 @@ module Gitlab
UserAccess.new(user, project: project)
end
end
+
+ def push_to_read_only_message
+ ERROR_MESSAGES[:cannot_push_to_read_only]
+ end
+
+ def repository
+ project.repository
+ end
end
end
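A small sketch of the per-change loop introduced in check_change_access! above: only the first change of a push performs the LFS integrity check, later changes skip it.

    changes = ['old1 new1 refs/heads/a', 'old2 new2 refs/heads/b']
    changes.each.with_index do |change, index|
      first_change = index == 0
      # check_single_change_access(change, skip_lfs_integrity_check: !first_change)
      puts "#{change}: LFS integrity check #{first_change ? 'performed' : 'skipped'}"
    end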
diff --git a/lib/gitlab/git_access_wiki.rb b/lib/gitlab/git_access_wiki.rb
index 1fe5155c093..a5b3902ebf4 100644
--- a/lib/gitlab/git_access_wiki.rb
+++ b/lib/gitlab/git_access_wiki.rb
@@ -1,6 +1,7 @@
module Gitlab
class GitAccessWiki < GitAccess
ERROR_MESSAGES = {
+ read_only: "You can't push code to a read-only GitLab instance.",
write_to_wiki: "You are not allowed to write to this project's wiki."
}.freeze
@@ -12,12 +13,26 @@ module Gitlab
authentication_abilities.include?(:download_code) && user_access.can_do_action?(:download_wiki_code)
end
- def check_single_change_access(change)
+ def check_single_change_access(change, _options = {})
unless user_access.can_do_action?(:create_wiki)
raise UnauthorizedError, ERROR_MESSAGES[:write_to_wiki]
end
+ if Gitlab::Database.read_only?
+ raise UnauthorizedError, push_to_read_only_message
+ end
+
true
end
+
+ def push_to_read_only_message
+ ERROR_MESSAGES[:read_only]
+ end
+
+ private
+
+ def repository
+ project.wiki.repository
+ end
end
end
diff --git a/lib/gitlab/git_ref_validator.rb b/lib/gitlab/git_ref_validator.rb
index a3c6b21a6a1..2e3e4fc3f1f 100644
--- a/lib/gitlab/git_ref_validator.rb
+++ b/lib/gitlab/git_ref_validator.rb
@@ -11,7 +11,7 @@ module Gitlab
return false if ref_name.start_with?('refs/remotes/')
Gitlab::Utils.system_silent(
- %W(#{Gitlab.config.git.bin_path} check-ref-format refs/#{ref_name}))
+ %W(#{Gitlab.config.git.bin_path} check-ref-format --branch #{ref_name}))
end
end
end
diff --git a/lib/gitlab/gitaly_client.rb b/lib/gitlab/gitaly_client.rb
index cbd9ff406de..9cd76630484 100644
--- a/lib/gitlab/gitaly_client.rb
+++ b/lib/gitlab/gitaly_client.rb
@@ -1,9 +1,12 @@
require 'base64'
require 'gitaly'
+require 'grpc/health/v1/health_pb'
+require 'grpc/health/v1/health_services_pb'
module Gitlab
module GitalyClient
+ include Gitlab::Metrics::Methods
module MigrationStatus
DISABLED = 1
OPT_IN = 2
@@ -27,24 +30,53 @@ module Gitlab
end
SERVER_VERSION_FILE = 'GITALY_SERVER_VERSION'.freeze
- MAXIMUM_GITALY_CALLS = 30
+ MAXIMUM_GITALY_CALLS = 35
+ CLIENT_NAME = (Sidekiq.server? ? 'gitlab-sidekiq' : 'gitlab-web').freeze
MUTEX = Mutex.new
- private_constant :MUTEX
+
+ class << self
+ attr_accessor :query_time
+ end
+
+ self.query_time = 0
+
+ define_histogram :gitaly_migrate_call_duration_seconds do
+ docstring "Gitaly migration call execution timings"
+ base_labels gitaly_enabled: nil, feature: nil
+ end
+
+ define_histogram :gitaly_controller_action_duration_seconds do
+ docstring "Gitaly endpoint histogram by controller and action combination"
+ base_labels Gitlab::Metrics::Transaction::BASE_LABELS.merge(gitaly_service: nil, rpc: nil)
+ end
def self.stub(name, storage)
MUTEX.synchronize do
@stubs ||= {}
@stubs[storage] ||= {}
@stubs[storage][name] ||= begin
- klass = Gitaly.const_get(name.to_s.camelcase.to_sym).const_get(:Stub)
- addr = address(storage)
- addr = addr.sub(%r{^tcp://}, '') if URI(addr).scheme == 'tcp'
+ klass = stub_class(name)
+ addr = stub_address(storage)
klass.new(addr, :this_channel_is_insecure)
end
end
end
+ def self.stub_class(name)
+ if name == :health_check
+ Grpc::Health::V1::Health::Stub
+ else
+ Gitaly.const_get(name.to_s.camelcase.to_sym).const_get(:Stub)
+ end
+ end
+
+ def self.stub_address(storage)
+ addr = address(storage)
+ addr = addr.sub(%r{^tcp://}, '') if URI(addr).scheme == 'tcp'
+ addr
+ end
+
def self.clear_stubs!
MUTEX.synchronize do
@stubs = nil
@@ -67,19 +99,98 @@ module Gitlab
address
end
+ def self.address_metadata(storage)
+ Base64.strict_encode64(JSON.dump({ storage => { 'address' => address(storage), 'token' => token(storage) } }))
+ end
+
# All Gitaly RPC call sites should use GitalyClient.call. This method
# makes sure that per-request authentication headers are set.
- def self.call(storage, service, rpc, request)
+ #
+ # This method optionally takes a block which receives the keyword
+ # arguments hash 'kwargs' that will be passed to gRPC. This allows the
+ # caller to modify or augment the keyword arguments. The block must
+ # return a hash.
+ #
+ # For example:
+ #
+ # GitalyClient.call(storage, service, rpc, request) do |kwargs|
+ # kwargs.merge(deadline: Time.now + 10)
+ # end
+ #
+ def self.call(storage, service, rpc, request, remote_storage: nil, timeout: nil)
+ start = Gitlab::Metrics::System.monotonic_time
enforce_gitaly_request_limits(:call)
- metadata = request_metadata(storage)
- metadata = yield(metadata) if block_given?
- stub(service, storage).__send__(rpc, request, metadata) # rubocop:disable GitlabSecurity/PublicSend
+ kwargs = request_kwargs(storage, timeout, remote_storage: remote_storage)
+ kwargs = yield(kwargs) if block_given?
+
+ stub(service, storage).__send__(rpc, request, kwargs) # rubocop:disable GitlabSecurity/PublicSend
+ rescue GRPC::Unavailable => ex
+ handle_grpc_unavailable!(ex)
+ ensure
+ duration = Gitlab::Metrics::System.monotonic_time - start
+
+ # Keep track, separately, for the performance bar
+ self.query_time += duration
+ gitaly_controller_action_duration_seconds.observe(
+ current_transaction_labels.merge(gitaly_service: service.to_s, rpc: rpc.to_s),
+ duration)
end
- def self.request_metadata(storage)
+ def self.handle_grpc_unavailable!(ex)
+ status = ex.to_status
+ raise ex unless status.details == 'Endpoint read failed'
+
+ # There is a bug in grpc 1.8.x that causes a client process to get stuck
+ # always raising '14:Endpoint read failed'. The only thing that we can
+ # do to recover is to restart the process.
+ #
+ # See https://gitlab.com/gitlab-org/gitaly/issues/1029
+
+ if Sidekiq.server?
+ raise Gitlab::SidekiqMiddleware::Shutdown::WantShutdown.new(ex.to_s)
+ else
+ # SIGQUIT requests a Unicorn worker to shut down gracefully after the current request.
+ Process.kill('QUIT', Process.pid)
+ end
+
+ raise ex
+ end
+ private_class_method :handle_grpc_unavailable!
+
+ def self.current_transaction_labels
+ Gitlab::Metrics::Transaction.current&.labels || {}
+ end
+ private_class_method :current_transaction_labels
+
+ def self.request_kwargs(storage, timeout, remote_storage: nil)
encoded_token = Base64.strict_encode64(token(storage).to_s)
- { metadata: { 'authorization' => "Bearer #{encoded_token}" } }
+ metadata = {
+ 'authorization' => "Bearer #{encoded_token}",
+ 'client_name' => CLIENT_NAME
+ }
+
+ feature_stack = Thread.current[:gitaly_feature_stack]
+ feature = feature_stack && feature_stack[0]
+ metadata['call_site'] = feature.to_s if feature
+ metadata['gitaly-servers'] = address_metadata(remote_storage) if remote_storage
+
+ result = { metadata: metadata }
+
+ # nil timeout indicates that we should use the default
+ timeout = default_timeout if timeout.nil?
+
+ return result unless timeout > 0
+
+ # Do not use `Time.now` for deadline calculation, since it
+ # will be affected by Timecop in some tests, but grpc's c-core
+ # uses system time instead of timecop's time, so tests will fail
+ # `Time.at(Process.clock_gettime(Process::CLOCK_REALTIME))` will
+ # circumvent timecop
+ deadline = Time.at(Process.clock_gettime(Process::CLOCK_REALTIME)) + timeout
+ result[:deadline] = deadline
+
+ result
end
def self.token(storage)
@@ -137,7 +248,17 @@ module Gitlab
Gitlab::Metrics.measure(metric_name) do
# Some migrate calls wrap other migrate calls
allow_n_plus_1_calls do
- yield is_enabled
+ feature_stack = Thread.current[:gitaly_feature_stack] ||= []
+ feature_stack.unshift(feature)
+ begin
+ start = Gitlab::Metrics::System.monotonic_time
+ yield is_enabled
+ ensure
+ total_time = Gitlab::Metrics::System.monotonic_time - start
+ gitaly_migrate_call_duration_seconds.observe({ gitaly_enabled: is_enabled, feature: feature }, total_time)
+ feature_stack.shift
+ Thread.current[:gitaly_feature_stack] = nil if feature_stack.empty?
+ end
end
end
end
@@ -151,7 +272,7 @@ module Gitlab
actual_call_count = increment_call_count("gitaly_#{call_site}_actual")
# Do not enforce limits in production
- return if Rails.env.production?
+ return if Rails.env.production? || ENV["GITALY_DISABLE_REQUEST_LIMITS"]
# Check if this call is nested within a allow_n_plus_1_calls
# block and skip check if it is
@@ -228,9 +349,29 @@ module Gitlab
path.read.chomp
end
- def self.encode(s)
- s.dup.force_encoding(Encoding::ASCII_8BIT)
+ def self.timestamp(t)
+ Google::Protobuf::Timestamp.new(seconds: t.to_i)
+ end
+
+ # The default timeout on all Gitaly calls
+ def self.default_timeout
+ return 0 if Sidekiq.server?
+
+ timeout(:gitaly_timeout_default)
+ end
+
+ def self.fast_timeout
+ timeout(:gitaly_timeout_fast)
+ end
+
+ def self.medium_timeout
+ timeout(:gitaly_timeout_medium)
+ end
+
+ def self.timeout(timeout_name)
+ Gitlab::CurrentSettings.current_application_settings[timeout_name]
end
+ private_class_method :timeout
# Count a stack. Used for n+1 detection
def self.count_stack
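A hedged sketch of how a call site can combine the per-call timeout with the kwargs block documented above; the service, RPC and request object are placeholders, and the extra deadline tweak is purely illustrative.

    response = Gitlab::GitalyClient.call(
      'default', :commit_service, :find_commit, request,
      timeout: Gitlab::GitalyClient.fast_timeout
    ) do |kwargs|
      # The block must return a hash; here it tightens the deadline further,
      # using CLOCK_REALTIME as the comment above recommends.
      kwargs.merge(deadline: Time.at(Process.clock_gettime(Process::CLOCK_REALTIME)) + 5)
    end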
diff --git a/lib/gitlab/gitaly_client/attributes_bag.rb b/lib/gitlab/gitaly_client/attributes_bag.rb
new file mode 100644
index 00000000000..198a1de91c7
--- /dev/null
+++ b/lib/gitlab/gitaly_client/attributes_bag.rb
@@ -0,0 +1,31 @@
+module Gitlab
+ module GitalyClient
+ # This module expects an `ATTRS` const to be defined on the including class
+ # See GitalyClient::WikiFile for an example
+ module AttributesBag
+ extend ActiveSupport::Concern
+
+ included do
+ attr_accessor(*const_get(:ATTRS))
+ end
+
+ def initialize(params)
+ params = params.with_indifferent_access
+
+ attributes.each do |attr|
+ instance_variable_set("@#{attr}", params[attr])
+ end
+ end
+
+ def ==(other)
+ attributes.all? do |field|
+ instance_variable_get("@#{field}") == other.instance_variable_get("@#{field}")
+ end
+ end
+
+ def attributes
+ self.class.const_get(:ATTRS)
+ end
+ end
+ end
+end
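An illustrative includer of AttributesBag (the class name is made up); it mirrors how the Diff class is rewritten further down. ATTRS has to be defined before the include so the included hook can read it.

    class ExampleEntity
      ATTRS = %i(name size).freeze
      include Gitlab::GitalyClient::AttributesBag
    end

    entity = ExampleEntity.new('name' => 'README.md', size: 42)
    entity.name                                               # => "README.md"
    entity == ExampleEntity.new(name: 'README.md', size: 42)  # => true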
diff --git a/lib/gitlab/gitaly_client/blob_service.rb b/lib/gitlab/gitaly_client/blob_service.rb
index a250eb75bd4..28554208984 100644
--- a/lib/gitlab/gitaly_client/blob_service.rb
+++ b/lib/gitlab/gitaly_client/blob_service.rb
@@ -1,6 +1,8 @@
module Gitlab
module GitalyClient
class BlobService
+ include Gitlab::EncodingHelper
+
def initialize(repository)
@gitaly_repo = repository.gitaly_repository
end
@@ -32,6 +34,87 @@ module Gitlab
binary: Gitlab::Git::Blob.binary?(data)
)
end
+
+ def batch_lfs_pointers(blob_ids)
+ return [] if blob_ids.empty?
+
+ request = Gitaly::GetLFSPointersRequest.new(
+ repository: @gitaly_repo,
+ blob_ids: blob_ids
+ )
+
+ response = GitalyClient.call(@gitaly_repo.storage_name, :blob_service, :get_lfs_pointers, request)
+
+ map_lfs_pointers(response)
+ end
+
+ def get_blobs(revision_paths, limit = -1)
+ return [] if revision_paths.empty?
+
+ revision_paths.map! do |rev, path|
+ Gitaly::GetBlobsRequest::RevisionPath.new(revision: rev, path: encode_binary(path))
+ end
+
+ request = Gitaly::GetBlobsRequest.new(
+ repository: @gitaly_repo,
+ revision_paths: revision_paths,
+ limit: limit
+ )
+
+ response = GitalyClient.call(
+ @gitaly_repo.storage_name,
+ :blob_service,
+ :get_blobs,
+ request,
+ timeout: GitalyClient.default_timeout
+ )
+
+ GitalyClient::BlobsStitcher.new(response)
+ end
+
+ def get_new_lfs_pointers(revision, limit, not_in)
+ request = Gitaly::GetNewLFSPointersRequest.new(
+ repository: @gitaly_repo,
+ revision: encode_binary(revision),
+ limit: limit || 0
+ )
+
+ if not_in.nil? || not_in == :all
+ request.not_in_all = true
+ else
+ request.not_in_refs += not_in
+ end
+
+ response = GitalyClient.call(@gitaly_repo.storage_name, :blob_service, :get_new_lfs_pointers, request)
+
+ map_lfs_pointers(response)
+ end
+
+ def get_all_lfs_pointers(revision)
+ request = Gitaly::GetNewLFSPointersRequest.new(
+ repository: @gitaly_repo,
+ revision: encode_binary(revision)
+ )
+
+ response = GitalyClient.call(@gitaly_repo.storage_name, :blob_service, :get_all_lfs_pointers, request)
+
+ map_lfs_pointers(response)
+ end
+
+ private
+
+ def map_lfs_pointers(response)
+ response.flat_map do |message|
+ message.lfs_pointers.map do |lfs_pointer|
+ Gitlab::Git::Blob.new(
+ id: lfs_pointer.oid,
+ size: lfs_pointer.size,
+ data: lfs_pointer.data,
+ binary: Gitlab::Git::Blob.binary?(lfs_pointer.data)
+ )
+ end
+ end
+ end
end
end
end
diff --git a/lib/gitlab/gitaly_client/blobs_stitcher.rb b/lib/gitlab/gitaly_client/blobs_stitcher.rb
new file mode 100644
index 00000000000..5ca592ff812
--- /dev/null
+++ b/lib/gitlab/gitaly_client/blobs_stitcher.rb
@@ -0,0 +1,47 @@
+module Gitlab
+ module GitalyClient
+ class BlobsStitcher
+ include Enumerable
+
+ def initialize(rpc_response)
+ @rpc_response = rpc_response
+ end
+
+ def each
+ current_blob_data = nil
+
+ @rpc_response.each do |msg|
+ begin
+ if msg.oid.blank? && msg.data.blank?
+ next
+ elsif msg.oid.present?
+ yield new_blob(current_blob_data) if current_blob_data
+
+ current_blob_data = msg.to_h.slice(:oid, :path, :size, :revision, :mode)
+ current_blob_data[:data] = msg.data.dup
+ else
+ current_blob_data[:data] << msg.data
+ end
+ end
+ end
+
+ yield new_blob(current_blob_data) if current_blob_data
+ end
+
+ private
+
+ def new_blob(blob_data)
+ Gitlab::Git::Blob.new(
+ id: blob_data[:oid],
+ mode: blob_data[:mode].to_s(8),
+ name: File.basename(blob_data[:path]),
+ path: blob_data[:path],
+ size: blob_data[:size],
+ commit_id: blob_data[:revision],
+ data: blob_data[:data],
+ binary: Gitlab::Git::Blob.binary?(blob_data[:data])
+ )
+ end
+ end
+ end
+end
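A hedged sketch of the streaming contract BlobsStitcher relies on: the first message for each blob carries its metadata plus the first chunk of data, and any follow-up message carries only more data. OpenStruct stands in for the gRPC response messages.

    require 'ostruct'

    messages = [
      OpenStruct.new(oid: 'abc123', path: 'README.md', size: 11, revision: 'master', mode: 0o100644, data: 'Hello'),
      OpenStruct.new(oid: '', data: ' world')
    ]

    Gitlab::GitalyClient::BlobsStitcher.new(messages).each do |blob|
      blob.data # => "Hello world"
    end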
diff --git a/lib/gitlab/gitaly_client/commit_service.rb b/lib/gitlab/gitaly_client/commit_service.rb
index b536eb1868c..456a8a1a2d6 100644
--- a/lib/gitlab/gitaly_client/commit_service.rb
+++ b/lib/gitlab/gitaly_client/commit_service.rb
@@ -1,6 +1,8 @@
module Gitlab
module GitalyClient
class CommitService
+ include Gitlab::EncodingHelper
+
# The ID of empty tree.
# See http://stackoverflow.com/a/40884093/1856239 and https://github.com/git/git/blob/3ad8b5bf26362ac67c9020bf8c30eee54a84f56d/cache.h#L1011-L1012
EMPTY_TREE_ID = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'.freeze
@@ -13,12 +15,12 @@ module Gitlab
def ls_files(revision)
request = Gitaly::ListFilesRequest.new(
repository: @gitaly_repo,
- revision: GitalyClient.encode(revision)
+ revision: encode_binary(revision)
)
- response = GitalyClient.call(@repository.storage, :commit_service, :list_files, request)
+ response = GitalyClient.call(@repository.storage, :commit_service, :list_files, request, timeout: GitalyClient.medium_timeout)
response.flat_map do |msg|
- msg.paths.map { |d| d.dup.force_encoding(Encoding::UTF_8) }
+ msg.paths.map { |d| EncodingHelper.encode!(d.dup) }
end
end
@@ -29,23 +31,49 @@ module Gitlab
child_id: child_id
)
- GitalyClient.call(@repository.storage, :commit_service, :commit_is_ancestor, request).value
+ GitalyClient.call(@repository.storage, :commit_service, :commit_is_ancestor, request, timeout: GitalyClient.fast_timeout).value
+ end
+
+ def diff(from, to, options = {})
+ from_id = case from
+ when NilClass
+ EMPTY_TREE_ID
+ else
+ if from.respond_to?(:oid)
+ # This is meant to match a Rugged::Commit. Passing one here should become
+ # impossible in the future.
+ from.oid
+ else
+ from
+ end
+ end
+
+ to_id = case to
+ when NilClass
+ EMPTY_TREE_ID
+ else
+ if to.respond_to?(:oid)
+ # This is meant to match a Rugged::Commit. Passing one here should become
+ # impossible in the future.
+ to.oid
+ else
+ to
+ end
+ end
+
+ request_params = diff_between_commits_request_params(from_id, to_id, options)
+
+ call_commit_diff(request_params, options)
end
def diff_from_parent(commit, options = {})
- request_params = commit_diff_request_params(commit, options)
- request_params[:ignore_whitespace_change] = options.fetch(:ignore_whitespace_change, false)
- request_params[:enforce_limits] = options.fetch(:limits, true)
- request_params[:collapse_diffs] = request_params[:enforce_limits] || !options.fetch(:expanded, true)
- request_params.merge!(Gitlab::Git::DiffCollection.collection_limits(options).to_h)
+ request_params = diff_from_parent_request_params(commit, options)
- request = Gitaly::CommitDiffRequest.new(request_params)
- response = GitalyClient.call(@repository.storage, :diff_service, :commit_diff, request)
- GitalyClient::DiffStitcher.new(response)
+ call_commit_diff(request_params, options)
end
def commit_deltas(commit)
- request = Gitaly::CommitDeltaRequest.new(commit_diff_request_params(commit))
+ request = Gitaly::CommitDeltaRequest.new(diff_from_parent_request_params(commit))
response = GitalyClient.call(@repository.storage, :diff_service, :commit_delta, request)
response.flat_map { |msg| msg.deltas }
@@ -55,11 +83,11 @@ module Gitlab
request = Gitaly::TreeEntryRequest.new(
repository: @gitaly_repo,
revision: ref,
- path: GitalyClient.encode(path),
+ path: encode_binary(path),
limit: limit.to_i
)
- response = GitalyClient.call(@repository.storage, :commit_service, :tree_entry, request)
+ response = GitalyClient.call(@repository.storage, :commit_service, :tree_entry, request, timeout: GitalyClient.medium_timeout)
entry = nil
data = ''
@@ -77,14 +105,15 @@ module Gitlab
entry unless entry.oid.blank?
end
- def tree_entries(repository, revision, path)
+ def tree_entries(repository, revision, path, recursive)
request = Gitaly::GetTreeEntriesRequest.new(
repository: @gitaly_repo,
- revision: GitalyClient.encode(revision),
- path: path.present? ? GitalyClient.encode(path) : '.'
+ revision: encode_binary(revision),
+ path: path.present? ? encode_binary(path) : '.',
+ recursive: recursive
)
- response = GitalyClient.call(@repository.storage, :commit_service, :get_tree_entries, request)
+ response = GitalyClient.call(@repository.storage, :commit_service, :get_tree_entries, request, timeout: GitalyClient.medium_timeout)
response.flat_map do |message|
message.entries.map do |gitaly_tree_entry|
@@ -94,8 +123,8 @@ module Gitlab
type: gitaly_tree_entry.type.downcase,
mode: gitaly_tree_entry.mode.to_s(8),
name: File.basename(gitaly_tree_entry.path),
- path: GitalyClient.encode(gitaly_tree_entry.path),
- flat_path: GitalyClient.encode(gitaly_tree_entry.flat_path),
+ path: encode_binary(gitaly_tree_entry.path),
+ flat_path: encode_binary(gitaly_tree_entry.flat_path),
commit_id: gitaly_tree_entry.commit_oid
)
end
@@ -105,23 +134,25 @@ module Gitlab
def commit_count(ref, options = {})
request = Gitaly::CountCommitsRequest.new(
repository: @gitaly_repo,
- revision: ref
+ revision: encode_binary(ref),
+ all: !!options[:all]
)
request.after = Google::Protobuf::Timestamp.new(seconds: options[:after].to_i) if options[:after].present?
request.before = Google::Protobuf::Timestamp.new(seconds: options[:before].to_i) if options[:before].present?
- request.path = options[:path] if options[:path].present?
+ request.path = encode_binary(options[:path]) if options[:path].present?
+ request.max_count = options[:max_count] if options[:max_count].present?
- GitalyClient.call(@repository.storage, :commit_service, :count_commits, request).count
+ GitalyClient.call(@repository.storage, :commit_service, :count_commits, request, timeout: GitalyClient.medium_timeout).count
end
def last_commit_for_path(revision, path)
request = Gitaly::LastCommitForPathRequest.new(
repository: @gitaly_repo,
- revision: GitalyClient.encode(revision),
- path: GitalyClient.encode(path.to_s)
+ revision: encode_binary(revision),
+ path: encode_binary(path.to_s)
)
- gitaly_commit = GitalyClient.call(@repository.storage, :commit_service, :last_commit_for_path, request).commit
+ gitaly_commit = GitalyClient.call(@repository.storage, :commit_service, :last_commit_for_path, request, timeout: GitalyClient.fast_timeout).commit
return unless gitaly_commit
Gitlab::Git::Commit.new(@repository, gitaly_commit)
@@ -134,7 +165,7 @@ module Gitlab
to: to
)
- response = GitalyClient.call(@repository.storage, :commit_service, :commits_between, request)
+ response = GitalyClient.call(@repository.storage, :commit_service, :commits_between, request, timeout: GitalyClient.medium_timeout)
consume_commits_response(response)
end
@@ -147,10 +178,19 @@ module Gitlab
)
request.order = opts[:order].upcase if opts[:order].present?
- response = GitalyClient.call(@repository.storage, :commit_service, :find_all_commits, request)
+ response = GitalyClient.call(@repository.storage, :commit_service, :find_all_commits, request, timeout: GitalyClient.medium_timeout)
consume_commits_response(response)
end
+ def list_commits_by_oid(oids)
+ request = Gitaly::ListCommitsByOidRequest.new(repository: @gitaly_repo, oid: oids)
+
+ response = GitalyClient.call(@repository.storage, :commit_service, :list_commits_by_oid, request, timeout: GitalyClient.medium_timeout)
+ consume_commits_response(response)
+ rescue GRPC::NotFound # If no repository is found, happens mainly during testing
+ []
+ end
+
def commits_by_message(query, revision: '', path: '', limit: 1000, offset: 0)
request = Gitaly::CommitsByMessageRequest.new(
repository: @gitaly_repo,
@@ -161,7 +201,7 @@ module Gitlab
offset: offset.to_i
)
- response = GitalyClient.call(@repository.storage, :commit_service, :commits_by_message, request)
+ response = GitalyClient.call(@repository.storage, :commit_service, :commits_by_message, request, timeout: GitalyClient.medium_timeout)
consume_commits_response(response)
end
@@ -175,31 +215,42 @@ module Gitlab
def raw_blame(revision, path)
request = Gitaly::RawBlameRequest.new(
repository: @gitaly_repo,
- revision: GitalyClient.encode(revision),
- path: GitalyClient.encode(path)
+ revision: encode_binary(revision),
+ path: encode_binary(path)
)
- response = GitalyClient.call(@repository.storage, :commit_service, :raw_blame, request)
+ response = GitalyClient.call(@repository.storage, :commit_service, :raw_blame, request, timeout: GitalyClient.medium_timeout)
response.reduce("") { |memo, msg| memo << msg.data }
end
def find_commit(revision)
- request = Gitaly::FindCommitRequest.new(
- repository: @gitaly_repo,
- revision: GitalyClient.encode(revision)
- )
-
- response = GitalyClient.call(@repository.storage, :commit_service, :find_commit, request)
-
- response.commit
+ if RequestStore.active?
+ # We don't use RequestStore.fetch(key) { ... } directly because `revision`
+ # can be a branch name, so we can't use it as a key as it could point
+ # to another commit later on (happens a lot in tests).
+ key = {
+ storage: @gitaly_repo.storage_name,
+ relative_path: @gitaly_repo.relative_path,
+ commit_id: revision
+ }
+ return RequestStore[key] if RequestStore.exist?(key)
+
+ commit = call_find_commit(revision)
+ return unless commit
+
+ key[:commit_id] = commit.id
+ RequestStore[key] = commit
+ else
+ call_find_commit(revision)
+ end
end
def patch(revision)
request = Gitaly::CommitPatchRequest.new(
repository: @gitaly_repo,
- revision: GitalyClient.encode(revision)
+ revision: encode_binary(revision)
)
- response = GitalyClient.call(@repository.storage, :diff_service, :commit_patch, request)
+ response = GitalyClient.call(@repository.storage, :diff_service, :commit_patch, request, timeout: GitalyClient.medium_timeout)
response.sum(&:data)
end
@@ -207,21 +258,111 @@ module Gitlab
def commit_stats(revision)
request = Gitaly::CommitStatsRequest.new(
repository: @gitaly_repo,
- revision: GitalyClient.encode(revision)
+ revision: encode_binary(revision)
)
- GitalyClient.call(@repository.storage, :commit_service, :commit_stats, request)
+ GitalyClient.call(@repository.storage, :commit_service, :commit_stats, request, timeout: GitalyClient.medium_timeout)
+ end
+
+ def find_commits(options)
+ request = Gitaly::FindCommitsRequest.new(
+ repository: @gitaly_repo,
+ limit: options[:limit],
+ offset: options[:offset],
+ follow: options[:follow],
+ skip_merges: options[:skip_merges],
+ all: !!options[:all],
+ disable_walk: true # This option is deprecated. The 'walk' implementation is being removed.
+ )
+ request.after = GitalyClient.timestamp(options[:after]) if options[:after]
+ request.before = GitalyClient.timestamp(options[:before]) if options[:before]
+ request.revision = encode_binary(options[:ref]) if options[:ref]
+
+ request.paths = encode_repeated(Array(options[:path])) if options[:path].present?
+
+ response = GitalyClient.call(@repository.storage, :commit_service, :find_commits, request, timeout: GitalyClient.medium_timeout)
+
+ consume_commits_response(response)
+ end
+
+ def filter_shas_with_signatures(shas)
+ request = Gitaly::FilterShasWithSignaturesRequest.new(repository: @gitaly_repo)
+
+ enum = Enumerator.new do |y|
+ shas.each_slice(20) do |revs|
+ request.shas = encode_repeated(revs)
+
+ y.yield request
+
+ request = Gitaly::FilterShasWithSignaturesRequest.new
+ end
+ end
+
+ response = GitalyClient.call(@repository.storage, :commit_service, :filter_shas_with_signatures, enum)
+
+ response.flat_map do |msg|
+ msg.shas.map { |sha| EncodingHelper.encode!(sha) }
+ end
+ end
+
+ def extract_signature(commit_id)
+ request = Gitaly::ExtractCommitSignatureRequest.new(repository: @gitaly_repo, commit_id: commit_id)
+ response = GitalyClient.call(@repository.storage, :commit_service, :extract_commit_signature, request)
+
+ signature = ''.b
+ signed_text = ''.b
+
+ response.each do |message|
+ signature << message.signature
+ signed_text << message.signed_text
+ end
+
+ return if signature.blank? && signed_text.blank?
+
+ [signature, signed_text]
+ end
+
+ def get_commit_signatures(commit_ids)
+ request = Gitaly::GetCommitSignaturesRequest.new(repository: @gitaly_repo, commit_ids: commit_ids)
+ response = GitalyClient.call(@repository.storage, :commit_service, :get_commit_signatures, request)
+
+ signatures = Hash.new { |h, k| h[k] = [''.b, ''.b] }
+ current_commit_id = nil
+
+ response.each do |message|
+ current_commit_id = message.commit_id if message.commit_id.present?
+
+ signatures[current_commit_id].first << message.signature
+ signatures[current_commit_id].last << message.signed_text
+ end
+
+ signatures
end
private
- def commit_diff_request_params(commit, options = {})
+ def call_commit_diff(request_params, options = {})
+ request_params[:ignore_whitespace_change] = options.fetch(:ignore_whitespace_change, false)
+ request_params[:enforce_limits] = options.fetch(:limits, true)
+ request_params[:collapse_diffs] = request_params[:enforce_limits] || !options.fetch(:expanded, true)
+ request_params.merge!(Gitlab::Git::DiffCollection.collection_limits(options).to_h)
+
+ request = Gitaly::CommitDiffRequest.new(request_params)
+ response = GitalyClient.call(@repository.storage, :diff_service, :commit_diff, request, timeout: GitalyClient.medium_timeout)
+ GitalyClient::DiffStitcher.new(response)
+ end
+
+ def diff_from_parent_request_params(commit, options = {})
parent_id = commit.parent_ids.first || EMPTY_TREE_ID
+ diff_between_commits_request_params(parent_id, commit.id, options)
+ end
+
+ def diff_between_commits_request_params(from_id, to_id, options)
{
repository: @gitaly_repo,
- left_commit_id: parent_id,
- right_commit_id: commit.id,
- paths: options.fetch(:paths, []).map { |path| GitalyClient.encode(path) }
+ left_commit_id: from_id,
+ right_commit_id: to_id,
+ paths: options.fetch(:paths, []).compact.map { |path| encode_binary(path) }
}
end
@@ -232,6 +373,21 @@ module Gitlab
end
end
end
+
+ def encode_repeated(a)
+ Google::Protobuf::RepeatedField.new(:bytes, a.map { |s| encode_binary(s) } )
+ end
+
+ def call_find_commit(revision)
+ request = Gitaly::FindCommitRequest.new(
+ repository: @gitaly_repo,
+ revision: encode_binary(revision)
+ )
+
+ response = GitalyClient.call(@repository.storage, :commit_service, :find_commit, request, timeout: GitalyClient.medium_timeout)
+
+ response.commit
+ end
end
end
end
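A worked, plain-Ruby illustration of the from/to normalization added to CommitService#diff above: nil maps to the empty tree, anything Rugged-like contributes its oid, and a raw sha string passes through unchanged.

    EMPTY_TREE_ID = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'
    normalize = lambda do |value|
      case value
      when nil then EMPTY_TREE_ID
      else value.respond_to?(:oid) ? value.oid : value
      end
    end

    normalize.call(nil)        # => EMPTY_TREE_ID
    normalize.call('deadbeef') # => "deadbeef"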
diff --git a/lib/gitlab/gitaly_client/conflict_files_stitcher.rb b/lib/gitlab/gitaly_client/conflict_files_stitcher.rb
new file mode 100644
index 00000000000..97c13d1fdb0
--- /dev/null
+++ b/lib/gitlab/gitaly_client/conflict_files_stitcher.rb
@@ -0,0 +1,47 @@
+module Gitlab
+ module GitalyClient
+ class ConflictFilesStitcher
+ include Enumerable
+
+ def initialize(rpc_response)
+ @rpc_response = rpc_response
+ end
+
+ def each
+ current_file = nil
+
+ @rpc_response.each do |msg|
+ msg.files.each do |gitaly_file|
+ if gitaly_file.header
+ yield current_file if current_file
+
+ current_file = file_from_gitaly_header(gitaly_file.header)
+ else
+ current_file.content << gitaly_file.content
+ end
+ end
+ end
+
+ yield current_file if current_file
+ end
+
+ private
+
+ def file_from_gitaly_header(header)
+ Gitlab::Git::Conflict::File.new(
+ Gitlab::GitalyClient::Util.git_repository(header.repository),
+ header.commit_oid,
+ conflict_from_gitaly_file_header(header),
+ ''
+ )
+ end
+
+ def conflict_from_gitaly_file_header(header)
+ {
+ ours: { path: header.our_path, mode: header.our_mode },
+ theirs: { path: header.their_path }
+ }
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/gitaly_client/conflicts_service.rb b/lib/gitlab/gitaly_client/conflicts_service.rb
new file mode 100644
index 00000000000..e14734495a8
--- /dev/null
+++ b/lib/gitlab/gitaly_client/conflicts_service.rb
@@ -0,0 +1,72 @@
+module Gitlab
+ module GitalyClient
+ class ConflictsService
+ include Gitlab::EncodingHelper
+
+ MAX_MSG_SIZE = 128.kilobytes.freeze
+
+ def initialize(repository, our_commit_oid, their_commit_oid)
+ @gitaly_repo = repository.gitaly_repository
+ @repository = repository
+ @our_commit_oid = our_commit_oid
+ @their_commit_oid = their_commit_oid
+ end
+
+ def list_conflict_files
+ request = Gitaly::ListConflictFilesRequest.new(
+ repository: @gitaly_repo,
+ our_commit_oid: @our_commit_oid,
+ their_commit_oid: @their_commit_oid
+ )
+ response = GitalyClient.call(@repository.storage, :conflicts_service, :list_conflict_files, request)
+
+ GitalyClient::ConflictFilesStitcher.new(response)
+ end
+
+ def conflicts?
+ list_conflict_files.any?
+ rescue GRPC::FailedPrecondition
+ # The server raises this exception when it encounters ConflictSideMissing, which
+ # means a conflict exists but its `theirs` or `ours` data is nil due to a non-existent
+ # file in one of the trees.
+ true
+ end
+
+ def resolve_conflicts(target_repository, resolution, source_branch, target_branch)
+ reader = binary_stringio(resolution.files.to_json)
+
+ req_enum = Enumerator.new do |y|
+ header = resolve_conflicts_request_header(target_repository, resolution, source_branch, target_branch)
+ y.yield Gitaly::ResolveConflictsRequest.new(header: header)
+
+ until reader.eof?
+ chunk = reader.read(MAX_MSG_SIZE)
+
+ y.yield Gitaly::ResolveConflictsRequest.new(files_json: chunk)
+ end
+ end
+
+ response = GitalyClient.call(@repository.storage, :conflicts_service, :resolve_conflicts, req_enum, remote_storage: target_repository.storage)
+
+ if response.resolution_error.present?
+ raise Gitlab::Git::Conflict::Resolver::ResolutionError, response.resolution_error
+ end
+ end
+
+ private
+
+ def resolve_conflicts_request_header(target_repository, resolution, source_branch, target_branch)
+ Gitaly::ResolveConflictsRequestHeader.new(
+ repository: @gitaly_repo,
+ our_commit_oid: @our_commit_oid,
+ target_repository: target_repository.gitaly_repository,
+ their_commit_oid: @their_commit_oid,
+ source_branch: source_branch,
+ target_branch: target_branch,
+ commit_message: resolution.commit_message,
+ user: Gitlab::Git::User.from_gitlab(resolution.user).to_gitaly
+ )
+ end
+ end
+ end
+end
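A minimal sketch of the chunked streaming pattern used by resolve_conflicts above: the JSON payload is read in MAX_MSG_SIZE slices, each slice becoming one streamed request after the header message; the payload below is a placeholder.

    require 'stringio'

    files_json = '{"files": []}'
    reader = StringIO.new(files_json)
    chunks = []
    chunks << reader.read(128 * 1024) until reader.eof?
    chunks.size # => 1 for this tiny payload; large resolutions produce several chunks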
diff --git a/lib/gitlab/gitaly_client/diff.rb b/lib/gitlab/gitaly_client/diff.rb
index 54df6304865..d98a0ce988f 100644
--- a/lib/gitlab/gitaly_client/diff.rb
+++ b/lib/gitlab/gitaly_client/diff.rb
@@ -1,21 +1,9 @@
module Gitlab
module GitalyClient
class Diff
- FIELDS = %i(from_path to_path old_mode new_mode from_id to_id patch overflow_marker collapsed).freeze
+ ATTRS = %i(from_path to_path old_mode new_mode from_id to_id patch overflow_marker collapsed).freeze
- attr_accessor(*FIELDS)
-
- def initialize(params)
- params.each do |key, val|
- public_send(:"#{key}=", val) # rubocop:disable GitlabSecurity/PublicSend
- end
- end
-
- def ==(other)
- FIELDS.all? do |field|
- public_send(field) == other.public_send(field) # rubocop:disable GitlabSecurity/PublicSend
- end
- end
+ include AttributesBag
end
end
end
diff --git a/lib/gitlab/gitaly_client/diff_stitcher.rb b/lib/gitlab/gitaly_client/diff_stitcher.rb
index 65d81dc5d46..da243ee2d1a 100644
--- a/lib/gitlab/gitaly_client/diff_stitcher.rb
+++ b/lib/gitlab/gitaly_client/diff_stitcher.rb
@@ -12,7 +12,7 @@ module Gitlab
@rpc_response.each do |diff_msg|
if current_diff.nil?
- diff_params = diff_msg.to_h.slice(*GitalyClient::Diff::FIELDS)
+ diff_params = diff_msg.to_h.slice(*GitalyClient::Diff::ATTRS)
# gRPC uses frozen strings by default, and we need to have an unfrozen string as it
# gets processed further down the line. So we unfreeze the first chunk of the patch
# in case it's the only chunk we receive for this diff.
diff --git a/lib/gitlab/gitaly_client/health_check_service.rb b/lib/gitlab/gitaly_client/health_check_service.rb
new file mode 100644
index 00000000000..6c1213f5e20
--- /dev/null
+++ b/lib/gitlab/gitaly_client/health_check_service.rb
@@ -0,0 +1,19 @@
+module Gitlab
+ module GitalyClient
+ class HealthCheckService
+ def initialize(storage)
+ @storage = storage
+ end
+
+ # Sends a gRPC health ping to the Gitaly server for the storage shard.
+ def check
+ request = Grpc::Health::V1::HealthCheckRequest.new
+ response = GitalyClient.call(@storage, :health_check, :check, request, timeout: GitalyClient.fast_timeout)
+
+ { success: response&.status == :SERVING }
+ rescue GRPC::BadStatus => e
+ { success: false, message: e.to_s }
+ end
+ end
+ end
+end
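Hypothetical usage of the health check added above, for example from a monitoring job that probes each storage shard:

    result = Gitlab::GitalyClient::HealthCheckService.new('default').check
    result # => { success: true } when the shard's Gitaly answers SERVING,
           #    { success: false, message: "..." } when the RPC fails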
diff --git a/lib/gitlab/gitaly_client/namespace_service.rb b/lib/gitlab/gitaly_client/namespace_service.rb
new file mode 100644
index 00000000000..bd7c345ac01
--- /dev/null
+++ b/lib/gitlab/gitaly_client/namespace_service.rb
@@ -0,0 +1,39 @@
+module Gitlab
+ module GitalyClient
+ class NamespaceService
+ def initialize(storage)
+ @storage = storage
+ end
+
+ def exists?(name)
+ request = Gitaly::NamespaceExistsRequest.new(storage_name: @storage, name: name)
+
+ gitaly_client_call(:namespace_exists, request).exists
+ end
+
+ def add(name)
+ request = Gitaly::AddNamespaceRequest.new(storage_name: @storage, name: name)
+
+ gitaly_client_call(:add_namespace, request)
+ end
+
+ def remove(name)
+ request = Gitaly::RemoveNamespaceRequest.new(storage_name: @storage, name: name)
+
+ gitaly_client_call(:remove_namespace, request)
+ end
+
+ def rename(from, to)
+ request = Gitaly::RenameNamespaceRequest.new(storage_name: @storage, from: from, to: to)
+
+ gitaly_client_call(:rename_namespace, request)
+ end
+
+ private
+
+ def gitaly_client_call(type, request)
+ GitalyClient.call(@storage, :namespace_service, type, request)
+ end
+ end
+ end
+end
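Hypothetical usage of NamespaceService, for instance when moving a group's repositories between paths on the same storage:

    svc = Gitlab::GitalyClient::NamespaceService.new('default')
    svc.add('group-a') unless svc.exists?('group-a')
    svc.rename('group-a', 'group-b')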
diff --git a/lib/gitlab/gitaly_client/operation_service.rb b/lib/gitlab/gitaly_client/operation_service.rb
new file mode 100644
index 00000000000..831cfd1e014
--- /dev/null
+++ b/lib/gitlab/gitaly_client/operation_service.rb
@@ -0,0 +1,321 @@
+module Gitlab
+ module GitalyClient
+ class OperationService
+ include Gitlab::EncodingHelper
+
+ MAX_MSG_SIZE = 128.kilobytes.freeze
+
+ def initialize(repository)
+ @gitaly_repo = repository.gitaly_repository
+ @repository = repository
+ end
+
+ def rm_tag(tag_name, user)
+ request = Gitaly::UserDeleteTagRequest.new(
+ repository: @gitaly_repo,
+ tag_name: encode_binary(tag_name),
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly
+ )
+
+ response = GitalyClient.call(@repository.storage, :operation_service, :user_delete_tag, request)
+
+ if pre_receive_error = response.pre_receive_error.presence
+ raise Gitlab::Git::HooksService::PreReceiveError, pre_receive_error
+ end
+ end
+
+ def add_tag(tag_name, user, target, message)
+ request = Gitaly::UserCreateTagRequest.new(
+ repository: @gitaly_repo,
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly,
+ tag_name: encode_binary(tag_name),
+ target_revision: encode_binary(target),
+ message: encode_binary(message.to_s)
+ )
+
+ response = GitalyClient.call(@repository.storage, :operation_service, :user_create_tag, request)
+ if pre_receive_error = response.pre_receive_error.presence
+ raise Gitlab::Git::HooksService::PreReceiveError, pre_receive_error
+ elsif response.exists
+ raise Gitlab::Git::Repository::TagExistsError
+ end
+
+ Util.gitlab_tag_from_gitaly_tag(@repository, response.tag)
+ rescue GRPC::FailedPrecondition => e
+ raise Gitlab::Git::Repository::InvalidRef, e
+ end
+
+ def user_create_branch(branch_name, user, start_point)
+ request = Gitaly::UserCreateBranchRequest.new(
+ repository: @gitaly_repo,
+ branch_name: encode_binary(branch_name),
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly,
+ start_point: encode_binary(start_point)
+ )
+ response = GitalyClient.call(@repository.storage, :operation_service,
+ :user_create_branch, request)
+
+ if response.pre_receive_error.present?
+ raise Gitlab::Git::HooksService::PreReceiveError.new(response.pre_receive_error)
+ end
+
+ branch = response.branch
+ return nil unless branch
+
+ target_commit = Gitlab::Git::Commit.decorate(@repository, branch.target_commit)
+ Gitlab::Git::Branch.new(@repository, branch.name, target_commit.id, target_commit)
+ end
+
+ def user_delete_branch(branch_name, user)
+ request = Gitaly::UserDeleteBranchRequest.new(
+ repository: @gitaly_repo,
+ branch_name: encode_binary(branch_name),
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly
+ )
+
+ response = GitalyClient.call(@repository.storage, :operation_service, :user_delete_branch, request)
+
+ if pre_receive_error = response.pre_receive_error.presence
+ raise Gitlab::Git::HooksService::PreReceiveError, pre_receive_error
+ end
+ end
+
+ def user_merge_branch(user, source_sha, target_branch, message)
+ request_enum = QueueEnumerator.new
+ response_enum = GitalyClient.call(
+ @repository.storage,
+ :operation_service,
+ :user_merge_branch,
+ request_enum.each
+ )
+
+ request_enum.push(
+ Gitaly::UserMergeBranchRequest.new(
+ repository: @gitaly_repo,
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly,
+ commit_id: source_sha,
+ branch: encode_binary(target_branch),
+ message: encode_binary(message)
+ )
+ )
+
+ yield response_enum.next.commit_id
+
+ request_enum.push(Gitaly::UserMergeBranchRequest.new(apply: true))
+
+ second_response = response_enum.next
+
+ if second_response.pre_receive_error.present?
+ raise Gitlab::Git::HooksService::PreReceiveError, second_response.pre_receive_error
+ end
+
+ branch_update = second_response.branch_update
+ return if branch_update.nil?
+ raise Gitlab::Git::CommitError.new('failed to apply merge to branch') unless branch_update.commit_id.present?
+
+ Gitlab::Git::OperationService::BranchUpdate.from_gitaly(branch_update)
+ ensure
+ request_enum.close
+ end
+
+ def user_ff_branch(user, source_sha, target_branch)
+ request = Gitaly::UserFFBranchRequest.new(
+ repository: @gitaly_repo,
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly,
+ commit_id: source_sha,
+ branch: encode_binary(target_branch)
+ )
+
+ branch_update = GitalyClient.call(
+ @repository.storage,
+ :operation_service,
+ :user_ff_branch,
+ request
+ ).branch_update
+ Gitlab::Git::OperationService::BranchUpdate.from_gitaly(branch_update)
+ end
+
+ def user_cherry_pick(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
+ call_cherry_pick_or_revert(:cherry_pick,
+ user: user,
+ commit: commit,
+ branch_name: branch_name,
+ message: message,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository)
+ end
+
+ def user_revert(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
+ call_cherry_pick_or_revert(:revert,
+ user: user,
+ commit: commit,
+ branch_name: branch_name,
+ message: message,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository)
+ end
+
+ def user_rebase(user, rebase_id, branch:, branch_sha:, remote_repository:, remote_branch:)
+ request = Gitaly::UserRebaseRequest.new(
+ repository: @gitaly_repo,
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly,
+ rebase_id: rebase_id.to_s,
+ branch: encode_binary(branch),
+ branch_sha: branch_sha,
+ remote_repository: remote_repository.gitaly_repository,
+ remote_branch: encode_binary(remote_branch)
+ )
+
+ response = GitalyClient.call(
+ @repository.storage,
+ :operation_service,
+ :user_rebase,
+ request,
+ remote_storage: remote_repository.storage
+ )
+
+ if response.pre_receive_error.presence
+ raise Gitlab::Git::HooksService::PreReceiveError, response.pre_receive_error
+ elsif response.git_error.presence
+ raise Gitlab::Git::Repository::GitError, response.git_error
+ else
+ response.rebase_sha
+ end
+ end
+
+ def user_squash(user, squash_id, branch, start_sha, end_sha, author, message)
+ request = Gitaly::UserSquashRequest.new(
+ repository: @gitaly_repo,
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly,
+ squash_id: squash_id.to_s,
+ branch: encode_binary(branch),
+ start_sha: start_sha,
+ end_sha: end_sha,
+ author: Gitlab::Git::User.from_gitlab(author).to_gitaly,
+ commit_message: encode_binary(message)
+ )
+
+ response = GitalyClient.call(
+ @repository.storage,
+ :operation_service,
+ :user_squash,
+ request
+ )
+
+ if response.git_error.presence
+ raise Gitlab::Git::Repository::GitError, response.git_error
+ end
+
+ response.squash_sha
+ end
+
+ def user_commit_files(
+ user, branch_name, commit_message, actions, author_email, author_name,
+ start_branch_name, start_repository)
+
+ req_enum = Enumerator.new do |y|
+ header = user_commit_files_request_header(user, branch_name,
+ commit_message, actions, author_email, author_name,
+ start_branch_name, start_repository)
+
+ y.yield Gitaly::UserCommitFilesRequest.new(header: header)
+
+ actions.each do |action|
+ action_header = user_commit_files_action_header(action)
+ y.yield Gitaly::UserCommitFilesRequest.new(
+ action: Gitaly::UserCommitFilesAction.new(header: action_header)
+ )
+
+ reader = binary_stringio(action[:content])
+
+ until reader.eof?
+ chunk = reader.read(MAX_MSG_SIZE)
+
+ y.yield Gitaly::UserCommitFilesRequest.new(
+ action: Gitaly::UserCommitFilesAction.new(content: chunk)
+ )
+ end
+ end
+ end
+
+ response = GitalyClient.call(@repository.storage, :operation_service,
+ :user_commit_files, req_enum, remote_storage: start_repository.storage)
+
+ if (pre_receive_error = response.pre_receive_error.presence)
+ raise Gitlab::Git::HooksService::PreReceiveError, pre_receive_error
+ end
+
+ if (index_error = response.index_error.presence)
+ raise Gitlab::Git::Index::IndexError, index_error
+ end
+
+ Gitlab::Git::OperationService::BranchUpdate.from_gitaly(response.branch_update)
+ end
+
+ private
+
+ def call_cherry_pick_or_revert(rpc, user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
+ request_class = "Gitaly::User#{rpc.to_s.camelcase}Request".constantize
+
+ request = request_class.new(
+ repository: @gitaly_repo,
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly,
+ commit: commit.to_gitaly_commit,
+ branch_name: encode_binary(branch_name),
+ message: encode_binary(message),
+ start_branch_name: encode_binary(start_branch_name.to_s),
+ start_repository: start_repository.gitaly_repository
+ )
+
+ response = GitalyClient.call(
+ @repository.storage,
+ :operation_service,
+ :"user_#{rpc}",
+ request,
+ remote_storage: start_repository.storage
+ )
+
+ handle_cherry_pick_or_revert_response(response)
+ end
+
+ def handle_cherry_pick_or_revert_response(response)
+ if response.pre_receive_error.presence
+ raise Gitlab::Git::HooksService::PreReceiveError, response.pre_receive_error
+ elsif response.commit_error.presence
+ raise Gitlab::Git::CommitError, response.commit_error
+ elsif response.create_tree_error.presence
+ raise Gitlab::Git::Repository::CreateTreeError, response.create_tree_error
+ else
+ Gitlab::Git::OperationService::BranchUpdate.from_gitaly(response.branch_update)
+ end
+ end
+
+ def user_commit_files_request_header(
+ user, branch_name, commit_message, actions, author_email, author_name,
+ start_branch_name, start_repository)
+
+ Gitaly::UserCommitFilesRequestHeader.new(
+ repository: @gitaly_repo,
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly,
+ branch_name: encode_binary(branch_name),
+ commit_message: encode_binary(commit_message),
+ commit_author_name: encode_binary(author_name),
+ commit_author_email: encode_binary(author_email),
+ start_branch_name: encode_binary(start_branch_name),
+ start_repository: start_repository.gitaly_repository
+ )
+ end
+
+ def user_commit_files_action_header(action)
+ Gitaly::UserCommitFilesActionHeader.new(
+ action: action[:action].upcase.to_sym,
+ file_path: encode_binary(action[:file_path]),
+ previous_path: encode_binary(action[:previous_path]),
+ base64_content: action[:encoding] == 'base64'
+ )
+ rescue RangeError
+ raise ArgumentError, "Unknown action '#{action[:action]}'"
+ end
+ end
+ end
+end
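As a sketch of how the operation client is meant to be driven (repository is a Gitlab::Git::Repository and current_user a GitLab user; both are placeholders):

  ops = Gitlab::GitalyClient::OperationService.new(repository)

  # Returns a Gitlab::Git::Branch, or raises Gitlab::Git::HooksService::PreReceiveError
  # if a pre-receive hook rejected the update.
  branch = ops.user_create_branch('feature-branch', current_user, 'master')

  ops.rm_tag('v1.0.0', current_user)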
diff --git a/lib/gitlab/gitaly_client/queue_enumerator.rb b/lib/gitlab/gitaly_client/queue_enumerator.rb
new file mode 100644
index 00000000000..b8018029552
--- /dev/null
+++ b/lib/gitlab/gitaly_client/queue_enumerator.rb
@@ -0,0 +1,28 @@
+module Gitlab
+ module GitalyClient
+ class QueueEnumerator
+ def initialize
+ @queue = Queue.new
+ end
+
+ def push(elem)
+ @queue << elem
+ end
+
+ def close
+ push(:close)
+ end
+
+ def each
+ return enum_for(:each) unless block_given?
+
+ loop do
+ elem = @queue.pop
+ break if elem == :close
+
+ yield elem
+ end
+ end
+ end
+ end
+end
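A self-contained sketch of the enumerator on its own, outside of any gRPC call (user_merge_branch above shows the real streaming use):

  queue = Gitlab::GitalyClient::QueueEnumerator.new

  consumer = Thread.new do
    # Blocks on the internal Queue until elements arrive; stops once :close is popped.
    queue.each { |element| puts "received #{element.inspect}" }
  end

  queue.push(:first_request)
  queue.push(:second_request)
  queue.close
  consumer.join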
diff --git a/lib/gitlab/gitaly_client/ref_service.rb b/lib/gitlab/gitaly_client/ref_service.rb
index 8ef873d5848..ba6b577fd17 100644
--- a/lib/gitlab/gitaly_client/ref_service.rb
+++ b/lib/gitlab/gitaly_client/ref_service.rb
@@ -14,12 +14,18 @@ module Gitlab
request = Gitaly::FindAllBranchesRequest.new(repository: @gitaly_repo)
response = GitalyClient.call(@storage, :ref_service, :find_all_branches, request)
- response.flat_map do |message|
- message.branches.map do |branch|
- target_commit = Gitlab::Git::Commit.decorate(@repository, branch.target)
- Gitlab::Git::Branch.new(@repository, branch.name, branch.target.id, target_commit)
- end
- end
+ consume_find_all_branches_response(response)
+ end
+
+ def merged_branches(branch_names = [])
+ request = Gitaly::FindAllBranchesRequest.new(
+ repository: @gitaly_repo,
+ merged_only: true,
+ merged_branches: branch_names.map { |s| encode_binary(s) }
+ )
+ response = GitalyClient.call(@storage, :ref_service, :find_all_branches, request)
+
+ consume_find_all_branches_response(response)
end
def default_branch_name
@@ -46,7 +52,8 @@ module Gitlab
commit_id: commit_id,
prefix: ref_prefix
)
- encode!(GitalyClient.call(@storage, :ref_service, :find_ref_name, request).name.dup)
+ response = GitalyClient.call(@storage, :ref_service, :find_ref_name, request, timeout: GitalyClient.medium_timeout)
+ encode!(response.name.dup)
end
def count_tag_names
@@ -61,7 +68,7 @@ module Gitlab
request = Gitaly::FindLocalBranchesRequest.new(repository: @gitaly_repo)
request.sort_by = sort_by_param(sort_by) if sort_by
response = GitalyClient.call(@storage, :ref_service, :find_local_branches, request)
- consume_branches_response(response)
+ consume_find_local_branches_response(response)
end
def tags
@@ -71,7 +78,7 @@ module Gitlab
end
def ref_exists?(ref_name)
- request = Gitaly::RefExistsRequest.new(repository: @gitaly_repo, ref: GitalyClient.encode(ref_name))
+ request = Gitaly::RefExistsRequest.new(repository: @gitaly_repo, ref: encode_binary(ref_name))
response = GitalyClient.call(@storage, :ref_service, :ref_exists, request)
response.value
rescue GRPC::InvalidArgument => e
@@ -81,7 +88,7 @@ module Gitlab
def find_branch(branch_name)
request = Gitaly::FindBranchRequest.new(
repository: @gitaly_repo,
- name: GitalyClient.encode(branch_name)
+ name: encode_binary(branch_name)
)
response = GitalyClient.call(@repository.storage, :ref_service, :find_branch, request)
@@ -95,8 +102,8 @@ module Gitlab
def create_branch(ref, start_point)
request = Gitaly::CreateBranchRequest.new(
repository: @gitaly_repo,
- name: GitalyClient.encode(ref),
- start_point: GitalyClient.encode(start_point)
+ name: encode_binary(ref),
+ start_point: encode_binary(start_point)
)
response = GitalyClient.call(@repository.storage, :ref_service, :create_branch, request)
@@ -120,12 +127,50 @@ module Gitlab
def delete_branch(branch_name)
request = Gitaly::DeleteBranchRequest.new(
repository: @gitaly_repo,
- name: GitalyClient.encode(branch_name)
+ name: encode_binary(branch_name)
)
GitalyClient.call(@repository.storage, :ref_service, :delete_branch, request)
end
+ def delete_refs(refs: [], except_with_prefixes: [])
+ request = Gitaly::DeleteRefsRequest.new(
+ repository: @gitaly_repo,
+ refs: refs.map { |r| encode_binary(r) },
+ except_with_prefix: except_with_prefixes.map { |r| encode_binary(r) }
+ )
+
+ response = GitalyClient.call(@repository.storage, :ref_service, :delete_refs, request)
+
+ raise Gitlab::Git::Repository::GitError, response.git_error if response.git_error.present?
+ end
+
+ # Limit: 0 implies no limit, thus all tag names will be returned
+ def tag_names_contains_sha(sha, limit: 0)
+ request = Gitaly::ListTagNamesContainingCommitRequest.new(
+ repository: @gitaly_repo,
+ commit_id: sha,
+ limit: limit
+ )
+
+ stream = GitalyClient.call(@repository.storage, :ref_service, :list_tag_names_containing_commit, request)
+
+ consume_ref_contains_sha_response(stream, :tag_names)
+ end
+
+ # Limit: 0 implies no limit, thus all branch names will be returned
+ def branch_names_contains_sha(sha, limit: 0)
+ request = Gitaly::ListBranchNamesContainingCommitRequest.new(
+ repository: @gitaly_repo,
+ commit_id: sha,
+ limit: limit
+ )
+
+ stream = GitalyClient.call(@repository.storage, :ref_service, :list_branch_names_containing_commit, request)
+
+ consume_ref_contains_sha_response(stream, :branch_names)
+ end
+
private
def consume_refs_response(response)
@@ -137,10 +182,11 @@ module Gitlab
enum_value = Gitaly::FindLocalBranchesRequest::SortBy.resolve(sort_by.upcase.to_sym)
raise ArgumentError, "Invalid sort_by key `#{sort_by}`" unless enum_value
+
enum_value
end
- def consume_branches_response(response)
+ def consume_find_local_branches_response(response)
response.flat_map do |message|
message.branches.map do |gitaly_branch|
Gitlab::Git::Branch.new(
@@ -153,24 +199,21 @@ module Gitlab
end
end
- def consume_tags_response(response)
+ def consume_find_all_branches_response(response)
response.flat_map do |message|
- message.tags.map do |gitaly_tag|
- if gitaly_tag.target_commit.present?
- gitaly_commit = Gitlab::Git::Commit.decorate(@repository, gitaly_tag.target_commit)
- end
-
- Gitlab::Git::Tag.new(
- @repository,
- encode!(gitaly_tag.name.dup),
- gitaly_tag.id,
- gitaly_commit,
- encode!(gitaly_tag.message.chomp)
- )
+ message.branches.map do |branch|
+ target_commit = Gitlab::Git::Commit.decorate(@repository, branch.target)
+ Gitlab::Git::Branch.new(@repository, branch.name, branch.target.id, target_commit)
end
end
end
+ def consume_tags_response(response)
+ response.flat_map do |message|
+ message.tags.map { |gitaly_tag| Util.gitlab_tag_from_gitaly_tag(@repository, gitaly_tag) }
+ end
+ end
+
def commit_from_local_branches_response(response)
# Git messages have no encoding enforcements. However, in the UI we only
# handle UTF-8, so basically we cross our fingers that the message force
@@ -198,6 +241,13 @@ module Gitlab
Gitlab::Git::Commit.decorate(@repository, hash)
end
+ def consume_ref_contains_sha_response(stream, collection_name)
+ stream.each_with_object([]) do |response, array|
+ encoded_names = response.send(collection_name).map { |b| Gitlab::Git.ref_name(b) } # rubocop:disable GitlabSecurity/PublicSend
+ array.concat(encoded_names)
+ end
+ end
+
def invalid_ref!(message)
raise Gitlab::Git::Repository::InvalidRef.new(message)
end
diff --git a/lib/gitlab/gitaly_client/remote_service.rb b/lib/gitlab/gitaly_client/remote_service.rb
new file mode 100644
index 00000000000..58c356edfd1
--- /dev/null
+++ b/lib/gitlab/gitaly_client/remote_service.rb
@@ -0,0 +1,70 @@
+module Gitlab
+ module GitalyClient
+ class RemoteService
+ MAX_MSG_SIZE = 128.kilobytes.freeze
+
+ def initialize(repository)
+ @repository = repository
+ @gitaly_repo = repository.gitaly_repository
+ @storage = repository.storage
+ end
+
+ def add_remote(name, url, mirror_refmaps)
+ request = Gitaly::AddRemoteRequest.new(
+ repository: @gitaly_repo,
+ name: name,
+ url: url,
+ mirror_refmaps: Array.wrap(mirror_refmaps).map(&:to_s)
+ )
+
+ GitalyClient.call(@storage, :remote_service, :add_remote, request)
+ end
+
+ def remove_remote(name)
+ request = Gitaly::RemoveRemoteRequest.new(repository: @gitaly_repo, name: name)
+
+ response = GitalyClient.call(@storage, :remote_service, :remove_remote, request)
+
+ response.result
+ end
+
+ def fetch_internal_remote(repository)
+ request = Gitaly::FetchInternalRemoteRequest.new(
+ repository: @gitaly_repo,
+ remote_repository: repository.gitaly_repository
+ )
+
+ response = GitalyClient.call(@storage, :remote_service,
+ :fetch_internal_remote, request,
+ remote_storage: repository.storage)
+
+ response.result
+ end
+
+ def update_remote_mirror(ref_name, only_branches_matching)
+ req_enum = Enumerator.new do |y|
+ y.yield Gitaly::UpdateRemoteMirrorRequest.new(
+ repository: @gitaly_repo,
+ ref_name: ref_name
+ )
+
+ current_size = 0
+
+ slices = only_branches_matching.slice_before do |branch_name|
+ current_size += branch_name.bytesize
+
+ next false if current_size < MAX_MSG_SIZE
+
+ current_size = 0
+ end
+
+ slices.each do |slice|
+ y.yield Gitaly::UpdateRemoteMirrorRequest.new(only_branches_matching: slice)
+ end
+ end
+
+ GitalyClient.call(@storage, :remote_service, :update_remote_mirror, req_enum)
+ end
+ end
+ end
+end
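For illustration, wiring and removing a remote through the new client might look like this (repository, other_repository and the URL are placeholders):

  remotes = Gitlab::GitalyClient::RemoteService.new(repository)

  remotes.add_remote('mirror', 'https://example.com/repo.git', ['+refs/heads/*:refs/remotes/mirror/*'])
  remotes.fetch_internal_remote(other_repository) # => true/false from response.result
  remotes.remove_remote('mirror')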
diff --git a/lib/gitlab/gitaly_client/repository_service.rb b/lib/gitlab/gitaly_client/repository_service.rb
index 177a1284f38..e1bc2f9ab61 100644
--- a/lib/gitlab/gitaly_client/repository_service.rb
+++ b/lib/gitlab/gitaly_client/repository_service.rb
@@ -1,6 +1,10 @@
module Gitlab
module GitalyClient
class RepositoryService
+ include Gitlab::EncodingHelper
+
+ MAX_MSG_SIZE = 128.kilobytes.freeze
+
def initialize(repository)
@repository = repository
@gitaly_repo = repository.gitaly_repository
@@ -10,7 +14,9 @@ module Gitlab
def exists?
request = Gitaly::RepositoryExistsRequest.new(repository: @gitaly_repo)
- GitalyClient.call(@storage, :repository_service, :repository_exists, request).exists
+ response = GitalyClient.call(@storage, :repository_service, :repository_exists, request, timeout: GitalyClient.fast_timeout)
+
+ response.exists
end
def garbage_collect(create_bitmap)
@@ -30,16 +36,20 @@ module Gitlab
def repository_size
request = Gitaly::RepositorySizeRequest.new(repository: @gitaly_repo)
- GitalyClient.call(@storage, :repository_service, :repository_size, request).size
+ response = GitalyClient.call(@storage, :repository_service, :repository_size, request)
+ response.size
end
def apply_gitattributes(revision)
- request = Gitaly::ApplyGitattributesRequest.new(repository: @gitaly_repo, revision: revision)
+ request = Gitaly::ApplyGitattributesRequest.new(repository: @gitaly_repo, revision: encode_binary(revision))
GitalyClient.call(@storage, :repository_service, :apply_gitattributes, request)
end
- def fetch_remote(remote, ssh_auth: nil, forced: false, no_tags: false)
- request = Gitaly::FetchRemoteRequest.new(repository: @gitaly_repo, remote: remote, force: forced, no_tags: no_tags)
+ def fetch_remote(remote, ssh_auth:, forced:, no_tags:, timeout:, prune: true)
+ request = Gitaly::FetchRemoteRequest.new(
+ repository: @gitaly_repo, remote: remote, force: forced,
+ no_tags: no_tags, timeout: timeout, no_prune: !prune
+ )
if ssh_auth&.ssh_import?
if ssh_auth.ssh_key_auth? && ssh_auth.ssh_private_key.present?
@@ -53,6 +63,200 @@ module Gitlab
GitalyClient.call(@storage, :repository_service, :fetch_remote, request)
end
+
+ def create_repository
+ request = Gitaly::CreateRepositoryRequest.new(repository: @gitaly_repo)
+ GitalyClient.call(@storage, :repository_service, :create_repository, request)
+ end
+
+ def has_local_branches?
+ request = Gitaly::HasLocalBranchesRequest.new(repository: @gitaly_repo)
+ response = GitalyClient.call(@storage, :repository_service, :has_local_branches, request, timeout: GitalyClient.fast_timeout)
+
+ response.value
+ end
+
+ def find_merge_base(*revisions)
+ request = Gitaly::FindMergeBaseRequest.new(
+ repository: @gitaly_repo,
+ revisions: revisions.map { |r| encode_binary(r) }
+ )
+
+ response = GitalyClient.call(@storage, :repository_service, :find_merge_base, request)
+ response.base.presence
+ end
+
+ def fork_repository(source_repository)
+ request = Gitaly::CreateForkRequest.new(
+ repository: @gitaly_repo,
+ source_repository: source_repository.gitaly_repository
+ )
+
+ GitalyClient.call(
+ @storage,
+ :repository_service,
+ :create_fork,
+ request,
+ remote_storage: source_repository.storage,
+ timeout: GitalyClient.default_timeout
+ )
+ end
+
+ def import_repository(source)
+ request = Gitaly::CreateRepositoryFromURLRequest.new(
+ repository: @gitaly_repo,
+ url: source
+ )
+
+ GitalyClient.call(
+ @storage,
+ :repository_service,
+ :create_repository_from_url,
+ request,
+ timeout: GitalyClient.default_timeout
+ )
+ end
+
+ def rebase_in_progress?(rebase_id)
+ request = Gitaly::IsRebaseInProgressRequest.new(
+ repository: @gitaly_repo,
+ rebase_id: rebase_id.to_s
+ )
+
+ response = GitalyClient.call(
+ @storage,
+ :repository_service,
+ :is_rebase_in_progress,
+ request,
+ timeout: GitalyClient.default_timeout
+ )
+
+ response.in_progress
+ end
+
+ def squash_in_progress?(squash_id)
+ request = Gitaly::IsSquashInProgressRequest.new(
+ repository: @gitaly_repo,
+ squash_id: squash_id.to_s
+ )
+
+ response = GitalyClient.call(
+ @storage,
+ :repository_service,
+ :is_squash_in_progress,
+ request,
+ timeout: GitalyClient.default_timeout
+ )
+
+ response.in_progress
+ end
+
+ def fetch_source_branch(source_repository, source_branch, local_ref)
+ request = Gitaly::FetchSourceBranchRequest.new(
+ repository: @gitaly_repo,
+ source_repository: source_repository.gitaly_repository,
+ source_branch: source_branch.b,
+ target_ref: local_ref.b
+ )
+
+ response = GitalyClient.call(
+ @storage,
+ :repository_service,
+ :fetch_source_branch,
+ request,
+ remote_storage: source_repository.storage
+ )
+
+ response.result
+ end
+
+ def fsck
+ request = Gitaly::FsckRequest.new(repository: @gitaly_repo)
+ response = GitalyClient.call(@storage, :repository_service, :fsck, request)
+
+ if response.error.empty?
+ return "", 0
+ else
+ return response.error.b, 1
+ end
+ end
+
+ def create_bundle(save_path)
+ request = Gitaly::CreateBundleRequest.new(repository: @gitaly_repo)
+ response = GitalyClient.call(
+ @storage,
+ :repository_service,
+ :create_bundle,
+ request,
+ timeout: GitalyClient.default_timeout
+ )
+
+ File.open(save_path, 'wb') do |f|
+ response.each do |message|
+ f.write(message.data)
+ end
+ end
+ end
+
+ def create_from_bundle(bundle_path)
+ request = Gitaly::CreateRepositoryFromBundleRequest.new(repository: @gitaly_repo)
+ enum = Enumerator.new do |y|
+ File.open(bundle_path, 'rb') do |f|
+ while data = f.read(MAX_MSG_SIZE)
+ request.data = data
+
+ y.yield request
+
+ request = Gitaly::CreateRepositoryFromBundleRequest.new
+ end
+ end
+ end
+
+ GitalyClient.call(
+ @storage,
+ :repository_service,
+ :create_repository_from_bundle,
+ enum,
+ timeout: GitalyClient.default_timeout
+ )
+ end
+
+ def write_ref(ref_path, ref, old_ref, shell)
+ request = Gitaly::WriteRefRequest.new(
+ repository: @gitaly_repo,
+ ref: ref_path.b,
+ revision: ref.b,
+ shell: shell
+ )
+ request.old_revision = old_ref.b unless old_ref.nil?
+
+ response = GitalyClient.call(@storage, :repository_service, :write_ref, request)
+
+ raise Gitlab::Git::CommandError, encode!(response.error) if response.error.present?
+
+ true
+ end
+
+ def write_config(full_path:)
+ request = Gitaly::WriteConfigRequest.new(repository: @gitaly_repo, full_path: full_path)
+ response = GitalyClient.call(
+ @storage,
+ :repository_service,
+ :write_config,
+ request,
+ timeout: GitalyClient.fast_timeout
+ )
+
+ raise Gitlab::Git::OSError.new(response.error) unless response.error.empty?
+ end
+
+ def license_short_name
+ request = Gitaly::FindLicenseRequest.new(repository: @gitaly_repo)
+
+ response = GitalyClient.call(@storage, :repository_service, :find_license, request, timeout: GitalyClient.fast_timeout)
+
+ response.license_short_name.presence
+ end
end
end
end
diff --git a/lib/gitlab/gitaly_client/server_service.rb b/lib/gitlab/gitaly_client/server_service.rb
new file mode 100644
index 00000000000..2e1076d1f66
--- /dev/null
+++ b/lib/gitlab/gitaly_client/server_service.rb
@@ -0,0 +1,16 @@
+module Gitlab
+ module GitalyClient
+ # Meant for extracting server data, and perhaps for miscellaneous server tasks later on
+ #
+ # Not meant for connection logic; for that, look in Gitlab::GitalyClient
+ class ServerService
+ def initialize(storage)
+ @storage = storage
+ end
+
+ def info
+ GitalyClient.call(@storage, :server_service, :server_info, Gitaly::ServerInfoRequest.new)
+ end
+ end
+ end
+end
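A minimal sketch; 'default' is an assumed storage name, and the raw Gitaly server info response is returned as-is:

  Gitlab::GitalyClient::ServerService.new('default').info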
diff --git a/lib/gitlab/gitaly_client/util.rb b/lib/gitlab/gitaly_client/util.rb
index 8fc937496af..a8c6d478de8 100644
--- a/lib/gitlab/gitaly_client/util.rb
+++ b/lib/gitlab/gitaly_client/util.rb
@@ -2,12 +2,39 @@ module Gitlab
module GitalyClient
module Util
class << self
- def repository(repository_storage, relative_path)
+ def repository(repository_storage, relative_path, gl_repository)
+ git_object_directory = Gitlab::Git::Env['GIT_OBJECT_DIRECTORY_RELATIVE'].presence ||
+ Gitlab::Git::Env['GIT_OBJECT_DIRECTORY'].presence
+ git_alternate_object_directories =
+ Array.wrap(Gitlab::Git::Env['GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE']).presence ||
+ Array.wrap(Gitlab::Git::Env['GIT_ALTERNATE_OBJECT_DIRECTORIES']).flat_map { |d| d.split(File::PATH_SEPARATOR) }
+
Gitaly::Repository.new(
storage_name: repository_storage,
relative_path: relative_path,
- git_object_directory: Gitlab::Git::Env['GIT_OBJECT_DIRECTORY'].to_s,
- git_alternate_object_directories: Array.wrap(Gitlab::Git::Env['GIT_ALTERNATE_OBJECT_DIRECTORIES'])
+ gl_repository: gl_repository.to_s,
+ git_object_directory: git_object_directory.to_s,
+ git_alternate_object_directories: git_alternate_object_directories
+ )
+ end
+
+ def git_repository(gitaly_repository)
+ Gitlab::Git::Repository.new(gitaly_repository.storage_name,
+ gitaly_repository.relative_path,
+ gitaly_repository.gl_repository)
+ end
+
+ def gitlab_tag_from_gitaly_tag(repository, gitaly_tag)
+ if gitaly_tag.target_commit.present?
+ commit = Gitlab::Git::Commit.decorate(repository, gitaly_tag.target_commit)
+ end
+
+ Gitlab::Git::Tag.new(
+ repository,
+ Gitlab::EncodingHelper.encode!(gitaly_tag.name.dup),
+ gitaly_tag.id,
+ commit,
+ Gitlab::EncodingHelper.encode!(gitaly_tag.message.chomp)
)
end
end
diff --git a/lib/gitlab/gitaly_client/wiki_file.rb b/lib/gitlab/gitaly_client/wiki_file.rb
new file mode 100644
index 00000000000..47c60c92484
--- /dev/null
+++ b/lib/gitlab/gitaly_client/wiki_file.rb
@@ -0,0 +1,9 @@
+module Gitlab
+ module GitalyClient
+ class WikiFile
+ ATTRS = %i(name mime_type path raw_data).freeze
+
+ include AttributesBag
+ end
+ end
+end
diff --git a/lib/gitlab/gitaly_client/wiki_page.rb b/lib/gitlab/gitaly_client/wiki_page.rb
new file mode 100644
index 00000000000..a02d15db5dd
--- /dev/null
+++ b/lib/gitlab/gitaly_client/wiki_page.rb
@@ -0,0 +1,30 @@
+module Gitlab
+ module GitalyClient
+ class WikiPage
+ ATTRS = %i(title format url_path path name historical raw_data).freeze
+
+ include AttributesBag
+ include Gitlab::EncodingHelper
+
+ def initialize(params)
+ super
+
+ # All gRPC strings in a response are frozen, so we get an unfrozen
+ # version here so appending to `raw_data` doesn't blow up.
+ @raw_data = @raw_data.dup
+
+ @title = encode_utf8(@title)
+ @path = encode_utf8(@path)
+ @name = encode_utf8(@name)
+ end
+
+ def historical?
+ @historical
+ end
+
+ def format
+ @format.to_sym
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/gitaly_client/wiki_service.rb b/lib/gitlab/gitaly_client/wiki_service.rb
new file mode 100644
index 00000000000..0d8dd5cb8f4
--- /dev/null
+++ b/lib/gitlab/gitaly_client/wiki_service.rb
@@ -0,0 +1,210 @@
+require 'stringio'
+
+module Gitlab
+ module GitalyClient
+ class WikiService
+ include Gitlab::EncodingHelper
+
+ MAX_MSG_SIZE = 128.kilobytes.freeze
+
+ def initialize(repository)
+ @gitaly_repo = repository.gitaly_repository
+ @repository = repository
+ end
+
+ def write_page(name, format, content, commit_details)
+ request = Gitaly::WikiWritePageRequest.new(
+ repository: @gitaly_repo,
+ name: encode_binary(name),
+ format: format.to_s,
+ commit_details: gitaly_commit_details(commit_details)
+ )
+
+ strio = binary_stringio(content)
+
+ enum = Enumerator.new do |y|
+ until strio.eof?
+ request.content = strio.read(MAX_MSG_SIZE)
+
+ y.yield request
+
+ request = Gitaly::WikiWritePageRequest.new
+ end
+ end
+
+ response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_write_page, enum)
+ if error = response.duplicate_error.presence
+ raise Gitlab::Git::Wiki::DuplicatePageError, error
+ end
+ end
+
+ def update_page(page_path, title, format, content, commit_details)
+ request = Gitaly::WikiUpdatePageRequest.new(
+ repository: @gitaly_repo,
+ page_path: encode_binary(page_path),
+ title: encode_binary(title),
+ format: format.to_s,
+ commit_details: gitaly_commit_details(commit_details)
+ )
+
+ strio = binary_stringio(content)
+
+ enum = Enumerator.new do |y|
+ until strio.eof?
+ request.content = strio.read(MAX_MSG_SIZE)
+
+ y.yield request
+
+ request = Gitaly::WikiUpdatePageRequest.new
+ end
+ end
+
+ GitalyClient.call(@repository.storage, :wiki_service, :wiki_update_page, enum)
+ end
+
+ def delete_page(page_path, commit_details)
+ request = Gitaly::WikiDeletePageRequest.new(
+ repository: @gitaly_repo,
+ page_path: encode_binary(page_path),
+ commit_details: gitaly_commit_details(commit_details)
+ )
+
+ GitalyClient.call(@repository.storage, :wiki_service, :wiki_delete_page, request)
+ end
+
+ def find_page(title:, version: nil, dir: nil)
+ request = Gitaly::WikiFindPageRequest.new(
+ repository: @gitaly_repo,
+ title: encode_binary(title),
+ revision: encode_binary(version),
+ directory: encode_binary(dir)
+ )
+
+ response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_find_page, request)
+
+ wiki_page_from_iterator(response)
+ end
+
+ def get_all_pages
+ request = Gitaly::WikiGetAllPagesRequest.new(repository: @gitaly_repo)
+ response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_get_all_pages, request)
+ pages = []
+
+ loop do
+ page, version = wiki_page_from_iterator(response) { |message| message.end_of_page }
+
+ break unless page && version
+
+ pages << [page, version]
+ end
+
+ pages
+ end
+
+ # options:
+ # :page - The Integer page number.
+ # :per_page - The number of items per page.
+ # :limit - Total number of items to return.
+ def page_versions(page_path, options)
+ request = Gitaly::WikiGetPageVersionsRequest.new(
+ repository: @gitaly_repo,
+ page_path: encode_binary(page_path),
+ page: options[:page] || 1,
+ per_page: options[:per_page] || Gollum::Page.per_page
+ )
+
+ stream = GitalyClient.call(@repository.storage, :wiki_service, :wiki_get_page_versions, request)
+
+ versions = []
+ stream.each do |message|
+ message.versions.each do |version|
+ versions << new_wiki_page_version(version)
+ end
+ end
+
+ versions
+ end
+
+ def find_file(name, revision)
+ request = Gitaly::WikiFindFileRequest.new(
+ repository: @gitaly_repo,
+ name: encode_binary(name),
+ revision: encode_binary(revision)
+ )
+
+ response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_find_file, request)
+ wiki_file = nil
+
+ response.each do |message|
+ next unless message.name.present?
+
+ if wiki_file
+ wiki_file.raw_data << message.raw_data
+ else
+ wiki_file = GitalyClient::WikiFile.new(message.to_h)
+ # All gRPC strings in a response are frozen, so we get
+ # an unfrozen version here so appending to it in the branch above doesn't blow up
+ # when further messages arrive for the same file.
+ wiki_file.raw_data = wiki_file.raw_data.dup
+ end
+ end
+
+ wiki_file
+ end
+
+ def get_formatted_data(title:, dir: nil, version: nil)
+ request = Gitaly::WikiGetFormattedDataRequest.new(
+ repository: @gitaly_repo,
+ title: encode_binary(title),
+ revision: encode_binary(version),
+ directory: encode_binary(dir)
+ )
+
+ response = GitalyClient.call(@repository.storage, :wiki_service, :wiki_get_formatted_data, request)
+ response.reduce("") { |memo, msg| memo << msg.data }
+ end
+
+ private
+
+ # If a block is given and the yielded value is truthy, iteration will be
+ # stopped early at that point; else the iterator is consumed entirely.
+ # The iterator is traversed with `next` to allow resuming the iteration.
+ def wiki_page_from_iterator(iterator)
+ wiki_page = version = nil
+
+ while message = iterator.next
+ break if block_given? && yield(message)
+
+ page = message.page
+ next unless page
+
+ if wiki_page
+ wiki_page.raw_data << page.raw_data
+ else
+ wiki_page = GitalyClient::WikiPage.new(page.to_h)
+
+ version = new_wiki_page_version(page.version)
+ end
+ end
+
+ [wiki_page, version]
+ rescue StopIteration
+ [wiki_page, version]
+ end
+
+ def new_wiki_page_version(version)
+ Gitlab::Git::WikiPageVersion.new(
+ Gitlab::Git::Commit.decorate(@repository, version.commit),
+ version.format
+ )
+ end
+
+ def gitaly_commit_details(commit_details)
+ Gitaly::WikiCommitDetails.new(
+ name: encode_binary(commit_details.name),
+ email: encode_binary(commit_details.email),
+ message: encode_binary(commit_details.message)
+ )
+ end
+ end
+ end
+end
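A hedged round-trip sketch for the wiki client; repository is a Gitlab::Git::Repository and commit_details is assumed to respond to name, email and message (as used by gitaly_commit_details above):

  wiki = Gitlab::GitalyClient::WikiService.new(repository)

  wiki.write_page('Home', :markdown, 'Welcome to the wiki!', commit_details)
  page, version = wiki.find_page(title: 'Home')
  # page is a GitalyClient::WikiPage and version a Gitlab::Git::WikiPageVersion, or both nil.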
diff --git a/lib/gitlab/github_import.rb b/lib/gitlab/github_import.rb
new file mode 100644
index 00000000000..65b5e30c70f
--- /dev/null
+++ b/lib/gitlab/github_import.rb
@@ -0,0 +1,38 @@
+module Gitlab
+ module GithubImport
+ def self.refmap
+ [:heads, :tags, '+refs/pull/*/head:refs/merge-requests/*/head']
+ end
+
+ def self.new_client_for(project, token: nil, parallel: true)
+ token_to_use = token || project.import_data&.credentials&.fetch(:user)
+
+ Client.new(token_to_use, parallel: parallel)
+ end
+
+ # Inserts a raw row and returns the ID of the inserted row.
+ #
+ # attributes - The attributes/columns to set.
+ # relation - An ActiveRecord::Relation to use for finding the ID of the row
+ # when using MySQL.
+ def self.insert_and_return_id(attributes, relation)
+ # We use bulk_insert here so we can bypass any queries executed by
+ # callbacks or validation rules, as doing this wouldn't scale when
+ # importing very large projects.
+ result = Gitlab::Database
+ .bulk_insert(relation.table_name, [attributes], return_ids: true)
+
+ # MySQL doesn't support returning the IDs of a bulk insert in a way that
+ # is not a pain, so in this case we'll issue an extra query instead.
+ result.first ||
+ relation.where(iid: attributes[:iid]).limit(1).pluck(:id).first
+ end
+
+ # Returns the ID of the ghost user.
+ def self.ghost_user_id
+ key = 'github-import/ghost-user-id'
+
+ Caching.read_integer(key) || Caching.write(key, User.select(:id).ghost.id)
+ end
+ end
+end
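For illustration, the module-level helpers compose roughly like this inside an importer (project is a placeholder, and a real issue row would carry more columns than shown):

  client = Gitlab::GithubImport.new_client_for(project, parallel: false)

  issue_id = Gitlab::GithubImport.insert_and_return_id(
    { iid: 1, title: 'Example issue', project_id: project.id },
    project.issues
  )

  Gitlab::GithubImport.ghost_user_id # cached ID of the ghost user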
diff --git a/lib/gitlab/github_import/bulk_importing.rb b/lib/gitlab/github_import/bulk_importing.rb
new file mode 100644
index 00000000000..147597289cf
--- /dev/null
+++ b/lib/gitlab/github_import/bulk_importing.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module BulkImporting
+ # Builds and returns an Array of objects to bulk insert into the
+ # database.
+ #
+ # enum - An Enumerable that returns the objects to turn into database
+ # rows.
+ def build_database_rows(enum)
+ enum.each_with_object([]) do |(object, _), rows|
+ rows << build(object) unless already_imported?(object)
+ end
+ end
+
+ # Bulk inserts the given rows into the database.
+ def bulk_insert(model, rows, batch_size: 100)
+ rows.each_slice(batch_size) do |slice|
+ Gitlab::Database.bulk_insert(model.table_name, slice)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/caching.rb b/lib/gitlab/github_import/caching.rb
new file mode 100644
index 00000000000..b08f133794f
--- /dev/null
+++ b/lib/gitlab/github_import/caching.rb
@@ -0,0 +1,151 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Caching
+ # The default timeout of the cache keys.
+ TIMEOUT = 24.hours.to_i
+
+ WRITE_IF_GREATER_SCRIPT = <<-EOF.strip_heredoc.freeze
+ local key, value, ttl = KEYS[1], tonumber(ARGV[1]), ARGV[2]
+ local existing = tonumber(redis.call("get", key))
+
+ if existing == nil or value > existing then
+ redis.call("set", key, value)
+ redis.call("expire", key, ttl)
+ return true
+ else
+ return false
+ end
+ EOF
+
+ # Reads a cache key.
+ #
+ # If the key exists and has a non-empty value its TTL is refreshed
+ # automatically.
+ #
+ # raw_key - The cache key to read.
+ # timeout - The new timeout of the key if the key is to be refreshed.
+ def self.read(raw_key, timeout: TIMEOUT)
+ key = cache_key_for(raw_key)
+ value = Redis::Cache.with { |redis| redis.get(key) }
+
+ if value.present?
+ # We refresh the expiration time so frequently used keys stick
+ # around, removing the need for querying the database as much as
+ # possible.
+ #
+ # A key may be empty when we looked up a GitHub user (for example) but
+ # did not find a matching GitLab user. In that case we _don't_ want to
+ # refresh the TTL so we automatically pick up the right data if said
+ # user later registers themselves on the GitLab instance.
+ Redis::Cache.with { |redis| redis.expire(key, timeout) }
+ end
+
+ value
+ end
+
+ # Reads an integer from the cache, or returns nil if no value was found.
+ #
+ # See Caching.read for more information.
+ def self.read_integer(raw_key, timeout: TIMEOUT)
+ value = read(raw_key, timeout: timeout)
+
+ value.to_i if value.present?
+ end
+
+ # Sets a cache key to the given value.
+ #
+ # key - The cache key to write.
+ # value - The value to set.
+ # timeout - The time after which the cache key should expire.
+ def self.write(raw_key, value, timeout: TIMEOUT)
+ key = cache_key_for(raw_key)
+
+ Redis::Cache.with do |redis|
+ redis.set(key, value, ex: timeout)
+ end
+
+ value
+ end
+
+ # Adds a value to a set.
+ #
+ # raw_key - The key of the set to add the value to.
+ # value - The value to add to the set.
+ # timeout - The new timeout of the key.
+ def self.set_add(raw_key, value, timeout: TIMEOUT)
+ key = cache_key_for(raw_key)
+
+ Redis::Cache.with do |redis|
+ redis.multi do |m|
+ m.sadd(key, value)
+ m.expire(key, timeout)
+ end
+ end
+ end
+
+ # Returns true if the given value is present in the set.
+ #
+ # raw_key - The key of the set to check.
+ # value - The value to check for.
+ def self.set_includes?(raw_key, value)
+ key = cache_key_for(raw_key)
+
+ Redis::Cache.with do |redis|
+ redis.sismember(key, value)
+ end
+ end
+
+ # Sets multiple keys to a given value.
+ #
+ # mapping - A Hash mapping the cache keys to their values.
+ # timeout - The time after which the cache key should expire.
+ def self.write_multiple(mapping, timeout: TIMEOUT)
+ Redis::Cache.with do |redis|
+ redis.multi do |multi|
+ mapping.each do |raw_key, value|
+ multi.set(cache_key_for(raw_key), value, ex: timeout)
+ end
+ end
+ end
+ end
+
+ # Sets the expiration time of a key.
+ #
+ # raw_key - The key for which to change the timeout.
+ # timeout - The new timeout.
+ def self.expire(raw_key, timeout)
+ key = cache_key_for(raw_key)
+
+ Redis::Cache.with do |redis|
+ redis.expire(key, timeout)
+ end
+ end
+
+ # Sets a key to the given integer but only if the existing value is
+ # smaller than the given value.
+ #
+ # This method uses a Lua script to ensure the read and write are atomic.
+ #
+ # raw_key - The key to set.
+ # value - The new value for the key.
+ # timeout - The key timeout in seconds.
+ #
+ # Returns true when the key was overwritten, false otherwise.
+ def self.write_if_greater(raw_key, value, timeout: TIMEOUT)
+ key = cache_key_for(raw_key)
+ val = Redis::Cache.with do |redis|
+ redis
+ .eval(WRITE_IF_GREATER_SCRIPT, keys: [key], argv: [value, timeout])
+ end
+
+ val ? true : false
+ end
+
+ def self.cache_key_for(raw_key)
+ "#{Redis::Cache::CACHE_NAMESPACE}:#{raw_key}"
+ end
+ end
+ end
+end
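A short sketch of the cache helpers defined above; the key names are arbitrary examples:

  caching = Gitlab::GithubImport::Caching

  caching.write('github-import/example/user-id', 42)
  caching.read_integer('github-import/example/user-id') # => 42

  caching.set_add('github-import/example/seen-ids', 10)
  caching.set_includes?('github-import/example/seen-ids', 10) # => true

  # Only overwrites the stored value when the new value is greater.
  caching.write_if_greater('github-import/example/max-iid', 5)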
diff --git a/lib/gitlab/github_import/client.rb b/lib/gitlab/github_import/client.rb
index 0550f9695bd..4f160e4a447 100644
--- a/lib/gitlab/github_import/client.rb
+++ b/lib/gitlab/github_import/client.rb
@@ -1,147 +1,220 @@
+# frozen_string_literal: true
+
module Gitlab
module GithubImport
+ # HTTP client for interacting with the GitHub API.
+ #
+ # This class is basically a fancy wrapper around Octokit while adding some
+ # functionality to deal with rate limiting and parallel imports. Usage is
+ # mostly the same as Octokit, for example:
+ #
+ # client = GithubImport::Client.new('hunter2')
+ #
+ # client.labels.each do |label|
+ # puts label.name
+ # end
class Client
- GITHUB_SAFE_REMAINING_REQUESTS = 100
- GITHUB_SAFE_SLEEP_TIME = 500
+ include ::Gitlab::Utils::StrongMemoize
+
+ attr_reader :octokit
+
+ # A single page of data and the corresponding page number.
+ Page = Struct.new(:objects, :number)
+
+ # The minimum number of requests we want to keep available.
+ #
+ # We don't use a value of 0 as multiple threads may be using the same
+ # token in parallel. This could result in all of them hitting the GitHub
+ # rate limit at once. The threshold is put in place to not hit the limit
+ # in most cases.
+ RATE_LIMIT_THRESHOLD = 50
+
+ # token - The GitHub API token to use.
+ #
+ # per_page - The number of objects that should be displayed per page.
+ #
+ # parallel - When set to `true`, hitting the rate limit will result in a
+ # dedicated error being raised. When set to `false`, we will
+ # instead just `sleep()` until the rate limit is reset. Setting
+ # this value to `true` for parallel importing is crucial as
+ # otherwise hitting the rate limit will result in a thread
+ # being blocked in a `sleep()` call for up to an hour.
+ def initialize(token, per_page: 100, parallel: true)
+ @octokit = Octokit::Client.new(
+ access_token: token,
+ per_page: per_page,
+ api_endpoint: api_endpoint
+ )
- attr_reader :access_token, :host, :api_version
+ @octokit.connection_options[:ssl] = { verify: verify_ssl }
- def initialize(access_token, host: nil, api_version: 'v3')
- @access_token = access_token
- @host = host.to_s.sub(%r{/+\z}, '')
- @api_version = api_version
- @users = {}
+ @parallel = parallel
+ end
- if access_token
- ::Octokit.auto_paginate = false
- end
+ def parallel?
+ @parallel
end
- def api
- @api ||= ::Octokit::Client.new(
- access_token: access_token,
- api_endpoint: api_endpoint,
- # If there is no config, we're connecting to github.com and we
- # should verify ssl.
- connection_options: {
- ssl: { verify: config ? config['verify_ssl'] : true }
- }
- )
+ # Returns the details of a GitHub user.
+ #
+ # username - The username of the user.
+ def user(username)
+ with_rate_limit { octokit.user(username) }
end
- def client
- unless config
- raise Projects::ImportService::Error,
- 'OAuth configuration for GitHub missing.'
- end
+ # Returns the details of a GitHub repository.
+ #
+ # name - The path (in the form `owner/repository`) of the repository.
+ def repository(name)
+ with_rate_limit { octokit.repo(name) }
+ end
- @client ||= ::OAuth2::Client.new(
- config.app_id,
- config.app_secret,
- github_options.merge(ssl: { verify: config['verify_ssl'] })
- )
+ def labels(*args)
+ each_object(:labels, *args)
end
- def authorize_url(redirect_uri)
- client.auth_code.authorize_url({
- redirect_uri: redirect_uri,
- scope: "repo, user, user:email"
- })
+ def milestones(*args)
+ each_object(:milestones, *args)
end
- def get_token(code)
- client.auth_code.get_token(code).token
+ def releases(*args)
+ each_object(:releases, *args)
end
- def method_missing(method, *args, &block)
- if api.respond_to?(method)
- request(method, *args, &block)
- else
- super(method, *args, &block)
+ # Fetches data from the GitHub API and yields a Page object for every page
+ # of data, without loading all of them into memory.
+ #
+ # method - The Octokit method to use for getting the data.
+ # args - Arguments to pass to the Octokit method.
+ #
+ # rubocop: disable GitlabSecurity/PublicSend
+ def each_page(method, *args, &block)
+ return to_enum(__method__, method, *args) unless block_given?
+
+ page =
+ if args.last.is_a?(Hash) && args.last[:page]
+ args.last[:page]
+ else
+ 1
+ end
+
+ collection = with_rate_limit { octokit.public_send(method, *args) }
+ next_url = octokit.last_response.rels[:next]
+
+ yield Page.new(collection, page)
+
+ while next_url
+ response = with_rate_limit { next_url.get }
+ next_url = response.rels[:next]
+
+ yield Page.new(response.data, page += 1)
end
end
- def respond_to?(method)
- api.respond_to?(method) || super
+ # Iterates over all of the objects for the given method (e.g. `:labels`).
+ #
+ # method - The method to send to Octokit for querying data.
+ # args - Any arguments to pass to the Octokit method.
+ def each_object(method, *args, &block)
+ return to_enum(__method__, method, *args) unless block_given?
+
+ each_page(method, *args) do |page|
+ page.objects.each do |object|
+ yield object
+ end
+ end
end
- def user(login)
- return nil unless login.present?
- return @users[login] if @users.key?(login)
+ # Yields the supplied block, responding to any rate limit errors.
+ #
+ # The exact strategy used for handling rate limiting errors depends on
+ # whether we are running in parallel mode or not. For more information see
+ # `#raise_or_wait_for_rate_limit`.
+ def with_rate_limit
+ return yield unless rate_limiting_enabled?
- @users[login] = api.user(login)
- end
+ request_count_counter.increment
- private
+ raise_or_wait_for_rate_limit unless requests_remaining?
- def api_endpoint
- if host.present? && api_version.present?
- "#{host}/api/#{api_version}"
- else
- github_options[:site]
+ begin
+ yield
+ rescue Octokit::TooManyRequests
+ raise_or_wait_for_rate_limit
+
+ # This retry will only happen when running in sequential mode as we'll
+ # raise an error in parallel mode.
+ retry
end
end
- def config
- Gitlab.config.omniauth.providers.find { |provider| provider.name == "github" }
+ # Returns `true` if we're still allowed to perform API calls.
+ def requests_remaining?
+ remaining_requests > RATE_LIMIT_THRESHOLD
+ end
+
+ def remaining_requests
+ octokit.rate_limit.remaining
end
- def github_options
- if config
- config["args"]["client_options"].deep_symbolize_keys
+ def raise_or_wait_for_rate_limit
+ rate_limit_counter.increment
+
+ if parallel?
+ raise RateLimitError
else
- OmniAuth::Strategies::GitHub.default_options[:client_options].symbolize_keys
+ sleep(rate_limit_resets_in)
end
end
- def rate_limit
- api.rate_limit!
- # GitHub Rate Limit API returns 404 when the rate limit is
- # disabled. In this case we just want to return gracefully
- # instead of spitting out an error.
- rescue Octokit::NotFound
- nil
+ def rate_limit_resets_in
+ # We add a few seconds to the rate limit so we don't _immediately_
+ # resume when the rate limit resets as this may result in us performing
+ # a request before GitHub has a chance to reset the limit.
+ octokit.rate_limit.resets_in + 5
end
- def has_rate_limit?
- return @has_rate_limit if defined?(@has_rate_limit)
-
- @has_rate_limit = rate_limit.present?
+ def rate_limiting_enabled?
+ strong_memoize(:rate_limiting_enabled) do
+ api_endpoint.include?('.github.com')
+ end
end
- def rate_limit_exceed?
- has_rate_limit? && rate_limit.remaining <= GITHUB_SAFE_REMAINING_REQUESTS
+ def api_endpoint
+ custom_api_endpoint || default_api_endpoint
end
- def rate_limit_sleep_time
- rate_limit.resets_in + GITHUB_SAFE_SLEEP_TIME
+ def custom_api_endpoint
+ github_omniauth_provider.dig('args', 'client_options', 'site')
end
- def request(method, *args, &block)
- sleep rate_limit_sleep_time if rate_limit_exceed?
+ def default_api_endpoint
+ OmniAuth::Strategies::GitHub.default_options[:client_options][:site]
+ end
- data = api.__send__(method, *args) # rubocop:disable GitlabSecurity/PublicSend
- return data unless data.is_a?(Array)
+ def verify_ssl
+ github_omniauth_provider.fetch('verify_ssl', true)
+ end
- last_response = api.last_response
+ def github_omniauth_provider
+ @github_omniauth_provider ||=
+ Gitlab.config.omniauth.providers
+ .find { |provider| provider.name == 'github' }
+ .to_h
+ end
- if block_given?
- yield data
- # api.last_response could change while we're yielding (e.g. fetching labels for each PR)
- # so we cache our own last response
- each_response_page(last_response, &block)
- else
- each_response_page(last_response) { |page| data.concat(page) }
- data
- end
+ def rate_limit_counter
+ @rate_limit_counter ||= Gitlab::Metrics.counter(
+ :github_importer_rate_limit_hits,
+ 'The number of times we hit the GitHub rate limit when importing projects'
+ )
end
- def each_response_page(last_response)
- while last_response.rels[:next]
- sleep rate_limit_sleep_time if rate_limit_exceed?
- last_response = last_response.rels[:next].get
- yield last_response.data if last_response.data.is_a?(Array)
- end
+ def request_count_counter
+ @request_counter ||= Gitlab::Metrics.counter(
+ :github_importer_request_count,
+ 'The number of GitHub API calls performed when importing projects'
+ )
end
end
end
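A sketch of page-wise iteration with the rewritten client; the token and repository path are placeholders:

  client = Gitlab::GithubImport::Client.new('personal-access-token', parallel: false)

  client.each_page(:issues, 'owner/repository', state: 'all') do |page|
    puts "page #{page.number}: #{page.objects.length} issues"
  end

  client.labels('owner/repository').each { |label| puts label.name }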
diff --git a/lib/gitlab/github_import/importer/diff_note_importer.rb b/lib/gitlab/github_import/importer/diff_note_importer.rb
new file mode 100644
index 00000000000..8274f37d358
--- /dev/null
+++ b/lib/gitlab/github_import/importer/diff_note_importer.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class DiffNoteImporter
+ attr_reader :note, :project, :client, :user_finder
+
+ # note - An instance of `Gitlab::GithubImport::Representation::DiffNote`.
+ # project - An instance of `Project`.
+ # client - An instance of `Gitlab::GithubImport::Client`.
+ def initialize(note, project, client)
+ @note = note
+ @project = project
+ @client = client
+ @user_finder = UserFinder.new(project, client)
+ end
+
+ def execute
+ return unless (mr_id = find_merge_request_id)
+
+ author_id, author_found = user_finder.author_id_for(note)
+
+ note_body =
+ MarkdownText.format(note.note, note.author, author_found)
+
+ attributes = {
+ noteable_type: 'MergeRequest',
+ noteable_id: mr_id,
+ project_id: project.id,
+ author_id: author_id,
+ note: note_body,
+ system: false,
+ commit_id: note.commit_id,
+ line_code: note.line_code,
+ type: 'LegacyDiffNote',
+ created_at: note.created_at,
+ updated_at: note.updated_at,
+ st_diff: note.diff_hash.to_yaml
+ }
+
+ # It's possible that during an import we'll insert tens of thousands
+ # of diff notes. If we were to use the Note/LegacyDiffNote model here
+ # we'd also have to run additional queries for both validations and
+ # callbacks, putting a lot of pressure on the database.
+ #
+ # To work around this we're using bulk_insert with a single row. This
+ # allows us to efficiently insert data (even if it's just 1 row)
+ # without having to use all sorts of hacks to disable callbacks.
+ Gitlab::Database.bulk_insert(LegacyDiffNote.table_name, [attributes])
+ rescue ActiveRecord::InvalidForeignKey
+ # It's possible the project and the issue have been deleted since
+ # scheduling this job. In this case we'll just skip creating the note.
+ end
+
+ # Returns the ID of the merge request this note belongs to.
+ def find_merge_request_id
+ GithubImport::IssuableFinder.new(project, note).database_id
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/diff_notes_importer.rb b/lib/gitlab/github_import/importer/diff_notes_importer.rb
new file mode 100644
index 00000000000..966f12c5c2f
--- /dev/null
+++ b/lib/gitlab/github_import/importer/diff_notes_importer.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class DiffNotesImporter
+ include ParallelScheduling
+
+ def representation_class
+ Representation::DiffNote
+ end
+
+ def importer_class
+ DiffNoteImporter
+ end
+
+ def sidekiq_worker_class
+ ImportDiffNoteWorker
+ end
+
+ def collection_method
+ :pull_requests_comments
+ end
+
+ def id_for_already_imported_cache(note)
+ note.id
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/issue_and_label_links_importer.rb b/lib/gitlab/github_import/importer/issue_and_label_links_importer.rb
new file mode 100644
index 00000000000..bad064b76c8
--- /dev/null
+++ b/lib/gitlab/github_import/importer/issue_and_label_links_importer.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class IssueAndLabelLinksImporter
+ attr_reader :issue, :project, :client
+
+ # issue - An instance of `Gitlab::GithubImport::Representation::Issue`.
+ # project - An instance of `Project`
+ # client - An instance of `Gitlab::GithubImport::Client`
+ def initialize(issue, project, client)
+ @issue = issue
+ @project = project
+ @client = client
+ end
+
+ def execute
+ IssueImporter.import_if_issue(issue, project, client)
+ LabelLinksImporter.new(issue, project, client).execute
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/issue_importer.rb b/lib/gitlab/github_import/importer/issue_importer.rb
new file mode 100644
index 00000000000..31fefebf787
--- /dev/null
+++ b/lib/gitlab/github_import/importer/issue_importer.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class IssueImporter
+ attr_reader :project, :issue, :client, :user_finder, :milestone_finder,
+ :issuable_finder
+
+ # Imports an issue if it's a regular issue and not a pull request.
+ def self.import_if_issue(issue, project, client)
+ new(issue, project, client).execute unless issue.pull_request?
+ end
+
+ # issue - An instance of `Gitlab::GithubImport::Representation::Issue`.
+ # project - An instance of `Project`
+ # client - An instance of `Gitlab::GithubImport::Client`
+ def initialize(issue, project, client)
+ @issue = issue
+ @project = project
+ @client = client
+ @user_finder = UserFinder.new(project, client)
+ @milestone_finder = MilestoneFinder.new(project)
+ @issuable_finder = GithubImport::IssuableFinder.new(project, issue)
+ end
+
+ def execute
+ Issue.transaction do
+ if (issue_id = create_issue)
+ create_assignees(issue_id)
+ issuable_finder.cache_database_id(issue_id)
+ end
+ end
+ end
+
+ # Creates a new GitLab issue for the current GitHub issue.
+ #
+ # Returns the ID of the created issue as an Integer. If the issue
+ # couldn't be created this method will return `nil` instead.
+ def create_issue
+ author_id, author_found = user_finder.author_id_for(issue)
+
+ description =
+ MarkdownText.format(issue.description, issue.author, author_found)
+
+ attributes = {
+ iid: issue.iid,
+ title: issue.truncated_title,
+ author_id: author_id,
+ project_id: project.id,
+ description: description,
+ milestone_id: milestone_finder.id_for(issue),
+ state: issue.state,
+ created_at: issue.created_at,
+ updated_at: issue.updated_at
+ }
+
+ GithubImport.insert_and_return_id(attributes, project.issues)
+ rescue ActiveRecord::InvalidForeignKey
+ # It's possible the project has been deleted since scheduling this
+ # job. In this case we'll just skip creating the issue.
+ end
+
+ # Stores all issue assignees in the database.
+ #
+ # issue_id - The ID of the created issue.
+ def create_assignees(issue_id)
+ assignees = []
+
+ issue.assignees.each do |assignee|
+ if (user_id = user_finder.user_id_for(assignee))
+ assignees << { issue_id: issue_id, user_id: user_id }
+ end
+ end
+
+ Gitlab::Database.bulk_insert(IssueAssignee.table_name, assignees)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/issues_importer.rb b/lib/gitlab/github_import/importer/issues_importer.rb
new file mode 100644
index 00000000000..ac6d0666b3a
--- /dev/null
+++ b/lib/gitlab/github_import/importer/issues_importer.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class IssuesImporter
+ include ParallelScheduling
+
+ def importer_class
+ IssueAndLabelLinksImporter
+ end
+
+ def representation_class
+ Representation::Issue
+ end
+
+ def sidekiq_worker_class
+ ImportIssueWorker
+ end
+
+ def collection_method
+ :issues
+ end
+
+ def id_for_already_imported_cache(issue)
+ issue.number
+ end
+
+ def collection_options
+ { state: 'all', sort: 'created', direction: 'asc' }
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/label_links_importer.rb b/lib/gitlab/github_import/importer/label_links_importer.rb
new file mode 100644
index 00000000000..2001b7e3482
--- /dev/null
+++ b/lib/gitlab/github_import/importer/label_links_importer.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class LabelLinksImporter
+ attr_reader :issue, :project, :client, :label_finder
+
+ # issue - An instance of `Gitlab::GithubImport::Representation::Issue`
+ # project - An instance of `Project`
+ # client - An instance of `Gitlab::GithubImport::Client`
+ def initialize(issue, project, client)
+ @issue = issue
+ @project = project
+ @client = client
+ @label_finder = LabelFinder.new(project)
+ end
+
+ def execute
+ create_labels
+ end
+
+ def create_labels
+ time = Time.zone.now
+ rows = []
+ target_id = find_target_id
+
+ issue.label_names.each do |label_name|
+ # Although unlikely, it's technically possible for an issue to be
+ # given a label that was created and assigned after we imported all
+ # the project's labels.
+ next unless (label_id = label_finder.id_for(label_name))
+
+ rows << {
+ label_id: label_id,
+ target_id: target_id,
+ target_type: issue.issuable_type,
+ created_at: time,
+ updated_at: time
+ }
+ end
+
+ Gitlab::Database.bulk_insert(LabelLink.table_name, rows)
+ end
+
+ def find_target_id
+ GithubImport::IssuableFinder.new(project, issue).database_id
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/labels_importer.rb b/lib/gitlab/github_import/importer/labels_importer.rb
new file mode 100644
index 00000000000..a73033d35ba
--- /dev/null
+++ b/lib/gitlab/github_import/importer/labels_importer.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class LabelsImporter
+ include BulkImporting
+
+ attr_reader :project, :client, :existing_labels
+
+ # project - An instance of `Project`.
+ # client - An instance of `Gitlab::GithubImport::Client`.
+ def initialize(project, client)
+ @project = project
+ @client = client
+ @existing_labels = project.labels.pluck(:title).to_set
+ end
+
+ def execute
+ bulk_insert(Label, build_labels)
+ build_labels_cache
+ end
+
+ def build_labels
+ build_database_rows(each_label)
+ end
+
+ def already_imported?(label)
+ existing_labels.include?(label.name)
+ end
+
+ def build_labels_cache
+ LabelFinder.new(project).build_cache
+ end
+
+ def build(label)
+ time = Time.zone.now
+
+ {
+ title: label.name,
+ color: '#' + label.color,
+ project_id: project.id,
+ type: 'ProjectLabel',
+ created_at: time,
+ updated_at: time
+ }
+ end
+
+ def each_label
+ client.labels(project.import_source)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/milestones_importer.rb b/lib/gitlab/github_import/importer/milestones_importer.rb
new file mode 100644
index 00000000000..c53480e828a
--- /dev/null
+++ b/lib/gitlab/github_import/importer/milestones_importer.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class MilestonesImporter
+ include BulkImporting
+
+ attr_reader :project, :client, :existing_milestones
+
+ # project - An instance of `Project`
+ # client - An instance of `Gitlab::GithubImport::Client`
+ def initialize(project, client)
+ @project = project
+ @client = client
+ @existing_milestones = project.milestones.pluck(:iid).to_set
+ end
+
+ def execute
+ bulk_insert(Milestone, build_milestones)
+ build_milestones_cache
+ end
+
+ def build_milestones
+ build_database_rows(each_milestone)
+ end
+
+ def already_imported?(milestone)
+ existing_milestones.include?(milestone.number)
+ end
+
+ def build_milestones_cache
+ MilestoneFinder.new(project).build_cache
+ end
+
+ def build(milestone)
+ {
+ iid: milestone.number,
+ title: milestone.title,
+ description: milestone.description,
+ project_id: project.id,
+ state: state_for(milestone),
+ created_at: milestone.created_at,
+ updated_at: milestone.updated_at
+ }
+ end
+
+ def state_for(milestone)
+ milestone.state == 'open' ? :active : :closed
+ end
+
+ def each_milestone
+ client.milestones(project.import_source, state: 'all')
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/note_importer.rb b/lib/gitlab/github_import/importer/note_importer.rb
new file mode 100644
index 00000000000..c890f2df360
--- /dev/null
+++ b/lib/gitlab/github_import/importer/note_importer.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class NoteImporter
+ attr_reader :note, :project, :client, :user_finder
+
+ # note - An instance of `Gitlab::GithubImport::Representation::Note`.
+ # project - An instance of `Project`.
+ # client - An instance of `Gitlab::GithubImport::Client`.
+ def initialize(note, project, client)
+ @note = note
+ @project = project
+ @client = client
+ @user_finder = UserFinder.new(project, client)
+ end
+
+ def execute
+ return unless (noteable_id = find_noteable_id)
+
+ author_id, author_found = user_finder.author_id_for(note)
+
+ note_body =
+ MarkdownText.format(note.note, note.author, author_found)
+
+ attributes = {
+ noteable_type: note.noteable_type,
+ noteable_id: noteable_id,
+ project_id: project.id,
+ author_id: author_id,
+ note: note_body,
+ system: false,
+ created_at: note.created_at,
+ updated_at: note.updated_at
+ }
+
+ # We're using bulk_insert here so we can bypass any validations and
+ # callbacks. Running these would result in a lot of unnecessary SQL
+ # queries being executed when importing large projects.
+ Gitlab::Database.bulk_insert(Note.table_name, [attributes])
+ rescue ActiveRecord::InvalidForeignKey
+ # It's possible the project and the issue have been deleted since
+ # scheduling this job. In this case we'll just skip creating the note.
+ end
+
+ # Returns the ID of the issue or merge request to create the note for.
+ def find_noteable_id
+ GithubImport::IssuableFinder.new(project, note).database_id
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/notes_importer.rb b/lib/gitlab/github_import/importer/notes_importer.rb
new file mode 100644
index 00000000000..5aec760ea5f
--- /dev/null
+++ b/lib/gitlab/github_import/importer/notes_importer.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class NotesImporter
+ include ParallelScheduling
+
+ def importer_class
+ NoteImporter
+ end
+
+ def representation_class
+ Representation::Note
+ end
+
+ def sidekiq_worker_class
+ ImportNoteWorker
+ end
+
+ def collection_method
+ :issues_comments
+ end
+
+ def id_for_already_imported_cache(note)
+ note.id
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/pull_request_importer.rb b/lib/gitlab/github_import/importer/pull_request_importer.rb
new file mode 100644
index 00000000000..49d859f9624
--- /dev/null
+++ b/lib/gitlab/github_import/importer/pull_request_importer.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class PullRequestImporter
+ attr_reader :pull_request, :project, :client, :user_finder,
+ :milestone_finder, :issuable_finder
+
+ # pull_request - An instance of
+ # `Gitlab::GithubImport::Representation::PullRequest`.
+ # project - An instance of `Project`
+ # client - An instance of `Gitlab::GithubImport::Client`
+ def initialize(pull_request, project, client)
+ @pull_request = pull_request
+ @project = project
+ @client = client
+ @user_finder = UserFinder.new(project, client)
+ @milestone_finder = MilestoneFinder.new(project)
+ @issuable_finder =
+ GithubImport::IssuableFinder.new(project, pull_request)
+ end
+
+ def execute
+ if (mr_id = create_merge_request)
+ issuable_finder.cache_database_id(mr_id)
+ end
+ end
+
+ # Creates the merge request and returns its ID.
+ #
+ # This method will return `nil` if the merge request could not be
+ # created.
+ def create_merge_request
+ author_id, author_found = user_finder.author_id_for(pull_request)
+
+ description = MarkdownText
+ .format(pull_request.description, pull_request.author, author_found)
+
+ # This work must be wrapped in a transaction as otherwise we can leave
+ # behind incomplete data in the event of an error. This can then lead
+ # to duplicate key errors when jobs are retried.
+ MergeRequest.transaction do
+ attributes = {
+ iid: pull_request.iid,
+ title: pull_request.truncated_title,
+ description: description,
+ source_project_id: project.id,
+ target_project_id: project.id,
+ source_branch: pull_request.formatted_source_branch,
+ target_branch: pull_request.target_branch,
+ state: pull_request.state,
+ milestone_id: milestone_finder.id_for(pull_request),
+ author_id: author_id,
+ assignee_id: user_finder.assignee_id_for(pull_request),
+ created_at: pull_request.created_at,
+ updated_at: pull_request.updated_at
+ }
+
+ # When creating merge requests there are a lot of hooks that may
+ # run, for many different reasons. Many of these hooks (e.g. the
+ # ones used for rendering Markdown) are completely unnecessary and
+ # may even lead to transaction timeouts.
+ #
+ # To ensure importing pull requests has a minimal impact and can
+ # complete in a reasonable time we bypass all the hooks by inserting
+ # the row and then retrieving it. We then only perform the
+ # additional work that is strictly necessary.
+ merge_request_id = GithubImport
+ .insert_and_return_id(attributes, project.merge_requests)
+
+ merge_request = project.merge_requests.find(merge_request_id)
+
+ # These fields are set so we can create the correct merge request
+ # diffs.
+ merge_request.source_branch_sha = pull_request.source_branch_sha
+ merge_request.target_branch_sha = pull_request.target_branch_sha
+
+ merge_request.keep_around_commit
+ merge_request.merge_request_diffs.create
+
+ merge_request.id
+ end
+ rescue ActiveRecord::InvalidForeignKey
+ # It's possible the project has been deleted since scheduling this
+ # job. In this case we'll just skip creating the merge request.
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/pull_requests_importer.rb b/lib/gitlab/github_import/importer/pull_requests_importer.rb
new file mode 100644
index 00000000000..e70361c163b
--- /dev/null
+++ b/lib/gitlab/github_import/importer/pull_requests_importer.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class PullRequestsImporter
+ include ParallelScheduling
+
+ def importer_class
+ PullRequestImporter
+ end
+
+ def representation_class
+ Representation::PullRequest
+ end
+
+ def sidekiq_worker_class
+ ImportPullRequestWorker
+ end
+
+ def id_for_already_imported_cache(pr)
+ pr.number
+ end
+
+ def each_object_to_import
+ super do |pr|
+ update_repository if update_repository?(pr)
+ yield pr
+ end
+ end
+
+ def update_repository
+ # We set this column _before_ fetching the repository, and this is
+ # deliberate. If we were to update this column after the fetch we may
+ # miss out on changes pushed during the fetch or between the fetch and
+ # updating the timestamp.
+ project.update_column(:last_repository_updated_at, Time.zone.now)
+
+ project.repository.fetch_remote('github', forced: false)
+
+ pname = project.path_with_namespace
+
+ Rails.logger
+ .info("GitHub importer finished updating repository for #{pname}")
+
+ repository_updates_counter.increment(project: pname)
+ end
+
+ def update_repository?(pr)
+ last_update = project.last_repository_updated_at || project.created_at
+
+ return false if pr.updated_at < last_update
+
+ # PRs may be updated without there actually being new commits, so we
+ # check to make sure we only re-fetch when it's truly necessary.
+ !(commit_exists?(pr.head.sha) && commit_exists?(pr.base.sha))
+ end
+
+ def commit_exists?(sha)
+ project.repository.commit(sha).present?
+ end
+
+ def collection_method
+ :pull_requests
+ end
+
+ def collection_options
+ { state: 'all', sort: 'created', direction: 'asc' }
+ end
+
+ def repository_updates_counter
+ @repository_updates_counter ||= Gitlab::Metrics.counter(
+ :github_importer_repository_updates,
+ 'The number of times repositories have to be updated again'
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/releases_importer.rb b/lib/gitlab/github_import/importer/releases_importer.rb
new file mode 100644
index 00000000000..100f459fdcc
--- /dev/null
+++ b/lib/gitlab/github_import/importer/releases_importer.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class ReleasesImporter
+ include BulkImporting
+
+ attr_reader :project, :client, :existing_tags
+
+ # project - An instance of `Project`
+ # client - An instance of `Gitlab::GithubImport::Client`
+ def initialize(project, client)
+ @project = project
+ @client = client
+ @existing_tags = project.releases.pluck(:tag).to_set
+ end
+
+ def execute
+ bulk_insert(Release, build_releases)
+ end
+
+ def build_releases
+ build_database_rows(each_release)
+ end
+
+ def already_imported?(release)
+ existing_tags.include?(release.tag_name)
+ end
+
+ def build(release)
+ {
+ tag: release.tag_name,
+ description: description_for(release),
+ created_at: release.created_at,
+ updated_at: release.updated_at,
+ project_id: project.id
+ }
+ end
+
+ def each_release
+ client.releases(project.import_source)
+ end
+
+ def description_for(release)
+ if release.body.present?
+ release.body
+ else
+ "Release for tag #{release.tag_name}"
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/repository_importer.rb b/lib/gitlab/github_import/importer/repository_importer.rb
new file mode 100644
index 00000000000..ab0b751fe24
--- /dev/null
+++ b/lib/gitlab/github_import/importer/repository_importer.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ class RepositoryImporter
+ include Gitlab::ShellAdapter
+
+ attr_reader :project, :client
+
+ def initialize(project, client)
+ @project = project
+ @client = client
+ end
+
+ # Returns true if we should import the wiki for the project.
+ def import_wiki?
+ client.repository(project.import_source)&.has_wiki &&
+ !project.wiki_repository_exists?
+ end
+
+ # Imports the repository data.
+ #
+ # This method will return true if the data was imported successfully or
+ # the repository had already been imported before.
+ def execute
+ imported =
+ # It's possible a repository has already been imported when running
+ # this code, e.g. because we had to retry this job after
+ # `import_wiki?` raised a rate limit error. In this case we'll skip
+ # re-importing the main repository.
+ if project.empty_repo?
+ import_repository
+ else
+ true
+ end
+
+ update_clone_time if imported
+
+ imported = import_wiki_repository if import_wiki? && imported
+
+ imported
+ end
+
+ def import_repository
+ project.ensure_repository
+
+ refmap = Gitlab::GithubImport.refmap
+ project.repository.fetch_as_mirror(project.import_url, refmap: refmap, forced: true, remote_name: 'github')
+
+ true
+ rescue Gitlab::Git::Repository::NoRepository, Gitlab::Shell::Error => e
+ fail_import("Failed to import the repository: #{e.message}")
+ end
+
+ def import_wiki_repository
+ wiki_path = "#{project.disk_path}.wiki"
+ wiki_url = project.import_url.sub(/\.git\z/, '.wiki.git')
+ storage_path = project.repository_storage_path
+
+ gitlab_shell.import_repository(storage_path, wiki_path, wiki_url)
+
+ true
+ rescue Gitlab::Shell::Error => e
+ if e.message !~ /repository not exported/
+ project.create_wiki
+ fail_import("Failed to import the wiki: #{e.message}")
+ else
+ true
+ end
+ end
+
+ def update_clone_time
+ project.update_column(:last_repository_updated_at, Time.zone.now)
+ end
+
+ def fail_import(message)
+ project.mark_import_as_failed(message)
+ false
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/issuable_finder.rb b/lib/gitlab/github_import/issuable_finder.rb
new file mode 100644
index 00000000000..211915f1d87
--- /dev/null
+++ b/lib/gitlab/github_import/issuable_finder.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ # IssuableFinder can be used for caching and retrieving database IDs for
+ # issuable objects such as issues and pull requests. By caching these IDs we
+ # remove the need for running a lot of database queries when importing
+ # GitHub projects.
+ class IssuableFinder
+ attr_reader :project, :object
+
+ # The base cache key to use for storing/retrieving issuable IDs.
+ CACHE_KEY = 'github-import/issuable-finder/%{project}/%{type}/%{iid}'.freeze
+
+ # project - An instance of `Project`.
+ # object - The object to look up or set a database ID for.
+ def initialize(project, object)
+ @project = project
+ @object = object
+ end
+
+ # Returns the database ID for the object.
+ #
+ # This method will return `nil` if no ID could be found.
+ def database_id
+ val = Caching.read(cache_key)
+
+ val.to_i if val.present?
+ end
+
+ # Associates the given database ID with the current object.
+ #
+ # database_id - The ID of the corresponding database row.
+ def cache_database_id(database_id)
+ Caching.write(cache_key, database_id)
+ end
+
+ private
+
+ def cache_key
+ CACHE_KEY % {
+ project: project.id,
+ type: cache_key_type,
+ iid: cache_key_iid
+ }
+ end
+
+ # Returns the identifier to use for cache keys.
+ #
+ # For issues and pull requests this will be "Issue" or "MergeRequest"
+ # respectively. For diff notes this will return "MergeRequest"; for
+ # regular notes it will either return "Issue" or "MergeRequest" depending
+ # on what type of object the note belongs to.
+ def cache_key_type
+ if object.respond_to?(:issuable_type)
+ object.issuable_type
+ elsif object.respond_to?(:noteable_type)
+ object.noteable_type
+ else
+ raise(
+ TypeError,
+ "Instances of #{object.class} are not supported"
+ )
+ end
+ end
+
+ def cache_key_iid
+ if object.respond_to?(:noteable_id)
+ object.noteable_id
+ elsif object.respond_to?(:iid)
+ object.iid
+ else
+ raise(
+ TypeError,
+ "Instances of #{object.class} are not supported"
+ )
+ end
+ end
+ end
+ end
+end
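Usage note: a minimal sketch of how the importer classes in this series consume IssuableFinder, assuming a `project` and an issue representation are already in scope (`created_issue_id` is a hypothetical value returned by the insert step):

    finder = Gitlab::GithubImport::IssuableFinder.new(project, issue)

    if (existing_id = finder.database_id)
      # The issuable was imported previously; later jobs (e.g. note imports)
      # can reuse the cached database ID without querying the database.
      existing_id
    else
      # After inserting the row, cache its ID so follow-up jobs can find it.
      finder.cache_database_id(created_issue_id)
    end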
diff --git a/lib/gitlab/github_import/label_finder.rb b/lib/gitlab/github_import/label_finder.rb
new file mode 100644
index 00000000000..9be071141db
--- /dev/null
+++ b/lib/gitlab/github_import/label_finder.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ class LabelFinder
+ attr_reader :project
+
+ # The base cache key to use for storing/retrieving label IDs.
+ CACHE_KEY = 'github-import/label-finder/%{project}/%{name}'.freeze
+
+ # project - An instance of `Project`.
+ def initialize(project)
+ @project = project
+ end
+
+ # Returns the label ID for the given name.
+ def id_for(name)
+ Caching.read_integer(cache_key_for(name))
+ end
+
+ def build_cache
+ mapping = @project
+ .labels
+ .pluck(:id, :name)
+ .each_with_object({}) do |(id, name), hash|
+ hash[cache_key_for(name)] = id
+ end
+
+ Caching.write_multiple(mapping)
+ end
+
+ def cache_key_for(name)
+ CACHE_KEY % { project: project.id, name: name }
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/markdown_text.rb b/lib/gitlab/github_import/markdown_text.rb
new file mode 100644
index 00000000000..b25c4f7becf
--- /dev/null
+++ b/lib/gitlab/github_import/markdown_text.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ class MarkdownText
+ attr_reader :text, :author, :exists
+
+ def self.format(*args)
+ new(*args).to_s
+ end
+
+ # text - The Markdown text as a String.
+ # author - An instance of `Gitlab::GithubImport::Representation::User`
+ # exists - Boolean that indicates the user exists in the GitLab database.
+ def initialize(text, author, exists = false)
+ @text = text
+ @author = author
+ @exists = exists
+ end
+
+ def to_s
+ if exists
+ text
+ else
+ "*Created by: #{author.login}*\n\n#{text}"
+ end
+ end
+ end
+ end
+end
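Usage note: a small sketch of the formatting behaviour, using an illustrative author; only the third argument changes the output:

    author = Gitlab::GithubImport::Representation::User.new(id: 42, login: 'alice')

    # The GitHub author maps to an existing GitLab user: text passes through.
    Gitlab::GithubImport::MarkdownText.format('Hello', author, true)
    # => "Hello"

    # No matching GitLab user: the original author is credited inline.
    Gitlab::GithubImport::MarkdownText.format('Hello', author, false)
    # => "*Created by: alice*\n\nHello"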
diff --git a/lib/gitlab/github_import/milestone_finder.rb b/lib/gitlab/github_import/milestone_finder.rb
new file mode 100644
index 00000000000..208d15dc144
--- /dev/null
+++ b/lib/gitlab/github_import/milestone_finder.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ class MilestoneFinder
+ attr_reader :project
+
+ # The base cache key to use for storing/retrieving milestone IDs.
+ CACHE_KEY = 'github-import/milestone-finder/%{project}/%{iid}'.freeze
+
+ # project - An instance of `Project`
+ def initialize(project)
+ @project = project
+ end
+
+ # issuable - An instance of `Gitlab::GithubImport::Representation::Issue`
+ # or `Gitlab::GithubImport::Representation::PullRequest`.
+ def id_for(issuable)
+ return unless issuable.milestone_number
+
+ Caching.read_integer(cache_key_for(issuable.milestone_number))
+ end
+
+ def build_cache
+ mapping = @project
+ .milestones
+ .pluck(:id, :iid)
+ .each_with_object({}) do |(id, iid), hash|
+ hash[cache_key_for(iid)] = id
+ end
+
+ Caching.write_multiple(mapping)
+ end
+
+ def cache_key_for(iid)
+ CACHE_KEY % { project: project.id, iid: iid }
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/page_counter.rb b/lib/gitlab/github_import/page_counter.rb
new file mode 100644
index 00000000000..c3db2d0b469
--- /dev/null
+++ b/lib/gitlab/github_import/page_counter.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ # PageCounter can be used to keep track of the last imported page of a
+ # collection, allowing workers to resume where they left off in the event of
+ # an error.
+ class PageCounter
+ attr_reader :cache_key
+
+ # The base cache key to use for storing the last page number.
+ CACHE_KEY = 'github-importer/page-counter/%{project}/%{collection}'.freeze
+
+ def initialize(project, collection)
+ @cache_key = CACHE_KEY % { project: project.id, collection: collection }
+ end
+
+ # Sets the page number to the given value.
+ #
+ # Returns true if the page number was overwritten, false otherwise.
+ def set(page)
+ Caching.write_if_greater(cache_key, page)
+ end
+
+ # Returns the current value from the cache.
+ def current
+ Caching.read_integer(cache_key) || 1
+ end
+ end
+ end
+end
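Usage note: a sketch of how a scheduling worker could resume pagination with PageCounter; `client` and `process` are assumed to exist in the surrounding worker:

    counter = Gitlab::GithubImport::PageCounter.new(project, :issues)

    client.each_page(:issues, project.import_source, page: counter.current) do |page|
      # write_if_greater only ever advances the counter, so a duplicate job
      # that is still on an older page skips work already covered elsewhere.
      next unless counter.set(page.number)

      process(page.objects)
    end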
diff --git a/lib/gitlab/github_import/parallel_importer.rb b/lib/gitlab/github_import/parallel_importer.rb
new file mode 100644
index 00000000000..6da11e6ef08
--- /dev/null
+++ b/lib/gitlab/github_import/parallel_importer.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ # The ParallelImporter schedules the importing of a GitHub project using
+ # Sidekiq.
+ class ParallelImporter
+ attr_reader :project
+
+ def self.async?
+ true
+ end
+
+ def self.imports_repository?
+ true
+ end
+
+ def initialize(project)
+ @project = project
+ end
+
+ def execute
+ jid = generate_jid
+
+ # The original import JID is the JID of the RepositoryImportWorker job,
+ # which will be removed once that job completes. Reusing that JID could
+ # result in StuckImportJobsWorker marking the job as stuck before we get
+ # to running Stage::ImportRepositoryWorker.
+ #
+ # We work around this by setting the JID to a custom generated one, then
+ # refreshing it in the various stages whenever necessary.
+ Gitlab::SidekiqStatus
+ .set(jid, StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION)
+
+ project.update_column(:import_jid, jid)
+
+ Stage::ImportRepositoryWorker
+ .perform_async(project.id)
+
+ true
+ end
+
+ def generate_jid
+ "github-importer/#{project.id}"
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/parallel_scheduling.rb b/lib/gitlab/github_import/parallel_scheduling.rb
new file mode 100644
index 00000000000..d4d1357f5a3
--- /dev/null
+++ b/lib/gitlab/github_import/parallel_scheduling.rb
@@ -0,0 +1,162 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module ParallelScheduling
+ attr_reader :project, :client, :page_counter, :already_imported_cache_key
+
+ # The base cache key to use for tracking already imported objects.
+ ALREADY_IMPORTED_CACHE_KEY =
+ 'github-importer/already-imported/%{project}/%{collection}'.freeze
+
+ # project - An instance of `Project`.
+ # client - An instance of `Gitlab::GithubImport::Client`.
+ # parallel - When set to true the objects will be imported in parallel.
+ def initialize(project, client, parallel: true)
+ @project = project
+ @client = client
+ @parallel = parallel
+ @page_counter = PageCounter.new(project, collection_method)
+ @already_imported_cache_key = ALREADY_IMPORTED_CACHE_KEY %
+ { project: project.id, collection: collection_method }
+ end
+
+ def parallel?
+ @parallel
+ end
+
+ def execute
+ retval =
+ if parallel?
+ parallel_import
+ else
+ sequential_import
+ end
+
+ # Once we have completed all work we can remove our "already exists"
+ # cache so we don't put too much pressure on Redis.
+ #
+ # We don't immediately remove it since it's technically possible for
+ # other instances of this job to still run; instead we set the
+ # expiration time to a lower value. This prevents the other jobs from
+ # scheduling duplicates in the meantime. Since all work has already been
+ # completed, those jobs will just cycle through any remaining pages
+ # without scheduling anything.
+ Caching.expire(already_imported_cache_key, 15.minutes.to_i)
+
+ retval
+ end
+
+ # Imports all the objects in sequence in the current thread.
+ def sequential_import
+ each_object_to_import do |object|
+ repr = representation_class.from_api_response(object)
+
+ importer_class.new(repr, project, client).execute
+ end
+ end
+
+ # Imports all objects in parallel by scheduling a Sidekiq job for every
+ # individual object.
+ def parallel_import
+ waiter = JobWaiter.new
+
+ each_object_to_import do |object|
+ repr = representation_class.from_api_response(object)
+
+ sidekiq_worker_class
+ .perform_async(project.id, repr.to_hash, waiter.key)
+
+ waiter.jobs_remaining += 1
+ end
+
+ waiter
+ end
+
+ # The method that will be called for traversing through all the objects to
+ # import, yielding them to the supplied block.
+ def each_object_to_import
+ repo = project.import_source
+
+ # We inject the page number here to make sure that all importers always
+ # start where they left off. Simply starting over wouldn't work for
+ # repositories with a lot of data (e.g. tens of thousands of comments).
+ options = collection_options.merge(page: page_counter.current)
+
+ client.each_page(collection_method, repo, options) do |page|
+ # Technically it's possible that the same work is performed multiple
+ # times, as Sidekiq doesn't guarantee there will ever only be one
+ # instance of a job. In such a scenario it's possible for one job to
+ # have a lower page number (e.g. 5) compared to another (e.g. 10). In
+ # this case we skip over all the objects until we have caught up,
+ # reducing the number of duplicate jobs scheduled by the provided
+ # block.
+ next unless page_counter.set(page.number)
+
+ page.objects.each do |object|
+ next if already_imported?(object)
+
+ yield object
+
+ # We mark the object as imported immediately so we don't end up
+ # scheduling it multiple times.
+ mark_as_imported(object)
+ end
+ end
+ end
+
+ # Returns true if the given object has already been imported, false
+ # otherwise.
+ #
+ # object - The object to check.
+ def already_imported?(object)
+ id = id_for_already_imported_cache(object)
+
+ Caching.set_includes?(already_imported_cache_key, id)
+ end
+
+ # Marks the given object as "already imported".
+ def mark_as_imported(object)
+ id = id_for_already_imported_cache(object)
+
+ Caching.set_add(already_imported_cache_key, id)
+ end
+
+ # Returns the ID to use for the cache used for checking if an object has
+ # already been imported or not.
+ #
+ # object - The object we may want to import.
+ def id_for_already_imported_cache(object)
+ raise NotImplementedError
+ end
+
+ # The class used for converting API responses to Hashes when performing
+ # the import.
+ def representation_class
+ raise NotImplementedError
+ end
+
+ # The class to use for importing objects when importing them sequentially.
+ def importer_class
+ raise NotImplementedError
+ end
+
+ # The Sidekiq worker class used for scheduling the importing of objects in
+ # parallel.
+ def sidekiq_worker_class
+ raise NotImplementedError
+ end
+
+ # The name of the method to call to retrieve the data to import.
+ def collection_method
+ raise NotImplementedError
+ end
+
+ # Any options to be passed to the method used for retrieving the data to
+ # import.
+ def collection_options
+ {}
+ end
+ end
+ end
+end
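Usage note: a sketch of how these importer classes are driven; sequential mode runs everything in the calling thread, while parallel mode schedules Sidekiq jobs and returns a JobWaiter (mirroring how SequentialImporter, defined further below, uses them):

    # Sequential: import notes in the current thread, e.g. from a Rake task.
    Gitlab::GithubImport::Importer::NotesImporter
      .new(project, client, parallel: false)
      .execute

    # Parallel (the default): schedule one Sidekiq job per object and get
    # back a JobWaiter that a stage worker can wait on.
    waiter = Gitlab::GithubImport::Importer::NotesImporter
      .new(project, client)
      .execute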
diff --git a/lib/gitlab/github_import/rate_limit_error.rb b/lib/gitlab/github_import/rate_limit_error.rb
new file mode 100644
index 00000000000..cc2de909c29
--- /dev/null
+++ b/lib/gitlab/github_import/rate_limit_error.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ # Error that will be raised when we're about to reach (or have reached) the
+ # GitHub API's rate limit.
+ RateLimitError = Class.new(StandardError)
+ end
+end
diff --git a/lib/gitlab/github_import/representation.rb b/lib/gitlab/github_import/representation.rb
new file mode 100644
index 00000000000..639477ef2a2
--- /dev/null
+++ b/lib/gitlab/github_import/representation.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Representation
+ TIMESTAMP_KEYS = %i[created_at updated_at merged_at].freeze
+
+ # Converts a Hash with String based keys to one that can be used by the
+ # various Representation classes.
+ #
+ # Example:
+ #
+ # Representation.symbolize_hash('number' => 10) # => { number: 10 }
+ def self.symbolize_hash(raw_hash = nil)
+ hash = raw_hash.deep_symbolize_keys
+
+ TIMESTAMP_KEYS.each do |key|
+ hash[key] = Time.parse(hash[key]) if hash[key].is_a?(String)
+ end
+
+ hash
+ end
+ end
+ end
+end
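Usage note: a sketch of what symbolize_hash does with a raw JSON payload; the values are illustrative:

    raw = { 'number' => 10, 'created_at' => '2018-03-07T09:59:51Z' }

    Gitlab::GithubImport::Representation.symbolize_hash(raw)
    # => { number: 10, created_at: 2018-03-07 09:59:51 UTC }
    # String keys become Symbols and the timestamp fields listed in
    # TIMESTAMP_KEYS are parsed into Time objects.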
diff --git a/lib/gitlab/github_import/representation/diff_note.rb b/lib/gitlab/github_import/representation/diff_note.rb
new file mode 100644
index 00000000000..be1334ca98a
--- /dev/null
+++ b/lib/gitlab/github_import/representation/diff_note.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Representation
+ class DiffNote
+ include ToHash
+ include ExposeAttribute
+
+ attr_reader :attributes
+
+ expose_attribute :noteable_type, :noteable_id, :commit_id, :file_path,
+ :diff_hunk, :author, :note, :created_at, :updated_at,
+ :github_id
+
+ NOTEABLE_ID_REGEX = %r{/pull/(?<iid>\d+)}i
+
+ # Builds a diff note from a GitHub API response.
+ #
+ # note - An instance of `Sawyer::Resource` containing the note details.
+ def self.from_api_response(note)
+ matches = note.html_url.match(NOTEABLE_ID_REGEX)
+
+ unless matches
+ raise(
+ ArgumentError,
+ "The note URL #{note.html_url.inspect} is not supported"
+ )
+ end
+
+ user = Representation::User.from_api_response(note.user) if note.user
+ hash = {
+ noteable_type: 'MergeRequest',
+ noteable_id: matches[:iid].to_i,
+ file_path: note.path,
+ commit_id: note.commit_id,
+ diff_hunk: note.diff_hunk,
+ author: user,
+ note: note.body,
+ created_at: note.created_at,
+ updated_at: note.updated_at,
+ github_id: note.id
+ }
+
+ new(hash)
+ end
+
+ # Builds a new note using a Hash that was built from a JSON payload.
+ def self.from_json_hash(raw_hash)
+ hash = Representation.symbolize_hash(raw_hash)
+ hash[:author] &&= Representation::User.from_json_hash(hash[:author])
+
+ new(hash)
+ end
+
+ # attributes - A Hash containing the raw note details. The keys of this
+ # Hash must be Symbols.
+ def initialize(attributes)
+ @attributes = attributes
+ end
+
+ def line_code
+ diff_line = Gitlab::Diff::Parser.new.parse(diff_hunk.lines).to_a.last
+
+ Gitlab::Git
+ .diff_line_code(file_path, diff_line.new_pos, diff_line.old_pos)
+ end
+
+ # Returns a Hash that can be used to populate `notes.st_diff`, removing
+ # the need for requesting Git data for every diff note.
+ def diff_hash
+ {
+ diff: diff_hunk,
+ new_path: file_path,
+ old_path: file_path,
+
+ # These fields are not displayed for LegacyDiffNote notes, so it
+ # doesn't really matter what we set them to.
+ a_mode: '100644',
+ b_mode: '100644',
+ new_file: false
+ }
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/representation/expose_attribute.rb b/lib/gitlab/github_import/representation/expose_attribute.rb
new file mode 100644
index 00000000000..c3405759631
--- /dev/null
+++ b/lib/gitlab/github_import/representation/expose_attribute.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Representation
+ module ExposeAttribute
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ # Defines getter methods for the given attribute names.
+ #
+ # Example:
+ #
+ # expose_attribute :iid, :title
+ def expose_attribute(*names)
+ names.each do |name|
+ name = name.to_sym
+
+ define_method(name) { attributes[name] }
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/representation/issue.rb b/lib/gitlab/github_import/representation/issue.rb
new file mode 100644
index 00000000000..f3071b3e2b3
--- /dev/null
+++ b/lib/gitlab/github_import/representation/issue.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Representation
+ class Issue
+ include ToHash
+ include ExposeAttribute
+
+ attr_reader :attributes
+
+ expose_attribute :iid, :title, :description, :milestone_number,
+ :created_at, :updated_at, :state, :assignees,
+ :label_names, :author
+
+ # Builds an issue from a GitHub API response.
+ #
+ # issue - An instance of `Sawyer::Resource` containing the issue
+ # details.
+ def self.from_api_response(issue)
+ user =
+ if issue.user
+ Representation::User.from_api_response(issue.user)
+ end
+
+ hash = {
+ iid: issue.number,
+ title: issue.title,
+ description: issue.body,
+ milestone_number: issue.milestone&.number,
+ state: issue.state == 'open' ? :opened : :closed,
+ assignees: issue.assignees.map do |u|
+ Representation::User.from_api_response(u)
+ end,
+ label_names: issue.labels.map(&:name),
+ author: user,
+ created_at: issue.created_at,
+ updated_at: issue.updated_at,
+ pull_request: issue.pull_request ? true : false
+ }
+
+ new(hash)
+ end
+
+ # Builds a new issue using a Hash that was built from a JSON payload.
+ def self.from_json_hash(raw_hash)
+ hash = Representation.symbolize_hash(raw_hash)
+
+ hash[:state] = hash[:state].to_sym
+ hash[:assignees].map! { |u| Representation::User.from_json_hash(u) }
+ hash[:author] &&= Representation::User.from_json_hash(hash[:author])
+
+ new(hash)
+ end
+
+ # attributes - A hash containing the raw issue details. The keys of this
+ # Hash (and any nested hashes) must be symbols.
+ def initialize(attributes)
+ @attributes = attributes
+ end
+
+ def truncated_title
+ title.truncate(255)
+ end
+
+ def labels?
+ label_names && label_names.any?
+ end
+
+ def pull_request?
+ attributes[:pull_request]
+ end
+
+ def issuable_type
+ pull_request? ? 'MergeRequest' : 'Issue'
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/representation/note.rb b/lib/gitlab/github_import/representation/note.rb
new file mode 100644
index 00000000000..070e3b2db8d
--- /dev/null
+++ b/lib/gitlab/github_import/representation/note.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Representation
+ class Note
+ include ToHash
+ include ExposeAttribute
+
+ attr_reader :attributes
+
+ expose_attribute :noteable_id, :noteable_type, :author, :note,
+ :created_at, :updated_at, :github_id
+
+ NOTEABLE_TYPE_REGEX = %r{/(?<type>(pull|issues))/(?<iid>\d+)}i
+
+ # Builds a note from a GitHub API response.
+ #
+ # note - An instance of `Sawyer::Resource` containing the note details.
+ def self.from_api_response(note)
+ matches = note.html_url.match(NOTEABLE_TYPE_REGEX)
+
+ if !matches || !matches[:type]
+ raise(
+ ArgumentError,
+ "The note URL #{note.html_url.inspect} is not supported"
+ )
+ end
+
+ noteable_type =
+ if matches[:type] == 'pull'
+ 'MergeRequest'
+ else
+ 'Issue'
+ end
+
+ user = Representation::User.from_api_response(note.user) if note.user
+ hash = {
+ noteable_type: noteable_type,
+ noteable_id: matches[:iid].to_i,
+ author: user,
+ note: note.body,
+ created_at: note.created_at,
+ updated_at: note.updated_at,
+ github_id: note.id
+ }
+
+ new(hash)
+ end
+
+ # Builds a new note using a Hash that was built from a JSON payload.
+ def self.from_json_hash(raw_hash)
+ hash = Representation.symbolize_hash(raw_hash)
+
+ hash[:author] &&= Representation::User.from_json_hash(hash[:author])
+
+ new(hash)
+ end
+
+ # attributes - A Hash containing the raw note details. The keys of this
+ # Hash must be Symbols.
+ def initialize(attributes)
+ @attributes = attributes
+ end
+
+ alias_method :issuable_type, :noteable_type
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/representation/pull_request.rb b/lib/gitlab/github_import/representation/pull_request.rb
new file mode 100644
index 00000000000..593b491a837
--- /dev/null
+++ b/lib/gitlab/github_import/representation/pull_request.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Representation
+ class PullRequest
+ include ToHash
+ include ExposeAttribute
+
+ attr_reader :attributes
+
+ expose_attribute :iid, :title, :description, :source_branch,
+ :source_branch_sha, :target_branch, :target_branch_sha,
+ :milestone_number, :author, :assignee, :created_at,
+ :updated_at, :merged_at, :source_repository_id,
+ :target_repository_id, :source_repository_owner
+
+ # Builds a PR from a GitHub API response.
+ #
+ # pr - An instance of `Sawyer::Resource` containing the PR details.
+ def self.from_api_response(pr)
+ assignee =
+ if pr.assignee
+ Representation::User.from_api_response(pr.assignee)
+ end
+
+ user = Representation::User.from_api_response(pr.user) if pr.user
+ hash = {
+ iid: pr.number,
+ title: pr.title,
+ description: pr.body,
+ source_branch: pr.head.ref,
+ target_branch: pr.base.ref,
+ source_branch_sha: pr.head.sha,
+ target_branch_sha: pr.base.sha,
+ source_repository_id: pr.head&.repo&.id,
+ target_repository_id: pr.base&.repo&.id,
+ source_repository_owner: pr.head&.user&.login,
+ state: pr.state == 'open' ? :opened : :closed,
+ milestone_number: pr.milestone&.number,
+ author: user,
+ assignee: assignee,
+ created_at: pr.created_at,
+ updated_at: pr.updated_at,
+ merged_at: pr.merged_at
+ }
+
+ new(hash)
+ end
+
+ # Builds a new PR using a Hash that was built from a JSON payload.
+ def self.from_json_hash(raw_hash)
+ hash = Representation.symbolize_hash(raw_hash)
+
+ hash[:state] = hash[:state].to_sym
+ hash[:author] &&= Representation::User.from_json_hash(hash[:author])
+
+ # The assignee is optional, so we only convert it from a Hash if one
+ # was set.
+ hash[:assignee] &&= Representation::User
+ .from_json_hash(hash[:assignee])
+
+ new(hash)
+ end
+
+ # attributes - A Hash containing the raw PR details. The keys of this
+ # Hash (and any nested hashes) must be symbols.
+ def initialize(attributes)
+ @attributes = attributes
+ end
+
+ def truncated_title
+ title.truncate(255)
+ end
+
+ # Returns a formatted source branch.
+ #
+ # For cross-project pull requests the branch name will be in the format
+ # `owner-name:branch-name`.
+ def formatted_source_branch
+ if cross_project? && source_repository_owner
+ "#{source_repository_owner}:#{source_branch}"
+ elsif source_branch == target_branch
+ # Sometimes the source and target branch are the same, but GitLab
+ # doesn't support this. This can happen when both the user and
+ # source repository have been deleted, and the PR was submitted from
+ # the fork's master branch.
+ "#{source_branch}-#{iid}"
+ else
+ source_branch
+ end
+ end
+
+ def state
+ if merged_at
+ :merged
+ else
+ attributes[:state]
+ end
+ end
+
+ def cross_project?
+ return true unless source_repository_id
+
+ source_repository_id != target_repository_id
+ end
+
+ def issuable_type
+ 'MergeRequest'
+ end
+ end
+ end
+ end
+end
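Usage note: a sketch of the branch-name handling, built from a minimal attribute hash (values are illustrative):

    pr = Gitlab::GithubImport::Representation::PullRequest.new(
      iid: 123,
      source_branch: 'master',
      target_branch: 'master',
      source_repository_id: 1,
      target_repository_id: 1,
      source_repository_owner: 'alice'
    )

    pr.cross_project?
    # => false, since both sides point at the same repository ID

    pr.formatted_source_branch
    # => "master-123": the IID is appended because GitLab does not allow the
    #    source and target branch to share a name. A cross-project PR would
    #    instead yield "alice:master".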
diff --git a/lib/gitlab/github_import/representation/to_hash.rb b/lib/gitlab/github_import/representation/to_hash.rb
new file mode 100644
index 00000000000..4a0f36ab8f0
--- /dev/null
+++ b/lib/gitlab/github_import/representation/to_hash.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Representation
+ module ToHash
+ # Converts the current representation to a Hash. The keys of this Hash
+ # will be Symbols.
+ def to_hash
+ hash = {}
+
+ attributes.each do |key, value|
+ hash[key] = convert_value_for_to_hash(value)
+ end
+
+ hash
+ end
+
+ def convert_value_for_to_hash(value)
+ if value.is_a?(Array)
+ value.map { |v| convert_value_for_to_hash(v) }
+ elsif value.respond_to?(:to_hash)
+ value.to_hash
+ else
+ value
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/representation/user.rb b/lib/gitlab/github_import/representation/user.rb
new file mode 100644
index 00000000000..e00dcfca33d
--- /dev/null
+++ b/lib/gitlab/github_import/representation/user.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Representation
+ class User
+ include ToHash
+ include ExposeAttribute
+
+ attr_reader :attributes
+
+ expose_attribute :id, :login
+
+ # Builds a user from a GitHub API response.
+ #
+ # user - An instance of `Sawyer::Resource` containing the user details.
+ def self.from_api_response(user)
+ new(id: user.id, login: user.login)
+ end
+
+ # Builds a user using a Hash that was built from a JSON payload.
+ def self.from_json_hash(raw_hash)
+ new(Representation.symbolize_hash(raw_hash))
+ end
+
+ # attributes - A Hash containing the user details. The keys of this
+ # Hash (and any nested hashes) must be symbols.
+ def initialize(attributes)
+ @attributes = attributes
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/sequential_importer.rb b/lib/gitlab/github_import/sequential_importer.rb
new file mode 100644
index 00000000000..4f7324536a0
--- /dev/null
+++ b/lib/gitlab/github_import/sequential_importer.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ # The SequentialImporter imports a GitHub project in a single thread,
+ # without using Sidekiq. This makes it useful for testing purposes as well
+ # as Rake tasks, but it should be avoided for anything else in favour of the
+ # parallel importer.
+ class SequentialImporter
+ attr_reader :project, :client
+
+ SEQUENTIAL_IMPORTERS = [
+ Importer::LabelsImporter,
+ Importer::MilestonesImporter,
+ Importer::ReleasesImporter
+ ].freeze
+
+ PARALLEL_IMPORTERS = [
+ Importer::PullRequestsImporter,
+ Importer::IssuesImporter,
+ Importer::DiffNotesImporter,
+ Importer::NotesImporter
+ ].freeze
+
+ # project - The project to import the data into.
+ # token - The token to use for the GitHub API.
+ def initialize(project, token: nil)
+ @project = project
+ @client = GithubImport
+ .new_client_for(project, token: token, parallel: false)
+ end
+
+ def execute
+ Importer::RepositoryImporter.new(project, client).execute
+
+ SEQUENTIAL_IMPORTERS.each do |klass|
+ klass.new(project, client).execute
+ end
+
+ PARALLEL_IMPORTERS.each do |klass|
+ klass.new(project, client, parallel: false).execute
+ end
+
+ project.repository.after_import
+
+ true
+ end
+ end
+ end
+end
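Usage note: a one-off sequential import could be driven like this from a Rake task or console session; `token` is assumed to be a GitHub access token with access to the source repository:

    Gitlab::GithubImport::SequentialImporter
      .new(project, token: token)
      .execute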
diff --git a/lib/gitlab/github_import/user_finder.rb b/lib/gitlab/github_import/user_finder.rb
new file mode 100644
index 00000000000..be1259662a7
--- /dev/null
+++ b/lib/gitlab/github_import/user_finder.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ # Class that can be used for finding a GitLab user ID based on a GitHub user
+ # ID or username.
+ #
+ # Any found user IDs are cached in Redis to reduce the number of SQL queries
+ # executed over time. Valid keys are refreshed upon access so frequently
+ # used keys stick around.
+ #
+ # Lookups are cached even if no ID was found to remove the need for querying
+ # the database when most queries are not going to return results anyway.
+ class UserFinder
+ attr_reader :project, :client
+
+ # The base cache key to use for caching user IDs for a given GitHub user
+ # ID.
+ ID_CACHE_KEY = 'github-import/user-finder/user-id/%s'.freeze
+
+ # The base cache key to use for caching user IDs for a given GitHub email
+ # address.
+ ID_FOR_EMAIL_CACHE_KEY =
+ 'github-import/user-finder/id-for-email/%s'.freeze
+
+ # The base cache key to use for caching the Email addresses of GitHub
+ # usernames.
+ EMAIL_FOR_USERNAME_CACHE_KEY =
+ 'github-import/user-finder/email-for-username/%s'.freeze
+
+ # project - An instance of `Project`
+ # client - An instance of `Gitlab::GithubImport::Client`
+ def initialize(project, client)
+ @project = project
+ @client = client
+ end
+
+ # Returns the GitLab user ID of an object's author.
+ #
+ # If the object has no author ID we'll use the ID of the GitLab ghost
+ # user.
+ def author_id_for(object)
+ id =
+ if object&.author
+ user_id_for(object.author)
+ else
+ GithubImport.ghost_user_id
+ end
+
+ if id
+ [id, true]
+ else
+ [project.creator_id, false]
+ end
+ end
+
+ # Returns the GitLab user ID of an issuable's assignee.
+ def assignee_id_for(issuable)
+ user_id_for(issuable.assignee) if issuable.assignee
+ end
+
+ # Returns the GitLab user ID for a GitHub user.
+ #
+ # user - An instance of `Gitlab::GithubImport::Representation::User`.
+ def user_id_for(user)
+ find(user.id, user.login)
+ end
+
+ # Returns the GitLab ID for the given GitHub ID or username.
+ #
+ # id - The ID of the GitHub user.
+ # username - The username of the GitHub user.
+ def find(id, username)
+ email = email_for_github_username(username)
+ cached, found_id = find_from_cache(id, email)
+
+ return found_id if found_id
+
+ # We only want to query the database if necessary. If previous lookups
+ # didn't yield a user ID we won't query the database again until the
+ # keys expire.
+ find_id_from_database(id, email) unless cached
+ end
+
+ # Finds a user ID from the cache for a given GitHub ID or Email.
+ def find_from_cache(id, email = nil)
+ id_exists, id_for_github_id = cached_id_for_github_id(id)
+
+ return [id_exists, id_for_github_id] if id_for_github_id
+
+ # Just in case no Email address could be retrieved (for whatever reason)
+ return [false] unless email
+
+ cached_id_for_github_email(email)
+ end
+
+ # Finds a GitLab user ID from the database for a given GitHub user ID or
+ # Email.
+ def find_id_from_database(id, email)
+ id_for_github_id(id) || id_for_github_email(email)
+ end
+
+ def email_for_github_username(username)
+ cache_key = EMAIL_FOR_USERNAME_CACHE_KEY % username
+ email = Caching.read(cache_key)
+
+ unless email
+ user = client.user(username)
+ email = Caching.write(cache_key, user.email) if user
+ end
+
+ email
+ end
+
+ def cached_id_for_github_id(id)
+ read_id_from_cache(ID_CACHE_KEY % id)
+ end
+
+ def cached_id_for_github_email(email)
+ read_id_from_cache(ID_FOR_EMAIL_CACHE_KEY % email)
+ end
+
+ # Queries and caches the GitLab user ID for a GitHub user ID, if one was
+ # found.
+ def id_for_github_id(id)
+ gitlab_id = query_id_for_github_id(id) || nil
+
+ Caching.write(ID_CACHE_KEY % id, gitlab_id)
+ end
+
+ # Queries and caches the GitLab user ID for a GitHub email, if one was
+ # found.
+ def id_for_github_email(email)
+ gitlab_id = query_id_for_github_email(email) || nil
+
+ Caching.write(ID_FOR_EMAIL_CACHE_KEY % email, gitlab_id)
+ end
+
+ def query_id_for_github_id(id)
+ User.for_github_id(id).pluck(:id).first
+ end
+
+ def query_id_for_github_email(email)
+ User.by_any_email(email).pluck(:id).first
+ end
+
+ # Reads an ID from the cache.
+ #
+ # The return value is an Array with two values:
+ #
+ # 1. A boolean indicating if the key was present or not.
+ # 2. The ID as an Integer, or nil in case no ID could be found.
+ def read_id_from_cache(key)
+ value = Caching.read(key)
+ exists = !value.nil?
+ number = value.to_i
+
+ # The cache key may be empty to indicate a previously looked up user for
+ # which we couldn't find an ID.
+ [exists, number.positive? ? number : nil]
+ end
+ end
+ end
+end
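Usage note: a sketch of how importers consume the two-element return value of author_id_for, matching the pattern used in IssueImporter and NoteImporter above; `issue` is assumed to be an issue representation:

    finder = Gitlab::GithubImport::UserFinder.new(project, client)

    author_id, author_found = finder.author_id_for(issue)

    # When no GitLab counterpart exists, author_id falls back to
    # project.creator_id and the Markdown text credits the original author.
    description = Gitlab::GithubImport::MarkdownText
      .format(issue.description, issue.author, author_found)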
diff --git a/lib/gitlab/gitlab_import/client.rb b/lib/gitlab/gitlab_import/client.rb
index f1007daab5d..075b3982608 100644
--- a/lib/gitlab/gitlab_import/client.rb
+++ b/lib/gitlab/gitlab_import/client.rb
@@ -65,6 +65,7 @@ module Gitlab
y << item
end
break if items.empty? || items.size < per_page
+
page += 1
end
end
diff --git a/lib/gitlab/gon_helper.rb b/lib/gitlab/gon_helper.rb
index 3a666c2268b..a7e055ac444 100644
--- a/lib/gitlab/gon_helper.rb
+++ b/lib/gitlab/gon_helper.rb
@@ -3,24 +3,24 @@
module Gitlab
module GonHelper
include WebpackHelper
- include Gitlab::CurrentSettings
def add_gon_variables
gon.api_version = 'v4'
gon.default_avatar_url = URI.join(Gitlab.config.gitlab.url, ActionController::Base.helpers.image_path('no_avatar.png')).to_s
- gon.max_file_size = current_application_settings.max_attachment_size
+ gon.max_file_size = Gitlab::CurrentSettings.max_attachment_size
gon.asset_host = ActionController::Base.asset_host
gon.webpack_public_path = webpack_public_path
gon.relative_url_root = Gitlab.config.gitlab.relative_url_root
gon.shortcuts_path = help_page_path('shortcuts')
gon.user_color_scheme = Gitlab::ColorSchemes.for_user(current_user).css_class
- gon.katex_css_url = ActionController::Base.helpers.asset_path('katex.css')
- gon.katex_js_url = ActionController::Base.helpers.asset_path('katex.js')
- gon.sentry_dsn = current_application_settings.clientside_sentry_dsn if current_application_settings.clientside_sentry_enabled
+ gon.sentry_dsn = Gitlab::CurrentSettings.clientside_sentry_dsn if Gitlab::CurrentSettings.clientside_sentry_enabled
gon.gitlab_url = Gitlab.config.gitlab.url
gon.revision = Gitlab::REVISION
gon.gitlab_logo = ActionController::Base.helpers.asset_path('gitlab_logo.png')
- gon.sprite_icons = ActionController::Base.helpers.asset_path('icons.svg')
+ gon.sprite_icons = IconsHelper.sprite_icon_path
+ gon.sprite_file_icons = IconsHelper.sprite_file_icons_path
+ gon.test_env = Rails.env.test?
+ gon.suggested_label_colors = LabelsHelper.suggested_colors
if current_user
gon.current_user_id = current_user.id
diff --git a/lib/gitlab/google_code_import/importer.rb b/lib/gitlab/google_code_import/importer.rb
index ab38c0c3e34..46b49128140 100644
--- a/lib/gitlab/google_code_import/importer.rb
+++ b/lib/gitlab/google_code_import/importer.rb
@@ -302,6 +302,7 @@ module Gitlab
else
"#{project.namespace.full_path}/#{name}##{id}"
end
+
text = "~~#{text}~~" if deleted
text
end
@@ -329,6 +330,7 @@ module Gitlab
if content.blank?
content = "*(No comment has been entered for this change)*"
end
+
body << content
if updates.any?
@@ -352,6 +354,7 @@ module Gitlab
if content.blank?
content = "*(No description has been entered for this issue)*"
end
+
body << content
if attachments.any?
diff --git a/lib/gitlab/gpg.rb b/lib/gitlab/gpg.rb
index 0d5039ddf5f..413872d7e08 100644
--- a/lib/gitlab/gpg.rb
+++ b/lib/gitlab/gpg.rb
@@ -34,6 +34,21 @@ module Gitlab
end
end
+ def subkeys_from_key(key)
+ using_tmp_keychain do
+ fingerprints = CurrentKeyChain.fingerprints_from_key(key)
+ raw_keys = GPGME::Key.find(:public, fingerprints)
+
+ raw_keys.each_with_object({}) do |raw_key, grouped_subkeys|
+ primary_subkey_id = raw_key.primary_subkey.keyid
+
+ grouped_subkeys[primary_subkey_id] = raw_key.subkeys[1..-1].map do |s|
+ { keyid: s.keyid, fingerprint: s.fingerprint }
+ end
+ end
+ end
+ end
+
def user_infos_from_key(key)
using_tmp_keychain do
fingerprints = CurrentKeyChain.fingerprints_from_key(key)
diff --git a/lib/gitlab/gpg/commit.rb b/lib/gitlab/gpg/commit.rb
index 86bd9f5b125..6d2278d0876 100644
--- a/lib/gitlab/gpg/commit.rb
+++ b/lib/gitlab/gpg/commit.rb
@@ -1,19 +1,29 @@
module Gitlab
module Gpg
class Commit
+ include Gitlab::Utils::StrongMemoize
+
def initialize(commit)
@commit = commit
- @signature_text, @signed_text =
- begin
- Rugged::Commit.extract_signature(@commit.project.repository.rugged, @commit.sha)
- rescue Rugged::OdbError
- nil
- end
+ repo = commit.project.repository.raw_repository
+ @signature_data = Gitlab::Git::Commit.extract_signature_lazily(repo, commit.sha || commit.id)
+ end
+
+ def signature_text
+ strong_memoize(:signature_text) do
+ @signature_data&.itself && @signature_data[0]
+ end
+ end
+
+ def signed_text
+ strong_memoize(:signed_text) do
+ @signature_data&.itself && @signature_data[1]
+ end
end
def has_signature?
- !!(@signature_text && @signed_text)
+ !!(signature_text && signed_text)
end
def signature
@@ -43,7 +53,9 @@ module Gitlab
# key belonging to the keyid.
# This way we can add the key to the temporary keychain and extract
# the proper signature.
- gpg_key = GpgKey.find_by(primary_keyid: verified_signature.fingerprint)
+ # NOTE: the invoked method is #fingerprint but it's only returning
+ # 16 characters (the format used by keyid) instead of 40.
+ gpg_key = find_gpg_key(verified_signature.fingerprint)
if gpg_key
Gitlab::Gpg::CurrentKeyChain.add(gpg_key.key)
@@ -55,14 +67,16 @@ module Gitlab
end
def verified_signature
- @verified_signature ||= GPGME::Crypto.new.verify(@signature_text, signed_text: @signed_text) do |verified_signature|
+ @verified_signature ||= GPGME::Crypto.new.verify(signature_text, signed_text: signed_text) do |verified_signature|
break verified_signature
end
end
def create_cached_signature!
using_keychain do |gpg_key|
- GpgSignature.create!(attributes(gpg_key))
+ signature = GpgSignature.new(attributes(gpg_key))
+ signature.save! unless Gitlab::Database.read_only?
+ signature
end
end
@@ -74,7 +88,7 @@ module Gitlab
commit_sha: @commit.sha,
project: @commit.project,
gpg_key: gpg_key,
- gpg_key_primary_keyid: gpg_key&.primary_keyid || verified_signature.fingerprint,
+ gpg_key_primary_keyid: gpg_key&.keyid || verified_signature.fingerprint,
gpg_key_user_name: user_infos[:name],
gpg_key_user_email: user_infos[:email],
verification_status: verification_status
@@ -98,6 +112,10 @@ module Gitlab
def user_infos(gpg_key)
gpg_key&.verified_user_infos&.first || gpg_key&.user_infos&.first || {}
end
+
+ def find_gpg_key(keyid)
+ GpgKey.find_by(primary_keyid: keyid) || GpgKeySubkey.find_by(keyid: keyid)
+ end
end
end
end
diff --git a/lib/gitlab/gpg/invalid_gpg_signature_updater.rb b/lib/gitlab/gpg/invalid_gpg_signature_updater.rb
index e085eab26c9..1991911ef6a 100644
--- a/lib/gitlab/gpg/invalid_gpg_signature_updater.rb
+++ b/lib/gitlab/gpg/invalid_gpg_signature_updater.rb
@@ -9,8 +9,8 @@ module Gitlab
GpgSignature
.select(:id, :commit_sha, :project_id)
.where('gpg_key_id IS NULL OR verification_status <> ?', GpgSignature.verification_statuses[:verified])
- .where(gpg_key_primary_keyid: @gpg_key.primary_keyid)
- .find_each { |sig| sig.gpg_commit.update_signature!(sig) }
+ .where(gpg_key_primary_keyid: @gpg_key.keyids)
+ .find_each { |sig| sig.gpg_commit&.update_signature!(sig) }
end
end
end
diff --git a/lib/gitlab/grape_logging/loggers/user_logger.rb b/lib/gitlab/grape_logging/loggers/user_logger.rb
new file mode 100644
index 00000000000..fa172861967
--- /dev/null
+++ b/lib/gitlab/grape_logging/loggers/user_logger.rb
@@ -0,0 +1,18 @@
+# This grape_logging module (https://github.com/aserafin/grape_logging) makes it
+# possible to log the user who performed the Grape API action by retrieving
+# the user context from the request environment.
+module Gitlab
+ module GrapeLogging
+ module Loggers
+ class UserLogger < ::GrapeLogging::Loggers::Base
+ def parameters(request, _)
+ params = request.env[::API::Helpers::API_USER_ENV]
+
+ return {} unless params
+
+ params.slice(:user_id, :username)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/group_hierarchy.rb b/lib/gitlab/group_hierarchy.rb
index 635f52131f9..42ded7c286f 100644
--- a/lib/gitlab/group_hierarchy.rb
+++ b/lib/gitlab/group_hierarchy.rb
@@ -17,12 +17,32 @@ module Gitlab
@model = ancestors_base.model
end
+ # Returns the set of descendants of a given relation, but excluding the given
+ # relation
+ def descendants
+ base_and_descendants.where.not(id: descendants_base.select(:id))
+ end
+
+ # Returns the set of ancestors of a given relation, but excluding the given
+ # relation
+ #
+ # Passing an `upto` will stop the recursion once the specified parent_id is
+ # reached. So all ancestors *lower* than the specified ancestor will be
+ # included.
+ def ancestors(upto: nil)
+ base_and_ancestors(upto: upto).where.not(id: ancestors_base.select(:id))
+ end
+
# Returns a relation that includes the ancestors_base set of groups
# and all their ancestors (recursively).
- def base_and_ancestors
+ #
+ # Passing an `upto` will stop the recursion once the specified parent_id is
+ # reached. So all ancestors *lower* than the specified ancestor will be
+ # included.
+ def base_and_ancestors(upto: nil)
return ancestors_base unless Group.supports_nested_groups?
- read_only(base_and_ancestors_cte.apply_to(model.all))
+ read_only(base_and_ancestors_cte(upto).apply_to(model.all))
end
# Returns a relation that includes the descendants_base set of groups
@@ -78,17 +98,19 @@ module Gitlab
private
- def base_and_ancestors_cte
+ def base_and_ancestors_cte(stop_id = nil)
cte = SQL::RecursiveCTE.new(:base_and_ancestors)
cte << ancestors_base.except(:order)
# Recursively get all the ancestors of the base set.
- cte << model
+ parent_query = model
.from([groups_table, cte.table])
.where(groups_table[:id].eq(cte.table[:parent_id]))
.except(:order)
+ parent_query = parent_query.where(cte.table[:parent_id].not_eq(stop_id)) if stop_id
+ cte << parent_query
cte
end
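
A rough usage sketch of the new `upto:` option (`subgroup` and `root_group` are
placeholders): the recursion stops once the given parent_id is reached, so only
ancestors strictly below that group are returned.

  hierarchy = Gitlab::GroupHierarchy.new(Group.where(id: subgroup.id))
  # Ancestors of subgroup, excluding root_group itself and anything above it.
  hierarchy.ancestors(upto: root_group.id)
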
diff --git a/lib/gitlab/health_checks/fs_shards_check.rb b/lib/gitlab/health_checks/fs_shards_check.rb
index a1905b091b4..afaa59b1018 100644
--- a/lib/gitlab/health_checks/fs_shards_check.rb
+++ b/lib/gitlab/health_checks/fs_shards_check.rb
@@ -125,7 +125,7 @@ module Gitlab
end
def storage_circuitbreaker_test(storage_name)
- Gitlab::Git::Storage::CircuitBreaker.new(storage_name).perform { "OK" }
+ Gitlab::Git::Storage::CircuitBreaker.build(storage_name).perform { "OK" }
rescue Gitlab::Git::Storage::Inaccessible
nil
end
diff --git a/lib/gitlab/health_checks/gitaly_check.rb b/lib/gitlab/health_checks/gitaly_check.rb
new file mode 100644
index 00000000000..11416c002e3
--- /dev/null
+++ b/lib/gitlab/health_checks/gitaly_check.rb
@@ -0,0 +1,53 @@
+module Gitlab
+ module HealthChecks
+ class GitalyCheck
+ extend BaseAbstractCheck
+
+ METRIC_PREFIX = 'gitaly_health_check'.freeze
+
+ class << self
+ def readiness
+ repository_storages.map do |storage_name|
+ check(storage_name)
+ end
+ end
+
+ def metrics
+ repository_storages.flat_map do |storage_name|
+ result, elapsed = with_timing { check(storage_name) }
+ labels = { shard: storage_name }
+
+ [
+ metric("#{metric_prefix}_success", successful?(result) ? 1 : 0, **labels),
+ metric("#{metric_prefix}_latency_seconds", elapsed, **labels)
+ ].flatten
+ end
+ end
+
+ def check(storage_name)
+ serv = Gitlab::GitalyClient::HealthCheckService.new(storage_name)
+ result = serv.check
+ HealthChecks::Result.new(result[:success], result[:message], shard: storage_name)
+ end
+
+ private
+
+ def metric_prefix
+ METRIC_PREFIX
+ end
+
+ def successful?(result)
+ result[:success]
+ end
+
+ def repository_storages
+ storages.keys
+ end
+
+ def storages
+ Gitlab.config.repositories.storages
+ end
+ end
+ end
+ end
+end
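
A brief consumption sketch, assuming the conventions of the other health checks
(return shapes shown in comments are illustrative, not exact):

  # One Result per configured repository storage shard.
  Gitlab::HealthChecks::GitalyCheck.readiness
  # => [Result(success, message, shard: "default"), ...]

  # Success flag and latency metrics per shard, for the metrics endpoint.
  Gitlab::HealthChecks::GitalyCheck.metrics
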
diff --git a/lib/gitlab/hook_data/issuable_builder.rb b/lib/gitlab/hook_data/issuable_builder.rb
new file mode 100644
index 00000000000..4febb0ab430
--- /dev/null
+++ b/lib/gitlab/hook_data/issuable_builder.rb
@@ -0,0 +1,56 @@
+module Gitlab
+ module HookData
+ class IssuableBuilder
+ CHANGES_KEYS = %i[previous current].freeze
+
+ attr_accessor :issuable
+
+ def initialize(issuable)
+ @issuable = issuable
+ end
+
+ def build(user: nil, changes: {})
+ hook_data = {
+ object_kind: issuable.class.name.underscore,
+ user: user.hook_attrs,
+ project: issuable.project.hook_attrs,
+ object_attributes: issuable.hook_attrs,
+ labels: issuable.labels.map(&:hook_attrs),
+ changes: final_changes(changes.slice(*safe_keys)),
+ # DEPRECATED
+ repository: issuable.project.hook_attrs.slice(:name, :url, :description, :homepage)
+ }
+
+ if issuable.is_a?(Issue)
+ hook_data[:assignees] = issuable.assignees.map(&:hook_attrs) if issuable.assignees.any?
+ else
+ hook_data[:assignee] = issuable.assignee.hook_attrs if issuable.assignee
+ end
+
+ hook_data
+ end
+
+ def safe_keys
+ issuable_builder::SAFE_HOOK_ATTRIBUTES + issuable_builder::SAFE_HOOK_RELATIONS
+ end
+
+ private
+
+ def issuable_builder
+ case issuable
+ when Issue
+ Gitlab::HookData::IssueBuilder
+ when MergeRequest
+ Gitlab::HookData::MergeRequestBuilder
+ end
+ end
+
+ def final_changes(changes_hash)
+ changes_hash.reduce({}) do |hash, (key, changes_array)|
+ hash[key] = Hash[CHANGES_KEYS.zip(changes_array)]
+ hash
+ end
+ end
+ end
+ end
+end
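
Hedged usage sketch (`issue` and `current_user` are placeholders); `changes`
entries outside SAFE_HOOK_ATTRIBUTES/SAFE_HOOK_RELATIONS are dropped and the
remaining before/after arrays are converted into previous/current pairs:

  data = Gitlab::HookData::IssuableBuilder.new(issue).build(
    user: current_user,
    changes: { title: ['Old title', 'New title'] }
  )
  data[:changes] # => { title: { previous: 'Old title', current: 'New title' } }
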
diff --git a/lib/gitlab/hook_data/issue_builder.rb b/lib/gitlab/hook_data/issue_builder.rb
new file mode 100644
index 00000000000..f9b1a3caf5e
--- /dev/null
+++ b/lib/gitlab/hook_data/issue_builder.rb
@@ -0,0 +1,54 @@
+module Gitlab
+ module HookData
+ class IssueBuilder
+ SAFE_HOOK_ATTRIBUTES = %i[
+ assignee_id
+ author_id
+ closed_at
+ confidential
+ created_at
+ description
+ due_date
+ id
+ iid
+ last_edited_at
+ last_edited_by_id
+ milestone_id
+ moved_to_id
+ project_id
+ relative_position
+ state
+ time_estimate
+ title
+ updated_at
+ updated_by_id
+ ].freeze
+
+ SAFE_HOOK_RELATIONS = %i[
+ assignees
+ labels
+ total_time_spent
+ ].freeze
+
+ attr_accessor :issue
+
+ def initialize(issue)
+ @issue = issue
+ end
+
+ def build
+ attrs = {
+ url: Gitlab::UrlBuilder.build(issue),
+ total_time_spent: issue.total_time_spent,
+ human_total_time_spent: issue.human_total_time_spent,
+ human_time_estimate: issue.human_time_estimate,
+ assignee_ids: issue.assignee_ids,
+ assignee_id: issue.assignee_ids.first # This key is deprecated
+ }
+
+ issue.attributes.with_indifferent_access.slice(*SAFE_HOOK_ATTRIBUTES)
+ .merge!(attrs)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/hook_data/merge_request_builder.rb b/lib/gitlab/hook_data/merge_request_builder.rb
new file mode 100644
index 00000000000..aff786864f2
--- /dev/null
+++ b/lib/gitlab/hook_data/merge_request_builder.rb
@@ -0,0 +1,61 @@
+module Gitlab
+ module HookData
+ class MergeRequestBuilder
+ SAFE_HOOK_ATTRIBUTES = %i[
+ assignee_id
+ author_id
+ created_at
+ description
+ head_pipeline_id
+ id
+ iid
+ last_edited_at
+ last_edited_by_id
+ merge_commit_sha
+ merge_error
+ merge_params
+ merge_status
+ merge_user_id
+ merge_when_pipeline_succeeds
+ milestone_id
+ source_branch
+ source_project_id
+ state
+ target_branch
+ target_project_id
+ time_estimate
+ title
+ updated_at
+ updated_by_id
+ ].freeze
+
+ SAFE_HOOK_RELATIONS = %i[
+ assignee
+ labels
+ total_time_spent
+ ].freeze
+
+ attr_accessor :merge_request
+
+ def initialize(merge_request)
+ @merge_request = merge_request
+ end
+
+ def build
+ attrs = {
+ url: Gitlab::UrlBuilder.build(merge_request),
+ source: merge_request.source_project.try(:hook_attrs),
+ target: merge_request.target_project.hook_attrs,
+ last_commit: merge_request.diff_head_commit&.hook_attrs,
+ work_in_progress: merge_request.work_in_progress?,
+ total_time_spent: merge_request.total_time_spent,
+ human_total_time_spent: merge_request.human_total_time_spent,
+ human_time_estimate: merge_request.human_time_estimate
+ }
+
+ merge_request.attributes.with_indifferent_access.slice(*SAFE_HOOK_ATTRIBUTES)
+ .merge!(attrs)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/i18n.rb b/lib/gitlab/i18n.rb
index bdc0f04b56b..3772ef11c7f 100644
--- a/lib/gitlab/i18n.rb
+++ b/lib/gitlab/i18n.rb
@@ -18,7 +18,10 @@ module Gitlab
'uk' => 'Українська',
'ja' => '日本語',
'ko' => '한국어',
- 'nl_NL' => 'Nederlands'
+ 'nl_NL' => 'Nederlands',
+ 'tr_TR' => 'Türkçe',
+ 'id_ID' => 'Bahasa Indonesia',
+ 'fil_PH' => 'Filipino'
}.freeze
def available_locales
diff --git a/lib/gitlab/identifier.rb b/lib/gitlab/identifier.rb
index 94678b6ec40..3f3f10596c5 100644
--- a/lib/gitlab/identifier.rb
+++ b/lib/gitlab/identifier.rb
@@ -2,9 +2,8 @@
# key-13 or user-36 or last commit
module Gitlab
module Identifier
- def identify(identifier, project, newrev)
+ def identify(identifier, project = nil, newrev = nil)
if identifier.blank?
- # Local push from gitlab
identify_using_commit(project, newrev)
elsif identifier =~ /\Auser-\d+\Z/
# git push over http
@@ -17,6 +16,8 @@ module Gitlab
# Tries to identify a user based on a commit SHA.
def identify_using_commit(project, ref)
+ return if project.nil? && ref.nil?
+
commit = project.commit(ref)
return if !commit || !commit.author_email
diff --git a/lib/gitlab/import_export.rb b/lib/gitlab/import_export.rb
index 50ee879129c..b713fa7e1cd 100644
--- a/lib/gitlab/import_export.rb
+++ b/lib/gitlab/import_export.rb
@@ -3,7 +3,7 @@ module Gitlab
extend self
# For every version update, the version history in import_export.md has to be kept up to date.
- VERSION = '0.2.0'.freeze
+ VERSION = '0.2.3'.freeze
FILENAME_LIMIT = 50
def export_path(relative_path:)
diff --git a/lib/gitlab/import_export/command_line_util.rb b/lib/gitlab/import_export/command_line_util.rb
index 90942774a2e..2f163db936b 100644
--- a/lib/gitlab/import_export/command_line_util.rb
+++ b/lib/gitlab/import_export/command_line_util.rb
@@ -11,10 +11,6 @@ module Gitlab
untar_with_options(archive: archive, dir: dir, options: 'zxf')
end
- def git_bundle(repo_path:, bundle_path:)
- execute(%W(#{git_bin_path} --git-dir=#{repo_path} bundle create #{bundle_path} --all))
- end
-
def mkdir_p(path)
FileUtils.mkdir_p(path, mode: DEFAULT_MODE)
FileUtils.chmod(DEFAULT_MODE, path)
@@ -32,7 +28,7 @@ module Gitlab
def execute(cmd)
output, status = Gitlab::Popen.popen(cmd)
- @shared.error(Gitlab::ImportExport::Error.new(output.to_s)) unless status.zero?
+ @shared.error(Gitlab::ImportExport::Error.new(output.to_s)) unless status.zero? # rubocop:disable Gitlab/ModuleWithInstanceVariables
status.zero?
end
diff --git a/lib/gitlab/import_export/file_importer.rb b/lib/gitlab/import_export/file_importer.rb
index 989342389bc..0f4c3498036 100644
--- a/lib/gitlab/import_export/file_importer.rb
+++ b/lib/gitlab/import_export/file_importer.rb
@@ -17,12 +17,16 @@ module Gitlab
def import
mkdir_p(@shared.export_path)
+ remove_symlinks!
+
wait_for_archived_file do
decompress_archive
end
rescue => e
@shared.error(e)
false
+ ensure
+ remove_symlinks!
end
private
@@ -43,7 +47,7 @@ module Gitlab
raise Projects::ImportService::Error.new("Unable to decompress #{@archive_file} into #{@shared.export_path}") unless result
- remove_symlinks!
+ result
end
def remove_symlinks!
@@ -55,7 +59,7 @@ module Gitlab
end
def extracted_files
- Dir.glob("#{@shared.export_path}/**/*", File::FNM_DOTMATCH).reject { |f| f =~ /.*\/\.{1,2}$/ }
+ Dir.glob("#{@shared.export_path}/**/*", File::FNM_DOTMATCH).reject { |f| f =~ %r{.*/\.{1,2}$} }
end
end
end
diff --git a/lib/gitlab/import_export/import_export.yml b/lib/gitlab/import_export/import_export.yml
index 2171c6c7bbb..4bdd01f5e94 100644
--- a/lib/gitlab/import_export/import_export.yml
+++ b/lib/gitlab/import_export/import_export.yml
@@ -19,6 +19,7 @@ project_tree:
- milestone:
- events:
- :push_event_payload
+ - :issue_assignees
- snippets:
- :award_emoji
- notes:
@@ -26,6 +27,8 @@ project_tree:
- :releases
- project_members:
- :user
+ - lfs_file_locks:
+ - :user
- merge_requests:
- notes:
- :author
@@ -48,8 +51,8 @@ project_tree:
- :author
- events:
- :push_event_payload
- - :stages
- - :statuses
+ - stages:
+ - :statuses
- :auto_devops
- :triggers
- :pipeline_schedules
@@ -61,6 +64,8 @@ project_tree:
- protected_tags:
- :create_access_levels
- :project_feature
+ - :custom_attributes
+ - :project_badges
# Only include the following attributes for the models specified.
included_attributes:
@@ -112,6 +117,7 @@ excluded_attributes:
- :milestone_id
- :ref_fetched
- :merge_jid
+ - :latest_merge_request_diff_id
award_emoji:
- :awardable_id
statuses:
@@ -120,6 +126,8 @@ excluded_attributes:
- :when
push_event_payload:
- :event_id
+ project_badges:
+ - :group_id
methods:
labels:
@@ -130,8 +138,6 @@ methods:
- :type
services:
- :type
- merge_request_diff:
- - :utf8_st_diffs
merge_request_diff_files:
- :utf8_diff
merge_requests:
@@ -144,3 +150,5 @@ methods:
- :action
push_event_payload:
- :action
+ project_badges:
+ - :type
diff --git a/lib/gitlab/import_export/importer.rb b/lib/gitlab/import_export/importer.rb
index fbdd74788bc..c38df9102eb 100644
--- a/lib/gitlab/import_export/importer.rb
+++ b/lib/gitlab/import_export/importer.rb
@@ -1,11 +1,15 @@
module Gitlab
module ImportExport
class Importer
+ def self.imports_repository?
+ true
+ end
+
def initialize(project)
@archive_file = project.import_source
@current_user = project.creator
@project = project
- @shared = Gitlab::ImportExport::Shared.new(relative_path: path_with_namespace)
+ @shared = project.import_export_shared
end
def execute
@@ -46,9 +50,10 @@ module Gitlab
end
def wiki_restorer
- Gitlab::ImportExport::RepoRestorer.new(path_to_bundle: wiki_repo_path,
+ Gitlab::ImportExport::WikiRestorer.new(path_to_bundle: wiki_repo_path,
shared: @shared,
- project: ProjectWiki.new(project_tree.restored_project))
+ project: ProjectWiki.new(project_tree.restored_project),
+ wiki_enabled: @project.wiki_enabled?)
end
def uploads_restorer
diff --git a/lib/gitlab/import_export/merge_request_parser.rb b/lib/gitlab/import_export/merge_request_parser.rb
index 81a213e8321..f3d7407383c 100644
--- a/lib/gitlab/import_export/merge_request_parser.rb
+++ b/lib/gitlab/import_export/merge_request_parser.rb
@@ -1,7 +1,7 @@
module Gitlab
module ImportExport
class MergeRequestParser
- FORKED_PROJECT_ID = -1
+ FORKED_PROJECT_ID = nil
def initialize(project, diff_head_sha, merge_request, relation_hash)
@project = project
@@ -26,7 +26,7 @@ module Gitlab
end
def fetch_ref
- @project.repository.fetch_ref(@project.repository.path, @diff_head_sha, @merge_request.source_branch)
+ @project.repository.fetch_ref(@project.repository, source_ref: @diff_head_sha, target_ref: @merge_request.source_branch)
end
def branch_exists?(branch_name)
diff --git a/lib/gitlab/import_export/project_creator.rb b/lib/gitlab/import_export/project_creator.rb
deleted file mode 100644
index 77bb3ca6581..00000000000
--- a/lib/gitlab/import_export/project_creator.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-module Gitlab
- module ImportExport
- class ProjectCreator
- def initialize(namespace_id, current_user, file, project_path)
- @namespace_id = namespace_id
- @current_user = current_user
- @file = file
- @project_path = project_path
- end
-
- def execute
- ::Projects::CreateService.new(
- @current_user,
- name: @project_path,
- path: @project_path,
- namespace_id: @namespace_id,
- import_type: "gitlab_project",
- import_source: @file
- ).execute
- end
- end
- end
-end
diff --git a/lib/gitlab/import_export/project_tree_restorer.rb b/lib/gitlab/import_export/project_tree_restorer.rb
index 3bc095a99a9..4b5f9f3a926 100644
--- a/lib/gitlab/import_export/project_tree_restorer.rb
+++ b/lib/gitlab/import_export/project_tree_restorer.rb
@@ -2,7 +2,7 @@ module Gitlab
module ImportExport
class ProjectTreeRestorer
# Relations which cannot have both group_id and project_id at the same time
- RESTRICT_PROJECT_AND_GROUP = %i(milestones).freeze
+ RESTRICT_PROJECT_AND_GROUP = %i(milestone milestones).freeze
def initialize(user:, shared:, project:)
@path = File.join(shared.export_path, 'project.json')
@@ -60,6 +60,8 @@ module Gitlab
end
end
+ @project.merge_requests.set_latest_merge_request_diff_ids!
+
@saved
end
@@ -146,6 +148,7 @@ module Gitlab
else
relation_hash = relation_item[sub_relation.to_s]
end
+
[relation_hash, sub_relation]
end
diff --git a/lib/gitlab/import_export/relation_factory.rb b/lib/gitlab/import_export/relation_factory.rb
index 380b336395d..cf6b7e306dd 100644
--- a/lib/gitlab/import_export/relation_factory.rb
+++ b/lib/gitlab/import_export/relation_factory.rb
@@ -15,7 +15,9 @@ module Gitlab
labels: :project_labels,
priorities: :label_priorities,
auto_devops: :project_auto_devops,
- label: :project_label }.freeze
+ label: :project_label,
+ custom_attributes: 'ProjectCustomAttribute',
+ project_badges: 'Badge' }.freeze
USER_REFERENCES = %w[author_id assignee_id updated_by_id user_id created_by_id last_edited_by_id merge_user_id resolved_by_id].freeze
@@ -35,7 +37,7 @@ module Gitlab
def initialize(relation_sym:, relation_hash:, members_mapper:, user:, project:)
@relation_name = OVERRIDES[relation_sym] || relation_sym
- @relation_hash = relation_hash.except('noteable_id').merge('project_id' => project.id)
+ @relation_hash = relation_hash.except('noteable_id')
@members_mapper = members_mapper
@user = user
@project = project
@@ -56,22 +58,21 @@ module Gitlab
private
def setup_models
- if @relation_name == :notes
- set_note_author
-
- # attachment is deprecated and note uploads are handled by Markdown uploader
- @relation_hash['attachment'] = nil
+ case @relation_name
+ when :merge_request_diff_files then setup_diff
+ when :notes then setup_note
+ when :project_label, :project_labels then setup_label
+ when :milestone, :milestones then setup_milestone
+ when 'Ci::Pipeline' then setup_pipeline
+ else
+ @relation_hash['project_id'] = @project.id
end
update_user_references
update_project_references
- handle_group_label if group_label?
reset_tokens!
remove_encrypted_attributes!
-
- set_st_diff_commits if @relation_name == :merge_request_diff
- set_diff if @relation_name == :merge_request_diff_files
end
def update_user_references
@@ -82,6 +83,12 @@ module Gitlab
end
end
+ def setup_note
+ set_note_author
+ # attachment is deprecated and note uploads are handled by Markdown uploader
+ @relation_hash['attachment'] = nil
+ end
+
# Sets the author for a note. If the user importing the project
# has admin access, an actual mapping with new project members
# will be used. Otherwise, a note stating the original author name
@@ -107,9 +114,7 @@ module Gitlab
@relation_hash.delete('trace') # old export files have trace
@relation_hash.delete('token')
- imported_object do |object|
- object.commit_id = nil
- end
+ imported_object
elsif @relation_name == :merge_requests
MergeRequestParser.new(@project, @relation_hash.delete('diff_head_sha'), imported_object, @relation_hash).parse!
else
@@ -134,20 +139,25 @@ module Gitlab
@relation_hash['target_project_id'] && @relation_hash['target_project_id'] == @relation_hash['source_project_id']
end
- def group_label?
- @relation_hash['type'] == 'GroupLabel'
- end
-
- def handle_group_label
+ def setup_label
# If there's no group, move the label to a project label
- if @relation_hash['group_id']
+ if @relation_hash['type'] == 'GroupLabel' && @relation_hash['group_id']
@relation_hash['project_id'] = nil
@relation_name = :group_label
else
+ @relation_hash['group_id'] = nil
@relation_hash['type'] = 'ProjectLabel'
end
end
+ def setup_milestone
+ if @relation_hash['group_id']
+ @relation_hash['group_id'] = @project.group.id
+ else
+ @relation_hash['project_id'] = @project.id
+ end
+ end
+
def reset_tokens!
return unless Gitlab::ImportExport.reset_tokens? && TOKEN_RESET_MODELS.include?(@relation_name.to_s)
@@ -171,8 +181,9 @@ module Gitlab
end
def imported_object
- yield(existing_or_new_object) if block_given?
- existing_or_new_object.importing = true if existing_or_new_object.respond_to?(:importing)
+ if existing_or_new_object.respond_to?(:importing)
+ existing_or_new_object.importing = true
+ end
existing_or_new_object
rescue ActiveRecord::RecordNotUnique
@@ -196,15 +207,16 @@ module Gitlab
relation_class: relation_class)
end
- def set_st_diff_commits
- @relation_hash['st_diffs'] = @relation_hash.delete('utf8_st_diffs')
-
- HashUtil.deep_symbolize_array!(@relation_hash['st_diffs'])
- HashUtil.deep_symbolize_array_with_date!(@relation_hash['st_commits'])
+ def setup_diff
+ @relation_hash['diff'] = @relation_hash.delete('utf8_diff')
end
- def set_diff
- @relation_hash['diff'] = @relation_hash.delete('utf8_diff')
+ def setup_pipeline
+ @relation_hash.fetch('stages').each do |stage|
+ stage.statuses.each do |status|
+ status.pipeline = imported_object
+ end
+ end
end
def existing_or_new_object
@@ -248,7 +260,14 @@ module Gitlab
end
def find_or_create_object!
- finder_attributes = @relation_name == :group_label ? %w[title group_id] : %w[title project_id]
+ finder_attributes = if @relation_name == :group_label
+ %w[title group_id]
+ elsif parsed_relation_hash['project_id']
+ %w[title project_id]
+ else
+ %w[title group_id]
+ end
+
finder_hash = parsed_relation_hash.slice(*finder_attributes)
if label?
diff --git a/lib/gitlab/import_export/repo_restorer.rb b/lib/gitlab/import_export/repo_restorer.rb
index 32ca2809b2f..5a9bbceac67 100644
--- a/lib/gitlab/import_export/repo_restorer.rb
+++ b/lib/gitlab/import_export/repo_restorer.rb
@@ -13,7 +13,7 @@ module Gitlab
def restore
return true unless File.exist?(@path_to_bundle)
- gitlab_shell.import_repository(@project.repository_storage_path, @project.disk_path, @path_to_bundle)
+ @project.repository.create_from_bundle(@path_to_bundle)
rescue => e
@shared.error(e)
false
diff --git a/lib/gitlab/import_export/repo_saver.rb b/lib/gitlab/import_export/repo_saver.rb
index a7028a32570..695462c7dd2 100644
--- a/lib/gitlab/import_export/repo_saver.rb
+++ b/lib/gitlab/import_export/repo_saver.rb
@@ -21,7 +21,7 @@ module Gitlab
def bundle_to_disk
mkdir_p(@shared.export_path)
- git_bundle(repo_path: path_to_repo, bundle_path: @full_path)
+ @project.repository.bundle_to_disk(@full_path)
rescue => e
@shared.error(e)
false
diff --git a/lib/gitlab/import_export/saver.rb b/lib/gitlab/import_export/saver.rb
index 6130c124dd1..2daeba90a51 100644
--- a/lib/gitlab/import_export/saver.rb
+++ b/lib/gitlab/import_export/saver.rb
@@ -37,7 +37,7 @@ module Gitlab
end
def archive_file
- @archive_file ||= File.join(@shared.export_path, '..', Gitlab::ImportExport.export_filename(project: @project))
+ @archive_file ||= File.join(@shared.archive_path, Gitlab::ImportExport.export_filename(project: @project))
end
end
end
diff --git a/lib/gitlab/import_export/shared.rb b/lib/gitlab/import_export/shared.rb
index 9fd0b709ef2..3d3d998a6a3 100644
--- a/lib/gitlab/import_export/shared.rb
+++ b/lib/gitlab/import_export/shared.rb
@@ -1,26 +1,47 @@
module Gitlab
module ImportExport
class Shared
- attr_reader :errors, :opts
+ attr_reader :errors, :project
- def initialize(opts)
- @opts = opts
+ def initialize(project)
+ @project = project
@errors = []
end
+ def active_export_count
+ Dir[File.join(archive_path, '*')].count { |name| File.directory?(name) }
+ end
+
def export_path
- @export_path ||= Gitlab::ImportExport.export_path(relative_path: opts[:relative_path])
+ @export_path ||= Gitlab::ImportExport.export_path(relative_path: relative_path)
+ end
+
+ def archive_path
+ @archive_path ||= Gitlab::ImportExport.export_path(relative_path: relative_archive_path)
end
def error(error)
error_out(error.message, caller[0].dup)
@errors << error.message
+
# Debug:
- Rails.logger.error(error.backtrace.join("\n"))
+ if error.backtrace
+ Rails.logger.error("Import/Export backtrace: #{error.backtrace.join("\n")}")
+ else
+ Rails.logger.error("No backtrace found")
+ end
end
private
+ def relative_path
+ File.join(relative_archive_path, SecureRandom.hex)
+ end
+
+ def relative_archive_path
+ @project.disk_path
+ end
+
def error_out(message, caller)
Rails.logger.error("Import/Export error raised on #{caller}: #{message}")
end
diff --git a/lib/gitlab/import_export/uploads_saver.rb b/lib/gitlab/import_export/uploads_saver.rb
index f9ae5079d7c..2f08dda55fd 100644
--- a/lib/gitlab/import_export/uploads_saver.rb
+++ b/lib/gitlab/import_export/uploads_saver.rb
@@ -17,16 +17,13 @@ module Gitlab
false
end
- private
+ def uploads_path
+ FileUploader.absolute_base_dir(@project)
+ end
def uploads_export_path
File.join(@shared.export_path, 'uploads')
end
-
- def uploads_path
- # TODO: decide what to do with uploads. We will use UUIDs here too?
- File.join(Rails.root.join('public/uploads'), @project.path_with_namespace)
- end
end
end
end
diff --git a/lib/gitlab/import_export/wiki_repo_saver.rb b/lib/gitlab/import_export/wiki_repo_saver.rb
index 1e6722a7bba..5fa2e101e29 100644
--- a/lib/gitlab/import_export/wiki_repo_saver.rb
+++ b/lib/gitlab/import_export/wiki_repo_saver.rb
@@ -10,7 +10,7 @@ module Gitlab
def bundle_to_disk(full_path)
mkdir_p(@shared.export_path)
- git_bundle(repo_path: path_to_repo, bundle_path: full_path)
+ @wiki.repository.bundle_to_disk(full_path)
rescue => e
@shared.error(e)
false
diff --git a/lib/gitlab/import_export/wiki_restorer.rb b/lib/gitlab/import_export/wiki_restorer.rb
new file mode 100644
index 00000000000..f33bfb332ab
--- /dev/null
+++ b/lib/gitlab/import_export/wiki_restorer.rb
@@ -0,0 +1,23 @@
+module Gitlab
+ module ImportExport
+ class WikiRestorer < RepoRestorer
+ def initialize(project:, shared:, path_to_bundle:, wiki_enabled:)
+ super(project: project, shared: shared, path_to_bundle: path_to_bundle)
+
+ @wiki_enabled = wiki_enabled
+ end
+
+ def restore
+ @project.wiki if create_empty_wiki?
+
+ super
+ end
+
+ private
+
+ def create_empty_wiki?
+ !File.exist?(@path_to_bundle) && @wiki_enabled
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/import_sources.rb b/lib/gitlab/import_sources.rb
index 5404dc11a87..60d5fa4d29a 100644
--- a/lib/gitlab/import_sources.rb
+++ b/lib/gitlab/import_sources.rb
@@ -7,15 +7,16 @@ module Gitlab
module ImportSources
ImportSource = Struct.new(:name, :title, :importer)
+ # We exclude `bare_repository` here as it has no import class associated
ImportTable = [
- ImportSource.new('github', 'GitHub', Github::Import),
+ ImportSource.new('github', 'GitHub', Gitlab::GithubImport::ParallelImporter),
ImportSource.new('bitbucket', 'Bitbucket', Gitlab::BitbucketImport::Importer),
ImportSource.new('gitlab', 'GitLab.com', Gitlab::GitlabImport::Importer),
ImportSource.new('google_code', 'Google Code', Gitlab::GoogleCodeImport::Importer),
ImportSource.new('fogbugz', 'FogBugz', Gitlab::FogbugzImport::Importer),
ImportSource.new('git', 'Repo by URL', nil),
ImportSource.new('gitlab_project', 'GitLab export', Gitlab::ImportExport::Importer),
- ImportSource.new('gitea', 'Gitea', Gitlab::GithubImport::Importer)
+ ImportSource.new('gitea', 'Gitea', Gitlab::LegacyGithubImport::Importer)
].freeze
class << self
diff --git a/lib/gitlab/insecure_key_fingerprint.rb b/lib/gitlab/insecure_key_fingerprint.rb
new file mode 100644
index 00000000000..f85b6e9197f
--- /dev/null
+++ b/lib/gitlab/insecure_key_fingerprint.rb
@@ -0,0 +1,23 @@
+module Gitlab
+ #
+ # Calculates the fingerprint of a given key without using
+ # openssh key validations. For this reason, only use
+ # for calculating the fingerprint to find the key with it.
+ #
+ # DO NOT use it for checking the validity of a ssh key.
+ #
+ class InsecureKeyFingerprint
+ attr_accessor :key
+
+ #
+ # Takes the base64-encoded string representing an RSA or DSA key
+ #
+ def initialize(key_base64)
+ @key = key_base64
+ end
+
+ def fingerprint
+ OpenSSL::Digest::MD5.hexdigest(Base64.decode64(@key)).scan(/../).join(':')
+ end
+ end
+end
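
Usage sketch; splitting a public key line into its base64 body is an assumption
about the caller, not something this class does:

  # "ssh-rsa AAAAB3NzaC1yc2E... comment" -> pass only the base64 body.
  base64_body = public_key_line.split(' ')[1]
  Gitlab::InsecureKeyFingerprint.new(base64_body).fingerprint
  # => "57:4b:..." MD5 fingerprint, for key lookup only, never for validation
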
diff --git a/lib/gitlab/issuable_metadata.rb b/lib/gitlab/issuable_metadata.rb
index 977c05910d3..0c9de72329c 100644
--- a/lib/gitlab/issuable_metadata.rb
+++ b/lib/gitlab/issuable_metadata.rb
@@ -1,6 +1,14 @@
module Gitlab
module IssuableMetadata
def issuable_meta_data(issuable_collection, collection_type)
+ # ActiveRecord uses Object#extend for null relations.
+ if !(issuable_collection.singleton_class < ActiveRecord::NullRelation) &&
+ issuable_collection.respond_to?(:limit_value) &&
+ issuable_collection.limit_value.nil?
+
+ raise 'Collection must have a limit applied for preloading meta-data'
+ end
+
# map has to be used here since using pluck or select will
# throw an error when ordering issuables by priority which inserts
# a new order into the collection.
diff --git a/lib/gitlab/job_waiter.rb b/lib/gitlab/job_waiter.rb
index 4d6bbda15f3..f7a8eae0be4 100644
--- a/lib/gitlab/job_waiter.rb
+++ b/lib/gitlab/job_waiter.rb
@@ -15,15 +15,23 @@ module Gitlab
# push to that array when done. Once the waiter has popped `count` items, it
# knows all the jobs are done.
class JobWaiter
+ KEY_PREFIX = "gitlab:job_waiter".freeze
+
def self.notify(key, jid)
Gitlab::Redis::SharedState.with { |redis| redis.lpush(key, jid) }
end
- attr_reader :key, :jobs_remaining, :finished
+ def self.key?(key)
+ key.is_a?(String) && key =~ /\A#{KEY_PREFIX}:\h{8}-\h{4}-\h{4}-\h{4}-\h{12}\z/
+ end
+
+ attr_reader :key, :finished
+ attr_accessor :jobs_remaining
# jobs_remaining - the number of jobs left to wait for
- def initialize(jobs_remaining)
- @key = "gitlab:job_waiter:#{SecureRandom.uuid}"
+ # key - The key of this waiter.
+ def initialize(jobs_remaining = 0, key = "#{KEY_PREFIX}:#{SecureRandom.uuid}")
+ @key = key
@jobs_remaining = jobs_remaining
@finished = []
end
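
Rough sketch of the waiter/notify handshake enabled by the shareable key
(SomeWorker is hypothetical, and #wait is assumed to be the waiter's existing
blocking call):

  # Coordinator: hand waiter.key to each job, then block until they report back.
  waiter = Gitlab::JobWaiter.new(jobs.count)
  jobs.each { |args| SomeWorker.perform_async(*args, waiter.key) }
  waiter.wait(60)

  # Worker side: signal completion under the same key.
  Gitlab::JobWaiter.notify(key, jid)
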
diff --git a/lib/gitlab/kubernetes.rb b/lib/gitlab/kubernetes.rb
index cdbdfa10d0e..da43bd0af4b 100644
--- a/lib/gitlab/kubernetes.rb
+++ b/lib/gitlab/kubernetes.rb
@@ -113,7 +113,7 @@ module Gitlab
def kubeconfig_embed_ca_pem(config, ca_pem)
cluster = config.dig(:clusters, 0, :cluster)
- cluster[:'certificate-authority-data'] = Base64.encode64(ca_pem)
+ cluster[:'certificate-authority-data'] = Base64.strict_encode64(ca_pem)
end
end
end
diff --git a/lib/gitlab/kubernetes/config_map.rb b/lib/gitlab/kubernetes/config_map.rb
new file mode 100644
index 00000000000..95e1054919d
--- /dev/null
+++ b/lib/gitlab/kubernetes/config_map.rb
@@ -0,0 +1,37 @@
+module Gitlab
+ module Kubernetes
+ class ConfigMap
+ def initialize(name, values)
+ @name = name
+ @values = values
+ end
+
+ def generate
+ resource = ::Kubeclient::Resource.new
+ resource.metadata = metadata
+ resource.data = { values: values }
+ resource
+ end
+
+ private
+
+ attr_reader :name, :values
+
+ def metadata
+ {
+ name: config_map_name,
+ namespace: namespace,
+ labels: { name: config_map_name }
+ }
+ end
+
+ def config_map_name
+ "values-content-configuration-#{name}"
+ end
+
+ def namespace
+ Gitlab::Kubernetes::Helm::NAMESPACE
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/kubernetes/helm.rb b/lib/gitlab/kubernetes/helm.rb
new file mode 100644
index 00000000000..0f0588b8b23
--- /dev/null
+++ b/lib/gitlab/kubernetes/helm.rb
@@ -0,0 +1,8 @@
+module Gitlab
+ module Kubernetes
+ module Helm
+ HELM_VERSION = '2.7.0'.freeze
+ NAMESPACE = 'gitlab-managed-apps'.freeze
+ end
+ end
+end
diff --git a/lib/gitlab/kubernetes/helm/api.rb b/lib/gitlab/kubernetes/helm/api.rb
new file mode 100644
index 00000000000..2edd34109ba
--- /dev/null
+++ b/lib/gitlab/kubernetes/helm/api.rb
@@ -0,0 +1,45 @@
+module Gitlab
+ module Kubernetes
+ module Helm
+ class Api
+ def initialize(kubeclient)
+ @kubeclient = kubeclient
+ @namespace = Gitlab::Kubernetes::Namespace.new(Gitlab::Kubernetes::Helm::NAMESPACE, kubeclient)
+ end
+
+ def install(command)
+ @namespace.ensure_exists!
+ create_config_map(command) if command.config_map?
+ @kubeclient.create_pod(command.pod_resource)
+ end
+
+ ##
+ # Returns Pod phase
+ #
+ # https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#pod-phase
+ #
+ # values: "Pending", "Running", "Succeeded", "Failed", "Unknown"
+ #
+ def installation_status(pod_name)
+ @kubeclient.get_pod(pod_name, @namespace.name).status.phase
+ end
+
+ def installation_log(pod_name)
+ @kubeclient.get_pod_log(pod_name, @namespace.name).body
+ end
+
+ def delete_installation_pod!(pod_name)
+ @kubeclient.delete_pod(pod_name, @namespace.name)
+ end
+
+ private
+
+ def create_config_map(command)
+ command.config_map_resource.tap do |config_map_resource|
+ @kubeclient.create_config_map(config_map_resource)
+ end
+ end
+ end
+ end
+ end
+end
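
Minimal driving sketch (kubeclient, chart and values are placeholders). Because
InstallCommand#config_map? is true, the values are written to a ConfigMap before
the installation pod is created:

  api     = Gitlab::Kubernetes::Helm::Api.new(kubeclient)
  command = Gitlab::Kubernetes::Helm::InstallCommand.new(
    'ingress',
    chart: 'stable/nginx-ingress',
    values: File.read('values.yaml')
  )
  api.install(command)
  api.installation_status(command.pod_name) # => "Pending", "Running", ...
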
diff --git a/lib/gitlab/kubernetes/helm/base_command.rb b/lib/gitlab/kubernetes/helm/base_command.rb
new file mode 100644
index 00000000000..6e4df05aa7e
--- /dev/null
+++ b/lib/gitlab/kubernetes/helm/base_command.rb
@@ -0,0 +1,40 @@
+module Gitlab
+ module Kubernetes
+ module Helm
+ class BaseCommand
+ attr_reader :name
+
+ def initialize(name)
+ @name = name
+ end
+
+ def pod_resource
+ Gitlab::Kubernetes::Helm::Pod.new(self, namespace).generate
+ end
+
+ def generate_script
+ <<~HEREDOC
+ set -eo pipefail
+ apk add -U ca-certificates openssl >/dev/null
+ wget -q -O - https://kubernetes-helm.storage.googleapis.com/helm-v#{Gitlab::Kubernetes::Helm::HELM_VERSION}-linux-amd64.tar.gz | tar zxC /tmp >/dev/null
+ mv /tmp/linux-amd64/helm /usr/bin/
+ HEREDOC
+ end
+
+ def config_map?
+ false
+ end
+
+ def pod_name
+ "install-#{name}"
+ end
+
+ private
+
+ def namespace
+ Gitlab::Kubernetes::Helm::NAMESPACE
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/kubernetes/helm/init_command.rb b/lib/gitlab/kubernetes/helm/init_command.rb
new file mode 100644
index 00000000000..a02e64561f6
--- /dev/null
+++ b/lib/gitlab/kubernetes/helm/init_command.rb
@@ -0,0 +1,19 @@
+module Gitlab
+ module Kubernetes
+ module Helm
+ class InitCommand < BaseCommand
+ def generate_script
+ super + [
+ init_helm_command
+ ].join("\n")
+ end
+
+ private
+
+ def init_helm_command
+ "helm init >/dev/null"
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/kubernetes/helm/install_command.rb b/lib/gitlab/kubernetes/helm/install_command.rb
new file mode 100644
index 00000000000..30af3e97b4a
--- /dev/null
+++ b/lib/gitlab/kubernetes/helm/install_command.rb
@@ -0,0 +1,48 @@
+module Gitlab
+ module Kubernetes
+ module Helm
+ class InstallCommand < BaseCommand
+ attr_reader :name, :chart, :repository, :values
+
+ def initialize(name, chart:, values:, repository: nil)
+ @name = name
+ @chart = chart
+ @values = values
+ @repository = repository
+ end
+
+ def generate_script
+ super + [
+ init_command,
+ repository_command,
+ script_command
+ ].compact.join("\n")
+ end
+
+ def config_map?
+ true
+ end
+
+ def config_map_resource
+ Gitlab::Kubernetes::ConfigMap.new(name, values).generate
+ end
+
+ private
+
+ def init_command
+ 'helm init --client-only >/dev/null'
+ end
+
+ def repository_command
+ "helm repo add #{name} #{repository}" if repository
+ end
+
+ def script_command
+ <<~HEREDOC
+ helm install #{chart} --name #{name} --namespace #{Gitlab::Kubernetes::Helm::NAMESPACE} -f /data/helm/#{name}/config/values.yaml >/dev/null
+ HEREDOC
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/kubernetes/helm/pod.rb b/lib/gitlab/kubernetes/helm/pod.rb
new file mode 100644
index 00000000000..1e12299eefd
--- /dev/null
+++ b/lib/gitlab/kubernetes/helm/pod.rb
@@ -0,0 +1,81 @@
+module Gitlab
+ module Kubernetes
+ module Helm
+ class Pod
+ def initialize(command, namespace_name)
+ @command = command
+ @namespace_name = namespace_name
+ end
+
+ def generate
+ spec = { containers: [container_specification], restartPolicy: 'Never' }
+
+ if command.config_map?
+ spec[:volumes] = volumes_specification
+ spec[:containers][0][:volumeMounts] = volume_mounts_specification
+ end
+
+ ::Kubeclient::Resource.new(metadata: metadata, spec: spec)
+ end
+
+ private
+
+ attr_reader :command, :namespace_name, :kubeclient, :config_map
+
+ def container_specification
+ {
+ name: 'helm',
+ image: 'alpine:3.6',
+ env: generate_pod_env(command),
+ command: %w(/bin/sh),
+ args: %w(-c $(COMMAND_SCRIPT))
+ }
+ end
+
+ def labels
+ {
+ 'gitlab.org/action': 'install',
+ 'gitlab.org/application': command.name
+ }
+ end
+
+ def metadata
+ {
+ name: command.pod_name,
+ namespace: namespace_name,
+ labels: labels
+ }
+ end
+
+ def generate_pod_env(command)
+ {
+ HELM_VERSION: Gitlab::Kubernetes::Helm::HELM_VERSION,
+ TILLER_NAMESPACE: namespace_name,
+ COMMAND_SCRIPT: command.generate_script
+ }.map { |key, value| { name: key, value: value } }
+ end
+
+ def volumes_specification
+ [
+ {
+ name: 'configuration-volume',
+ configMap: {
+ name: "values-content-configuration-#{command.name}",
+ items: [{ key: 'values', path: 'values.yaml' }]
+ }
+ }
+ ]
+ end
+
+ def volume_mounts_specification
+ [
+ {
+ name: 'configuration-volume',
+ mountPath: "/data/helm/#{command.name}/config"
+ }
+ ]
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/kubernetes/namespace.rb b/lib/gitlab/kubernetes/namespace.rb
new file mode 100644
index 00000000000..fbbddb7bffa
--- /dev/null
+++ b/lib/gitlab/kubernetes/namespace.rb
@@ -0,0 +1,30 @@
+module Gitlab
+ module Kubernetes
+ class Namespace
+ attr_accessor :name
+
+ def initialize(name, client)
+ @name = name
+ @client = client
+ end
+
+ def exists?
+ @client.get_namespace(name)
+ rescue ::KubeException => ke
+ raise ke unless ke.error_code == 404
+
+ false
+ end
+
+ def create!
+ resource = ::Kubeclient::Resource.new(metadata: { name: name })
+
+ @client.create_namespace(resource)
+ end
+
+ def ensure_exists!
+ exists? || create!
+ end
+ end
+ end
+end
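
Usage sketch (kubeclient is whatever Kubeclient::Client instance the platform
adapter provides):

  namespace = Gitlab::Kubernetes::Namespace.new(Gitlab::Kubernetes::Helm::NAMESPACE, kubeclient)
  namespace.ensure_exists! # creates "gitlab-managed-apps" only when the GET 404s
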
diff --git a/lib/gitlab/kubernetes/pod.rb b/lib/gitlab/kubernetes/pod.rb
new file mode 100644
index 00000000000..f3842cdf762
--- /dev/null
+++ b/lib/gitlab/kubernetes/pod.rb
@@ -0,0 +1,12 @@
+module Gitlab
+ module Kubernetes
+ module Pod
+ PENDING = 'Pending'.freeze
+ RUNNING = 'Running'.freeze
+ SUCCEEDED = 'Succeeded'.freeze
+ FAILED = 'Failed'.freeze
+ UNKNOWN = 'Unknown'.freeze
+ PHASES = [PENDING, RUNNING, SUCCEEDED, FAILED, UNKNOWN].freeze
+ end
+ end
+end
diff --git a/lib/gitlab/ldap/access.rb b/lib/gitlab/ldap/access.rb
deleted file mode 100644
index fb68627dedf..00000000000
--- a/lib/gitlab/ldap/access.rb
+++ /dev/null
@@ -1,87 +0,0 @@
-# LDAP authorization model
-#
-# * Check if we are allowed access (not blocked)
-#
-module Gitlab
- module LDAP
- class Access
- attr_reader :provider, :user
-
- def self.open(user, &block)
- Gitlab::LDAP::Adapter.open(user.ldap_identity.provider) do |adapter|
- block.call(self.new(user, adapter))
- end
- end
-
- def self.allowed?(user)
- self.open(user) do |access|
- if access.allowed?
- Users::UpdateService.new(user, last_credential_check_at: Time.now).execute
-
- true
- else
- false
- end
- end
- end
-
- def initialize(user, adapter = nil)
- @adapter = adapter
- @user = user
- @provider = user.ldap_identity.provider
- end
-
- def allowed?
- if ldap_user
- unless ldap_config.active_directory
- unblock_user(user, 'is available again') if user.ldap_blocked?
- return true
- end
-
- # Block user in GitLab if he/she was blocked in AD
- if Gitlab::LDAP::Person.disabled_via_active_directory?(user.ldap_identity.extern_uid, adapter)
- block_user(user, 'is disabled in Active Directory')
- false
- else
- unblock_user(user, 'is not disabled anymore') if user.ldap_blocked?
- true
- end
- else
- # Block the user if they no longer exist in LDAP/AD
- block_user(user, 'does not exist anymore')
- false
- end
- end
-
- def adapter
- @adapter ||= Gitlab::LDAP::Adapter.new(provider)
- end
-
- def ldap_config
- Gitlab::LDAP::Config.new(provider)
- end
-
- def ldap_user
- @ldap_user ||= Gitlab::LDAP::Person.find_by_dn(user.ldap_identity.extern_uid, adapter)
- end
-
- def block_user(user, reason)
- user.ldap_block
-
- Gitlab::AppLogger.info(
- "LDAP account \"#{user.ldap_identity.extern_uid}\" #{reason}, " \
- "blocking Gitlab user \"#{user.name}\" (#{user.email})"
- )
- end
-
- def unblock_user(user, reason)
- user.activate
-
- Gitlab::AppLogger.info(
- "LDAP account \"#{user.ldap_identity.extern_uid}\" #{reason}, " \
- "unblocking Gitlab user \"#{user.name}\" (#{user.email})"
- )
- end
- end
- end
-end
diff --git a/lib/gitlab/ldap/adapter.rb b/lib/gitlab/ldap/adapter.rb
deleted file mode 100644
index cd7e4ca7b7e..00000000000
--- a/lib/gitlab/ldap/adapter.rb
+++ /dev/null
@@ -1,104 +0,0 @@
-module Gitlab
- module LDAP
- class Adapter
- attr_reader :provider, :ldap
-
- def self.open(provider, &block)
- Net::LDAP.open(config(provider).adapter_options) do |ldap|
- block.call(self.new(provider, ldap))
- end
- end
-
- def self.config(provider)
- Gitlab::LDAP::Config.new(provider)
- end
-
- def initialize(provider, ldap = nil)
- @provider = provider
- @ldap = ldap || Net::LDAP.new(config.adapter_options)
- end
-
- def config
- Gitlab::LDAP::Config.new(provider)
- end
-
- def users(field, value, limit = nil)
- options = user_options(field, value, limit)
-
- entries = ldap_search(options).select do |entry|
- entry.respond_to? config.uid
- end
-
- entries.map do |entry|
- Gitlab::LDAP::Person.new(entry, provider)
- end
- end
-
- def user(*args)
- users(*args).first
- end
-
- def dn_matches_filter?(dn, filter)
- ldap_search(base: dn,
- filter: filter,
- scope: Net::LDAP::SearchScope_BaseObject,
- attributes: %w{dn}).any?
- end
-
- def ldap_search(*args)
- # Net::LDAP's `time` argument doesn't work. Use Ruby `Timeout` instead.
- Timeout.timeout(config.timeout) do
- results = ldap.search(*args)
-
- if results.nil?
- response = ldap.get_operation_result
-
- unless response.code.zero?
- Rails.logger.warn("LDAP search error: #{response.message}")
- end
-
- []
- else
- results
- end
- end
- rescue Net::LDAP::Error => error
- Rails.logger.warn("LDAP search raised exception #{error.class}: #{error.message}")
- []
- rescue Timeout::Error
- Rails.logger.warn("LDAP search timed out after #{config.timeout} seconds")
- []
- end
-
- private
-
- def user_options(field, value, limit)
- options = { attributes: Gitlab::LDAP::Person.ldap_attributes(config).compact.uniq }
- options[:size] = limit if limit
-
- if field.to_sym == :dn
- options[:base] = value
- options[:scope] = Net::LDAP::SearchScope_BaseObject
- options[:filter] = user_filter
- else
- options[:base] = config.base
- options[:filter] = user_filter(Net::LDAP::Filter.eq(field, value))
- end
-
- options
- end
-
- def user_filter(filter = nil)
- user_filter = config.constructed_user_filter if config.user_filter.present?
-
- if user_filter && filter
- Net::LDAP::Filter.join(filter, user_filter)
- elsif user_filter
- user_filter
- else
- filter
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/ldap/auth_hash.rb b/lib/gitlab/ldap/auth_hash.rb
deleted file mode 100644
index 4fbc5fa5262..00000000000
--- a/lib/gitlab/ldap/auth_hash.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# Class to parse and transform the info provided by omniauth
-#
-module Gitlab
- module LDAP
- class AuthHash < Gitlab::OAuth::AuthHash
- private
-
- def get_info(key)
- attributes = ldap_config.attributes[key.to_s]
- return super unless attributes
-
- attributes = Array(attributes)
-
- value = nil
- attributes.each do |attribute|
- value = get_raw(attribute)
- value = value.first if value
- break if value.present?
- end
-
- return super unless value
-
- Gitlab::Utils.force_utf8(value)
- value
- end
-
- def get_raw(key)
- auth_hash.extra[:raw_info][key] if auth_hash.extra
- end
-
- def ldap_config
- @ldap_config ||= Gitlab::LDAP::Config.new(self.provider)
- end
- end
- end
-end
diff --git a/lib/gitlab/ldap/authentication.rb b/lib/gitlab/ldap/authentication.rb
deleted file mode 100644
index ed1de73f8c6..00000000000
--- a/lib/gitlab/ldap/authentication.rb
+++ /dev/null
@@ -1,69 +0,0 @@
-# These calls help to authenticate to LDAP by providing username and password
-#
-# Since multiple LDAP servers are supported, it will loop through all of them
-# until a valid bind is found
-#
-
-module Gitlab
- module LDAP
- class Authentication
- def self.login(login, password)
- return unless Gitlab::LDAP::Config.enabled?
- return unless login.present? && password.present?
-
- auth = nil
- # loop through providers until valid bind
- providers.find do |provider|
- auth = new(provider)
- auth.login(login, password) # true will exit the loop
- end
-
- # If (login, password) was invalid for all providers, the value of auth is now the last
- # Gitlab::LDAP::Authentication instance we tried.
- auth.user
- end
-
- def self.providers
- Gitlab::LDAP::Config.providers
- end
-
- attr_accessor :provider, :ldap_user
-
- def initialize(provider)
- @provider = provider
- end
-
- def login(login, password)
- @ldap_user = adapter.bind_as(
- filter: user_filter(login),
- size: 1,
- password: password
- )
- end
-
- def adapter
- OmniAuth::LDAP::Adaptor.new(config.omniauth_options)
- end
-
- def config
- Gitlab::LDAP::Config.new(provider)
- end
-
- def user_filter(login)
- filter = Net::LDAP::Filter.equals(config.uid, login)
-
- # Apply LDAP user filter if present
- if config.user_filter.present?
- filter = Net::LDAP::Filter.join(filter, config.constructed_user_filter)
- end
-
- filter
- end
-
- def user
- return nil unless ldap_user
- Gitlab::LDAP::User.find_by_uid_and_provider(ldap_user.dn, provider)
- end
- end
- end
-end
diff --git a/lib/gitlab/ldap/config.rb b/lib/gitlab/ldap/config.rb
deleted file mode 100644
index c8f19cd52d5..00000000000
--- a/lib/gitlab/ldap/config.rb
+++ /dev/null
@@ -1,230 +0,0 @@
-# Load a specific server configuration
-module Gitlab
- module LDAP
- class Config
- NET_LDAP_ENCRYPTION_METHOD = {
- simple_tls: :simple_tls,
- start_tls: :start_tls,
- plain: nil
- }.freeze
-
- attr_accessor :provider, :options
-
- def self.enabled?
- Gitlab.config.ldap.enabled
- end
-
- def self.servers
- Gitlab.config.ldap.servers.values
- end
-
- def self.available_servers
- return [] unless enabled?
-
- Array.wrap(servers.first)
- end
-
- def self.providers
- servers.map { |server| server['provider_name'] }
- end
-
- def self.valid_provider?(provider)
- providers.include?(provider)
- end
-
- def self.invalid_provider(provider)
- raise "Unknown provider (#{provider}). Available providers: #{providers}"
- end
-
- def initialize(provider)
- if self.class.valid_provider?(provider)
- @provider = provider
- else
- self.class.invalid_provider(provider)
- end
- @options = config_for(@provider) # Use @provider, not provider
- end
-
- def enabled?
- base_config.enabled
- end
-
- def adapter_options
- opts = base_options.merge(
- encryption: encryption_options
- )
-
- opts.merge!(auth_options) if has_auth?
-
- opts
- end
-
- def omniauth_options
- opts = base_options.merge(
- base: base,
- encryption: options['encryption'],
- filter: omniauth_user_filter,
- name_proc: name_proc,
- disable_verify_certificates: !options['verify_certificates']
- )
-
- if has_auth?
- opts.merge!(
- bind_dn: options['bind_dn'],
- password: options['password']
- )
- end
-
- opts[:ca_file] = options['ca_file'] if options['ca_file'].present?
- opts[:ssl_version] = options['ssl_version'] if options['ssl_version'].present?
-
- opts
- end
-
- def base
- options['base']
- end
-
- def uid
- options['uid']
- end
-
- def sync_ssh_keys?
- sync_ssh_keys.present?
- end
-
- # The LDAP attribute in which the ssh keys are stored
- def sync_ssh_keys
- options['sync_ssh_keys']
- end
-
- def user_filter
- options['user_filter']
- end
-
- def constructed_user_filter
- @constructed_user_filter ||= Net::LDAP::Filter.construct(user_filter)
- end
-
- def group_base
- options['group_base']
- end
-
- def admin_group
- options['admin_group']
- end
-
- def active_directory
- options['active_directory']
- end
-
- def block_auto_created_users
- options['block_auto_created_users']
- end
-
- def attributes
- default_attributes.merge(options['attributes'])
- end
-
- def timeout
- options['timeout'].to_i
- end
-
- def has_auth?
- options['password'] || options['bind_dn']
- end
-
- def allow_username_or_email_login
- options['allow_username_or_email_login']
- end
-
- def name_proc
- if allow_username_or_email_login
- proc { |name| name.gsub(/@.*\z/, '') }
- else
- proc { |name| name }
- end
- end
-
- def default_attributes
- {
- 'username' => %w(uid userid sAMAccountName),
- 'email' => %w(mail email userPrincipalName),
- 'name' => 'cn',
- 'first_name' => 'givenName',
- 'last_name' => 'sn'
- }
- end
-
- protected
-
- def base_options
- {
- host: options['host'],
- port: options['port']
- }
- end
-
- def base_config
- Gitlab.config.ldap
- end
-
- def config_for(provider)
- base_config.servers.values.find { |server| server['provider_name'] == provider }
- end
-
- def encryption_options
- method = translate_method(options['encryption'])
- return nil unless method
-
- {
- method: method,
- tls_options: tls_options(method)
- }
- end
-
- def translate_method(method_from_config)
- NET_LDAP_ENCRYPTION_METHOD[method_from_config.to_sym]
- end
-
- def tls_options(method)
- return { verify_mode: OpenSSL::SSL::VERIFY_NONE } unless method
-
- opts = if options['verify_certificates']
- OpenSSL::SSL::SSLContext::DEFAULT_PARAMS
- else
- # It is important to explicitly set verify_mode for two reasons:
- # 1. The behavior of OpenSSL is undefined when verify_mode is not set.
- # 2. The net-ldap gem implementation verifies the certificate hostname
- # unless verify_mode is set to VERIFY_NONE.
- { verify_mode: OpenSSL::SSL::VERIFY_NONE }
- end
-
- opts[:ca_file] = options['ca_file'] if options['ca_file'].present?
- opts[:ssl_version] = options['ssl_version'] if options['ssl_version'].present?
-
- opts
- end
-
- def auth_options
- {
- auth: {
- method: :simple,
- username: options['bind_dn'],
- password: options['password']
- }
- }
- end
-
- def omniauth_user_filter
- uid_filter = Net::LDAP::Filter.eq(uid, '%{username}')
-
- if user_filter.present?
- Net::LDAP::Filter.join(uid_filter, constructed_user_filter).to_s
- else
- uid_filter.to_s
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/ldap/person.rb b/lib/gitlab/ldap/person.rb
deleted file mode 100644
index 4d6f8ac79de..00000000000
--- a/lib/gitlab/ldap/person.rb
+++ /dev/null
@@ -1,81 +0,0 @@
-module Gitlab
- module LDAP
- class Person
- # Active Directory-specific LDAP filter that checks if bit 2 of the
- # userAccountControl attribute is set.
- # Source: http://ctogonewild.com/2009/09/03/bitmask-searches-in-ldap/
- AD_USER_DISABLED = Net::LDAP::Filter.ex("userAccountControl:1.2.840.113556.1.4.803", "2")
-
- attr_accessor :entry, :provider
-
- def self.find_by_uid(uid, adapter)
- uid = Net::LDAP::Filter.escape(uid)
- adapter.user(adapter.config.uid, uid)
- end
-
- def self.find_by_dn(dn, adapter)
- adapter.user('dn', dn)
- end
-
- def self.disabled_via_active_directory?(dn, adapter)
- adapter.dn_matches_filter?(dn, AD_USER_DISABLED)
- end
-
- def self.ldap_attributes(config)
- [
- 'dn', # Used in `dn`
- config.uid, # Used in `uid`
- *config.attributes['name'], # Used in `name`
- *config.attributes['email'] # Used in `email`
- ]
- end
-
- def initialize(entry, provider)
- Rails.logger.debug { "Instantiating #{self.class.name} with LDIF:\n#{entry.to_ldif}" }
- @entry = entry
- @provider = provider
- end
-
- def name
- attribute_value(:name).first
- end
-
- def uid
- entry.public_send(config.uid).first # rubocop:disable GitlabSecurity/PublicSend
- end
-
- def username
- uid
- end
-
- def email
- attribute_value(:email)
- end
-
- delegate :dn, to: :entry
-
- private
-
- def entry
- @entry
- end
-
- def config
- @config ||= Gitlab::LDAP::Config.new(provider)
- end
-
- # Using the LDAP attributes configuration, find and return the first
- # attribute with a value. For example, by default, when given 'email',
- # this method looks for 'mail', 'email' and 'userPrincipalName' and
- # returns the first with a value.
- def attribute_value(attribute)
- attributes = Array(config.attributes[attribute.to_s])
- selected_attr = attributes.find { |attr| entry.respond_to?(attr) }
-
- return nil unless selected_attr
-
- entry.public_send(selected_attr) # rubocop:disable GitlabSecurity/PublicSend
- end
- end
- end
-end
diff --git a/lib/gitlab/ldap/user.rb b/lib/gitlab/ldap/user.rb
deleted file mode 100644
index 3bf27b37ae6..00000000000
--- a/lib/gitlab/ldap/user.rb
+++ /dev/null
@@ -1,80 +0,0 @@
-# LDAP extension for User model
-#
-# * Find or create user from omniauth.auth data
-# * Links LDAP account with existing user
-# * Auth LDAP user with login and password
-#
-module Gitlab
- module LDAP
- class User < Gitlab::OAuth::User
- class << self
- def find_by_uid_and_provider(uid, provider)
- # LDAP distinguished name is case-insensitive
- identity = ::Identity
- .where(provider: provider)
- .iwhere(extern_uid: uid).last
- identity && identity.user
- end
- end
-
- def initialize(auth_hash)
- super
- update_user_attributes
- end
-
- def save
- super('LDAP')
- end
-
- # instance methods
- def gl_user
- @gl_user ||= find_by_uid_and_provider || find_by_email || build_new_user
- end
-
- def find_by_uid_and_provider
- self.class.find_by_uid_and_provider(auth_hash.uid, auth_hash.provider)
- end
-
- def find_by_email
- ::User.find_by(email: auth_hash.email.downcase) if auth_hash.has_attribute?(:email)
- end
-
- def update_user_attributes
- if persisted?
- # find_or_initialize_by doesn't update `gl_user.identities`, and isn't autosaved.
- identity = gl_user.identities.find { |identity| identity.provider == auth_hash.provider }
- identity ||= gl_user.identities.build(provider: auth_hash.provider)
-
- # For a new identity set extern_uid to the LDAP DN
- # For an existing identity with matching email but changed DN, update the DN.
- # For an existing identity with no change in DN, this line changes nothing.
- identity.extern_uid = auth_hash.uid
- end
- end
-
- def changed?
- gl_user.changed? || gl_user.identities.any?(&:changed?)
- end
-
- def block_after_signup?
- ldap_config.block_auto_created_users
- end
-
- def sync_profile_from_provider?
- true
- end
-
- def allowed?
- Gitlab::LDAP::Access.allowed?(gl_user)
- end
-
- def ldap_config
- Gitlab::LDAP::Config.new(auth_hash.provider)
- end
-
- def auth_hash=(auth_hash)
- @auth_hash = Gitlab::LDAP::AuthHash.new(auth_hash)
- end
- end
- end
-end
diff --git a/lib/gitlab/github_import/base_formatter.rb b/lib/gitlab/legacy_github_import/base_formatter.rb
index f330041cc00..2f07fde406c 100644
--- a/lib/gitlab/github_import/base_formatter.rb
+++ b/lib/gitlab/legacy_github_import/base_formatter.rb
@@ -1,5 +1,5 @@
module Gitlab
- module GithubImport
+ module LegacyGithubImport
class BaseFormatter
attr_reader :client, :formatter, :project, :raw_data
diff --git a/lib/gitlab/github_import/branch_formatter.rb b/lib/gitlab/legacy_github_import/branch_formatter.rb
index 8aa885fb811..80fe1d67209 100644
--- a/lib/gitlab/github_import/branch_formatter.rb
+++ b/lib/gitlab/legacy_github_import/branch_formatter.rb
@@ -1,5 +1,5 @@
module Gitlab
- module GithubImport
+ module LegacyGithubImport
class BranchFormatter < BaseFormatter
delegate :repo, :sha, :ref, to: :raw_data
diff --git a/lib/gitlab/legacy_github_import/client.rb b/lib/gitlab/legacy_github_import/client.rb
new file mode 100644
index 00000000000..53c910d44bd
--- /dev/null
+++ b/lib/gitlab/legacy_github_import/client.rb
@@ -0,0 +1,148 @@
+module Gitlab
+ module LegacyGithubImport
+ class Client
+ GITHUB_SAFE_REMAINING_REQUESTS = 100
+ GITHUB_SAFE_SLEEP_TIME = 500
+
+ attr_reader :access_token, :host, :api_version
+
+ def initialize(access_token, host: nil, api_version: 'v3')
+ @access_token = access_token
+ @host = host.to_s.sub(%r{/+\z}, '')
+ @api_version = api_version
+ @users = {}
+
+ if access_token
+ ::Octokit.auto_paginate = false
+ end
+ end
+
+ def api
+ @api ||= ::Octokit::Client.new(
+ access_token: access_token,
+ api_endpoint: api_endpoint,
+ # If there is no config, we're connecting to github.com and we
+ # should verify ssl.
+ connection_options: {
+ ssl: { verify: config ? config['verify_ssl'] : true }
+ }
+ )
+ end
+
+ def client
+ unless config
+ raise Projects::ImportService::Error,
+ 'OAuth configuration for GitHub missing.'
+ end
+
+ @client ||= ::OAuth2::Client.new(
+ config.app_id,
+ config.app_secret,
+ github_options.merge(ssl: { verify: config['verify_ssl'] })
+ )
+ end
+
+ def authorize_url(redirect_uri)
+ client.auth_code.authorize_url({
+ redirect_uri: redirect_uri,
+ scope: "repo, user, user:email"
+ })
+ end
+
+ def get_token(code)
+ client.auth_code.get_token(code).token
+ end
+
+ def method_missing(method, *args, &block)
+ if api.respond_to?(method)
+ request(method, *args, &block)
+ else
+ super(method, *args, &block)
+ end
+ end
+
+ def respond_to?(method)
+ api.respond_to?(method) || super
+ end
+
+ def user(login)
+ return nil unless login.present?
+ return @users[login] if @users.key?(login)
+
+ @users[login] = api.user(login)
+ end
+
+ private
+
+ def api_endpoint
+ if host.present? && api_version.present?
+ "#{host}/api/#{api_version}"
+ else
+ github_options[:site]
+ end
+ end
+
+ def config
+ Gitlab.config.omniauth.providers.find { |provider| provider.name == "github" }
+ end
+
+ def github_options
+ if config
+ config["args"]["client_options"].deep_symbolize_keys
+ else
+ OmniAuth::Strategies::GitHub.default_options[:client_options].symbolize_keys
+ end
+ end
+
+ def rate_limit
+ api.rate_limit!
+ # GitHub Rate Limit API returns 404 when the rate limit is
+ # disabled. In this case we just want to return gracefully
+ # instead of spitting out an error.
+ rescue Octokit::NotFound
+ nil
+ end
+
+ def has_rate_limit?
+ return @has_rate_limit if defined?(@has_rate_limit)
+
+ @has_rate_limit = rate_limit.present?
+ end
+
+ def rate_limit_exceed?
+ has_rate_limit? && rate_limit.remaining <= GITHUB_SAFE_REMAINING_REQUESTS
+ end
+
+ def rate_limit_sleep_time
+ rate_limit.resets_in + GITHUB_SAFE_SLEEP_TIME
+ end
+
+ def request(method, *args, &block)
+ sleep rate_limit_sleep_time if rate_limit_exceed?
+
+ data = api.__send__(method, *args) # rubocop:disable GitlabSecurity/PublicSend
+ return data unless data.is_a?(Array)
+
+ last_response = api.last_response
+
+ if block_given?
+ yield data
+ # api.last_response could change while we're yielding (e.g. fetching labels for each PR)
+ # so we cache our own last response
+ each_response_page(last_response, &block)
+ else
+ each_response_page(last_response) { |page| data.concat(page) }
+ data
+ end
+ end
+
+ def each_response_page(last_response)
+ while last_response.rels[:next]
+ sleep rate_limit_sleep_time if rate_limit_exceed?
+ last_response = last_response.rels[:next].get
+ yield last_response.data if last_response.data.is_a?(Array)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/comment_formatter.rb b/lib/gitlab/legacy_github_import/comment_formatter.rb
index e21922070c1..d2c7a8ae9f4 100644
--- a/lib/gitlab/github_import/comment_formatter.rb
+++ b/lib/gitlab/legacy_github_import/comment_formatter.rb
@@ -1,5 +1,5 @@
module Gitlab
- module GithubImport
+ module LegacyGithubImport
class CommentFormatter < BaseFormatter
attr_writer :author_id
@@ -38,7 +38,7 @@ module Gitlab
end
def generate_line_code(line)
- Gitlab::Diff::LineCode.generate(file_path, line.new_pos, line.old_pos)
+ Gitlab::Git.diff_line_code(file_path, line.new_pos, line.old_pos)
end
def on_diff?
diff --git a/lib/gitlab/github_import/importer.rb b/lib/gitlab/legacy_github_import/importer.rb
index b8c07460ebb..0526ef9eb13 100644
--- a/lib/gitlab/github_import/importer.rb
+++ b/lib/gitlab/legacy_github_import/importer.rb
@@ -1,8 +1,12 @@
module Gitlab
- module GithubImport
+ module LegacyGithubImport
class Importer
include Gitlab::ShellAdapter
+ def self.refmap
+ Gitlab::GithubImport.refmap
+ end
+
attr_reader :errors, :project, :repo, :repo_url
def initialize(project)
@@ -15,6 +19,7 @@ module Gitlab
def client
return @client if defined?(@client)
+
unless credentials
raise Projects::ImportService::Error,
"Unable to find project import data credentials for project ID: #{@project.id}"
diff --git a/lib/gitlab/github_import/issuable_formatter.rb b/lib/gitlab/legacy_github_import/issuable_formatter.rb
index 27b171d6ddb..de55382d3ad 100644
--- a/lib/gitlab/github_import/issuable_formatter.rb
+++ b/lib/gitlab/legacy_github_import/issuable_formatter.rb
@@ -1,5 +1,5 @@
module Gitlab
- module GithubImport
+ module LegacyGithubImport
class IssuableFormatter < BaseFormatter
attr_writer :assignee_id, :author_id
diff --git a/lib/gitlab/github_import/issue_formatter.rb b/lib/gitlab/legacy_github_import/issue_formatter.rb
index 977cd0423ba..4c8825ccf19 100644
--- a/lib/gitlab/github_import/issue_formatter.rb
+++ b/lib/gitlab/legacy_github_import/issue_formatter.rb
@@ -1,5 +1,5 @@
module Gitlab
- module GithubImport
+ module LegacyGithubImport
class IssueFormatter < IssuableFormatter
def attributes
{
diff --git a/lib/gitlab/github_import/label_formatter.rb b/lib/gitlab/legacy_github_import/label_formatter.rb
index 211ccdc51bb..c3eed12e739 100644
--- a/lib/gitlab/github_import/label_formatter.rb
+++ b/lib/gitlab/legacy_github_import/label_formatter.rb
@@ -1,5 +1,5 @@
module Gitlab
- module GithubImport
+ module LegacyGithubImport
class LabelFormatter < BaseFormatter
def attributes
{
diff --git a/lib/gitlab/github_import/milestone_formatter.rb b/lib/gitlab/legacy_github_import/milestone_formatter.rb
index dd782eff059..a565294384d 100644
--- a/lib/gitlab/github_import/milestone_formatter.rb
+++ b/lib/gitlab/legacy_github_import/milestone_formatter.rb
@@ -1,5 +1,5 @@
module Gitlab
- module GithubImport
+ module LegacyGithubImport
class MilestoneFormatter < BaseFormatter
def attributes
{
diff --git a/lib/gitlab/github_import/project_creator.rb b/lib/gitlab/legacy_github_import/project_creator.rb
index a55adc9b1c8..cbabe5454ca 100644
--- a/lib/gitlab/github_import/project_creator.rb
+++ b/lib/gitlab/legacy_github_import/project_creator.rb
@@ -1,8 +1,6 @@
module Gitlab
- module GithubImport
+ module LegacyGithubImport
class ProjectCreator
- include Gitlab::CurrentSettings
-
attr_reader :repo, :name, :namespace, :current_user, :session_data, :type
def initialize(repo, name, namespace, current_user, session_data, type: 'github')
@@ -36,7 +34,7 @@ module Gitlab
end
def visibility_level
- repo.private ? Gitlab::VisibilityLevel::PRIVATE : current_application_settings.default_project_visibility
+ repo.private ? Gitlab::VisibilityLevel::PRIVATE : Gitlab::CurrentSettings.default_project_visibility
end
#
diff --git a/lib/gitlab/github_import/pull_request_formatter.rb b/lib/gitlab/legacy_github_import/pull_request_formatter.rb
index 150afa31432..94c2e99066a 100644
--- a/lib/gitlab/github_import/pull_request_formatter.rb
+++ b/lib/gitlab/legacy_github_import/pull_request_formatter.rb
@@ -1,5 +1,5 @@
module Gitlab
- module GithubImport
+ module LegacyGithubImport
class PullRequestFormatter < IssuableFormatter
delegate :user, :project, :ref, :repo, :sha, to: :source_branch, prefix: true
delegate :user, :exists?, :project, :ref, :repo, :sha, :short_sha, to: :target_branch, prefix: true
diff --git a/lib/gitlab/github_import/release_formatter.rb b/lib/gitlab/legacy_github_import/release_formatter.rb
index 1ad702a6058..3ed9d4f76da 100644
--- a/lib/gitlab/github_import/release_formatter.rb
+++ b/lib/gitlab/legacy_github_import/release_formatter.rb
@@ -1,5 +1,5 @@
module Gitlab
- module GithubImport
+ module LegacyGithubImport
class ReleaseFormatter < BaseFormatter
def attributes
{
diff --git a/lib/gitlab/github_import/user_formatter.rb b/lib/gitlab/legacy_github_import/user_formatter.rb
index 04c2964da20..6d8055622f1 100644
--- a/lib/gitlab/github_import/user_formatter.rb
+++ b/lib/gitlab/legacy_github_import/user_formatter.rb
@@ -1,5 +1,5 @@
module Gitlab
- module GithubImport
+ module LegacyGithubImport
class UserFormatter
attr_reader :client, :raw
diff --git a/lib/gitlab/github_import/wiki_formatter.rb b/lib/gitlab/legacy_github_import/wiki_formatter.rb
index 0396122eeb9..27f45875c7c 100644
--- a/lib/gitlab/github_import/wiki_formatter.rb
+++ b/lib/gitlab/legacy_github_import/wiki_formatter.rb
@@ -1,5 +1,5 @@
module Gitlab
- module GithubImport
+ module LegacyGithubImport
class WikiFormatter
attr_reader :project
@@ -8,7 +8,7 @@ module Gitlab
end
def disk_path
- "#{project.disk_path}.wiki"
+ project.wiki.disk_path
end
def import_url
diff --git a/lib/gitlab/lfs_token.rb b/lib/gitlab/lfs_token.rb
index 8e57ba831c5..ead5d566871 100644
--- a/lib/gitlab/lfs_token.rb
+++ b/lib/gitlab/lfs_token.rb
@@ -27,6 +27,10 @@ module Gitlab
end
end
+ def deploy_key_pushable?(project)
+ actor.is_a?(DeployKey) && actor.can_push_to?(project)
+ end
+
def user?
actor.is_a?(User)
end
diff --git a/lib/gitlab/logger.rb b/lib/gitlab/logger.rb
index 6bffd410ed0..a42e312b5d3 100644
--- a/lib/gitlab/logger.rb
+++ b/lib/gitlab/logger.rb
@@ -13,7 +13,7 @@ module Gitlab
end
def self.read_latest
- path = Rails.root.join("log", file_name)
+ path = self.full_log_path
return [] unless File.readable?(path)
@@ -22,7 +22,15 @@ module Gitlab
end
def self.build
- new(Rails.root.join("log", file_name))
+ RequestStore[self.cache_key] ||= new(self.full_log_path)
+ end
+
+ def self.full_log_path
+ Rails.root.join("log", file_name)
+ end
+
+ def self.cache_key
+ 'logger:'.freeze + self.full_log_path.to_s
end
end
end
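Editor's note: this change memoizes one logger instance per log path for the duration of a request via RequestStore. A hedged sketch of the same idea using a plain Hash in place of RequestStore (which in the real code is cleared between requests):

    require 'logger'

    # Illustrative stand-in for RequestStore: any Hash-like per-request store works.
    store = {}
    cache_key = 'logger:' + '/tmp/example.log'
    logger = store[cache_key] ||= Logger.new('/tmp/example.log')
    logger.info('reused for every read/write during the same request')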
diff --git a/lib/gitlab/markdown/pipeline.rb b/lib/gitlab/markdown/pipeline.rb
deleted file mode 100644
index 306923902e0..00000000000
--- a/lib/gitlab/markdown/pipeline.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-module Gitlab
- module Markdown
- class Pipeline
- def self.[](name)
- name ||= :full
- const_get("#{name.to_s.camelize}Pipeline")
- end
-
- def self.filters
- []
- end
-
- def self.transform_context(context)
- context
- end
-
- def self.html_pipeline
- @html_pipeline ||= HTML::Pipeline.new(filters)
- end
-
- class << self
- %i(call to_document to_html).each do |meth|
- define_method(meth) do |text, context|
- context = transform_context(context)
-
- html_pipeline.__send__(meth, text, context) # rubocop:disable GitlabSecurity/PublicSend
- end
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/metrics.rb b/lib/gitlab/metrics.rb
index 4779755bb22..7d63ca5627d 100644
--- a/lib/gitlab/metrics.rb
+++ b/lib/gitlab/metrics.rb
@@ -1,7 +1,7 @@
module Gitlab
module Metrics
- extend Gitlab::Metrics::InfluxDb
- extend Gitlab::Metrics::Prometheus
+ include Gitlab::Metrics::InfluxDb
+ include Gitlab::Metrics::Prometheus
def self.enabled?
influx_metrics_enabled? || prometheus_metrics_enabled?
diff --git a/lib/gitlab/metrics/background_transaction.rb b/lib/gitlab/metrics/background_transaction.rb
new file mode 100644
index 00000000000..5919ebb1493
--- /dev/null
+++ b/lib/gitlab/metrics/background_transaction.rb
@@ -0,0 +1,14 @@
+module Gitlab
+ module Metrics
+ class BackgroundTransaction < Transaction
+ def initialize(worker_class)
+ super()
+ @worker_class = worker_class
+ end
+
+ def labels
+ { controller: @worker_class.name, action: 'perform' }
+ end
+ end
+ end
+end
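Editor's note: BackgroundTransaction reuses the web-style controller/action label pair for Sidekiq jobs, so a worker shows up as its class name plus the action 'perform'. A tiny illustrative check with an invented worker class:

    class ExampleWorker; end

    labels = { controller: ExampleWorker.name, action: 'perform' }
    puts labels.inspect # => {:controller=>"ExampleWorker", :action=>"perform"}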
diff --git a/lib/gitlab/metrics/base_sampler.rb b/lib/gitlab/metrics/base_sampler.rb
deleted file mode 100644
index 716d20bb91a..00000000000
--- a/lib/gitlab/metrics/base_sampler.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-require 'logger'
-module Gitlab
- module Metrics
- class BaseSampler < Daemon
- # interval - The sampling interval in seconds.
- def initialize(interval)
- interval_half = interval.to_f / 2
-
- @interval = interval
- @interval_steps = (-interval_half..interval_half).step(0.1).to_a
-
- super()
- end
-
- def safe_sample
- sample
- rescue => e
- Rails.logger.warn("#{self.class}: #{e}, stopping")
- stop
- end
-
- def sample
- raise NotImplementedError
- end
-
- # Returns the sleep interval with a random adjustment.
- #
- # The random adjustment is put in place to ensure we:
- #
- # 1. Don't generate samples at the exact same interval every time (thus
- # potentially missing anything that happens in between samples).
- # 2. Don't sample data at the same interval two times in a row.
- def sleep_interval
- while (step = @interval_steps.sample)
- if step != @last_step
- @last_step = step
-
- return @interval + @last_step
- end
- end
- end
-
- private
-
- attr_reader :running
-
- def start_working
- @running = true
- sleep(sleep_interval)
-
- while running
- safe_sample
-
- sleep(sleep_interval)
- end
- end
-
- def stop_working
- @running = false
- end
- end
- end
-end
diff --git a/lib/gitlab/metrics/influx_db.rb b/lib/gitlab/metrics/influx_db.rb
index 7b06bb953aa..66f30e3b397 100644
--- a/lib/gitlab/metrics/influx_db.rb
+++ b/lib/gitlab/metrics/influx_db.rb
@@ -1,169 +1,186 @@
module Gitlab
module Metrics
module InfluxDb
- include Gitlab::CurrentSettings
- extend self
+ extend ActiveSupport::Concern
+ include Gitlab::Metrics::Methods
+
+ EXECUTION_MEASUREMENT_BUCKETS = [0.001, 0.01, 0.1, 1].freeze
MUTEX = Mutex.new
private_constant :MUTEX
- def influx_metrics_enabled?
- settings[:enabled] || false
- end
+ class_methods do
+ def influx_metrics_enabled?
+ settings[:enabled] || false
+ end
- RAILS_ROOT = Rails.root.to_s
- METRICS_ROOT = Rails.root.join('lib', 'gitlab', 'metrics').to_s
- PATH_REGEX = /^#{RAILS_ROOT}\/?/
-
- def settings
- @settings ||= {
- enabled: current_application_settings[:metrics_enabled],
- pool_size: current_application_settings[:metrics_pool_size],
- timeout: current_application_settings[:metrics_timeout],
- method_call_threshold: current_application_settings[:metrics_method_call_threshold],
- host: current_application_settings[:metrics_host],
- port: current_application_settings[:metrics_port],
- sample_interval: current_application_settings[:metrics_sample_interval] || 15,
- packet_size: current_application_settings[:metrics_packet_size] || 1
- }
- end
+ # Prometheus histogram buckets used for arbitrary code measurements
+
+ def settings
+ @settings ||= begin
+ current_settings = Gitlab::CurrentSettings.current_application_settings
+
+ {
+ enabled: current_settings[:metrics_enabled],
+ pool_size: current_settings[:metrics_pool_size],
+ timeout: current_settings[:metrics_timeout],
+ method_call_threshold: current_settings[:metrics_method_call_threshold],
+ host: current_settings[:metrics_host],
+ port: current_settings[:metrics_port],
+ sample_interval: current_settings[:metrics_sample_interval] || 15,
+ packet_size: current_settings[:metrics_packet_size] || 1
+ }
+ end
+ end
- def mri?
- RUBY_ENGINE == 'ruby'
- end
+ def mri?
+ RUBY_ENGINE == 'ruby'
+ end
- def method_call_threshold
- # This is memoized since this method is called for every instrumented
- # method. Loading data from an external cache on every method call slows
- # things down too much.
- @method_call_threshold ||= settings[:method_call_threshold]
- end
+ def method_call_threshold
+ # This is memoized since this method is called for every instrumented
+ # method. Loading data from an external cache on every method call slows
+ # things down too much.
+ # in milliseconds
+ @method_call_threshold ||= settings[:method_call_threshold]
+ end
- def submit_metrics(metrics)
- prepared = prepare_metrics(metrics)
+ def submit_metrics(metrics)
+ prepared = prepare_metrics(metrics)
- pool&.with do |connection|
- prepared.each_slice(settings[:packet_size]) do |slice|
- begin
- connection.write_points(slice)
- rescue StandardError
+ pool&.with do |connection|
+ prepared.each_slice(settings[:packet_size]) do |slice|
+ begin
+ connection.write_points(slice)
+ rescue StandardError
+ end
end
end
+ rescue Errno::EADDRNOTAVAIL, SocketError => ex
+ Gitlab::EnvironmentLogger.error('Cannot resolve InfluxDB address. GitLab Performance Monitoring will not work.')
+ Gitlab::EnvironmentLogger.error(ex)
end
- rescue Errno::EADDRNOTAVAIL, SocketError => ex
- Gitlab::EnvironmentLogger.error('Cannot resolve InfluxDB address. GitLab Performance Monitoring will not work.')
- Gitlab::EnvironmentLogger.error(ex)
- end
- def prepare_metrics(metrics)
- metrics.map do |hash|
- new_hash = hash.symbolize_keys
+ def prepare_metrics(metrics)
+ metrics.map do |hash|
+ new_hash = hash.symbolize_keys
- new_hash[:tags].each do |key, value|
- if value.blank?
- new_hash[:tags].delete(key)
- else
- new_hash[:tags][key] = escape_value(value)
+ new_hash[:tags].each do |key, value|
+ if value.blank?
+ new_hash[:tags].delete(key)
+ else
+ new_hash[:tags][key] = escape_value(value)
+ end
end
- end
- new_hash
+ new_hash
+ end
end
- end
-
- def escape_value(value)
- value.to_s.gsub('=', '\\=')
- end
-
- # Measures the execution time of a block.
- #
- # Example:
- #
- # Gitlab::Metrics.measure(:find_by_username_duration) do
- # User.find_by_username(some_username)
- # end
- #
- # name - The name of the field to store the execution time in.
- #
- # Returns the value yielded by the supplied block.
- def measure(name)
- trans = current_transaction
- return yield unless trans
-
- real_start = Time.now.to_f
- cpu_start = System.cpu_time
-
- retval = yield
-
- cpu_stop = System.cpu_time
- real_stop = Time.now.to_f
+ def escape_value(value)
+ value.to_s.gsub('=', '\\=')
+ end
- real_time = (real_stop - real_start) * 1000.0
- cpu_time = cpu_stop - cpu_start
+ # Measures the execution time of a block.
+ #
+ # Example:
+ #
+ # Gitlab::Metrics.measure(:find_by_username_duration) do
+ # User.find_by_username(some_username)
+ # end
+ #
+ # name - The name of the field to store the execution time in.
+ #
+ # Returns the value yielded by the supplied block.
+ def measure(name)
+ trans = current_transaction
+
+ return yield unless trans
+
+ real_start = Time.now.to_f
+ cpu_start = System.cpu_time
+
+ retval = yield
+
+ cpu_stop = System.cpu_time
+ real_stop = Time.now.to_f
+
+ real_time = (real_stop - real_start)
+ cpu_time = cpu_stop - cpu_start
+
+ real_duration_seconds = fetch_histogram("gitlab_#{name}_real_duration_seconds".to_sym) do
+ docstring "Measure #{name}"
+ base_labels Transaction::BASE_LABELS
+ buckets EXECUTION_MEASUREMENT_BUCKETS
+ end
- trans.increment("#{name}_real_time", real_time)
- trans.increment("#{name}_cpu_time", cpu_time)
- trans.increment("#{name}_call_count", 1)
+ real_duration_seconds.observe(trans.labels, real_time)
- retval
- end
+ cpu_duration_seconds = fetch_histogram("gitlab_#{name}_cpu_duration_seconds".to_sym) do
+ docstring "Measure #{name}"
+ base_labels Transaction::BASE_LABELS
+ buckets EXECUTION_MEASUREMENT_BUCKETS
+ with_feature "prometheus_metrics_measure_#{name}_cpu_duration"
+ end
+ cpu_duration_seconds.observe(trans.labels, cpu_time)
- # Adds a tag to the current transaction (if any)
- #
- # name - The name of the tag to add.
- # value - The value of the tag.
- def tag_transaction(name, value)
- trans = current_transaction
+ # InfluxDB stores the _real_time and _cpu_time time values as milliseconds
+ trans.increment("#{name}_real_time", real_time.in_milliseconds, false)
+ trans.increment("#{name}_cpu_time", cpu_time.in_milliseconds, false)
+ trans.increment("#{name}_call_count", 1, false)
- trans&.add_tag(name, value)
- end
+ retval
+ end
- # Sets the action of the current transaction (if any)
- #
- # action - The name of the action.
- def action=(action)
- trans = current_transaction
+ # Sets the action of the current transaction (if any)
+ #
+ # action - The name of the action.
+ def action=(action)
+ trans = current_transaction
- trans&.action = action
- end
+ trans&.action = action
+ end
- # Tracks an event.
- #
- # See `Gitlab::Metrics::Transaction#add_event` for more details.
- def add_event(*args)
- trans = current_transaction
+ # Tracks an event.
+ #
+ # See `Gitlab::Metrics::Transaction#add_event` for more details.
+ def add_event(*args)
+ trans = current_transaction
- trans&.add_event(*args)
- end
+ trans&.add_event(*args)
+ end
- # Returns the prefix to use for the name of a series.
- def series_prefix
- @series_prefix ||= Sidekiq.server? ? 'sidekiq_' : 'rails_'
- end
+ # Returns the prefix to use for the name of a series.
+ def series_prefix
+ @series_prefix ||= Sidekiq.server? ? 'sidekiq_' : 'rails_'
+ end
- # Allow access from other metrics related middlewares
- def current_transaction
- Transaction.current
- end
+ # Allow access from other metrics related middlewares
+ def current_transaction
+ Transaction.current
+ end
- # When enabled this should be set before being used as the usual pattern
- # "@foo ||= bar" is _not_ thread-safe.
- def pool
- if influx_metrics_enabled?
- if @pool.nil?
- MUTEX.synchronize do
- @pool ||= ConnectionPool.new(size: settings[:pool_size], timeout: settings[:timeout]) do
- host = settings[:host]
- port = settings[:port]
-
- InfluxDB::Client
- .new(udp: { host: host, port: port })
+ # When enabled this should be set before being used as the usual pattern
+ # "@foo ||= bar" is _not_ thread-safe.
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ def pool
+ if influx_metrics_enabled?
+ if @pool.nil?
+ MUTEX.synchronize do
+ @pool ||= ConnectionPool.new(size: settings[:pool_size], timeout: settings[:timeout]) do
+ host = settings[:host]
+ port = settings[:port]
+
+ InfluxDB::Client
+ .new(udp: { host: host, port: port })
+ end
end
end
+
+ @pool
end
- @pool
end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
end
end
end
diff --git a/lib/gitlab/metrics/influx_sampler.rb b/lib/gitlab/metrics/influx_sampler.rb
deleted file mode 100644
index 6db1dd755b7..00000000000
--- a/lib/gitlab/metrics/influx_sampler.rb
+++ /dev/null
@@ -1,101 +0,0 @@
-module Gitlab
- module Metrics
- # Class that sends certain metrics to InfluxDB at a specific interval.
- #
- # This class is used to gather statistics that can't be directly associated
- # with a transaction such as system memory usage, garbage collection
- # statistics, etc.
- class InfluxSampler < BaseSampler
- # interval - The sampling interval in seconds.
- def initialize(interval = Metrics.settings[:sample_interval])
- super(interval)
- @last_step = nil
-
- @metrics = []
-
- @last_minor_gc = Delta.new(GC.stat[:minor_gc_count])
- @last_major_gc = Delta.new(GC.stat[:major_gc_count])
-
- if Gitlab::Metrics.mri?
- require 'allocations'
-
- Allocations.start
- end
- end
-
- def sample
- sample_memory_usage
- sample_file_descriptors
- sample_objects
- sample_gc
-
- flush
- ensure
- GC::Profiler.clear
- @metrics.clear
- end
-
- def flush
- Metrics.submit_metrics(@metrics.map(&:to_hash))
- end
-
- def sample_memory_usage
- add_metric('memory_usage', value: System.memory_usage)
- end
-
- def sample_file_descriptors
- add_metric('file_descriptors', value: System.file_descriptor_count)
- end
-
- if Metrics.mri?
- def sample_objects
- sample = Allocations.to_hash
- counts = sample.each_with_object({}) do |(klass, count), hash|
- name = klass.name
-
- next unless name
-
- hash[name] = count
- end
-
- # Symbols aren't allocated so we'll need to add those manually.
- counts['Symbol'] = Symbol.all_symbols.length
-
- counts.each do |name, count|
- add_metric('object_counts', { count: count }, type: name)
- end
- end
- else
- def sample_objects
- end
- end
-
- def sample_gc
- time = GC::Profiler.total_time * 1000.0
- stats = GC.stat.merge(total_time: time)
-
- # We want the difference of GC runs compared to the last sample, not the
- # total amount since the process started.
- stats[:minor_gc_count] =
- @last_minor_gc.compared_with(stats[:minor_gc_count])
-
- stats[:major_gc_count] =
- @last_major_gc.compared_with(stats[:major_gc_count])
-
- stats[:count] = stats[:minor_gc_count] + stats[:major_gc_count]
-
- add_metric('gc_statistics', stats)
- end
-
- def add_metric(series, values, tags = {})
- prefix = sidekiq? ? 'sidekiq_' : 'rails_'
-
- @metrics << Metric.new("#{prefix}#{series}", values, tags)
- end
-
- def sidekiq?
- Sidekiq.server?
- end
- end
- end
-end
diff --git a/lib/gitlab/metrics/instrumentation.rb b/lib/gitlab/metrics/instrumentation.rb
index 6aa38542cb4..023e9963493 100644
--- a/lib/gitlab/metrics/instrumentation.rb
+++ b/lib/gitlab/metrics/instrumentation.rb
@@ -118,19 +118,21 @@ module Gitlab
def self.instrument(type, mod, name)
return unless Metrics.enabled?
- name = name.to_sym
+ name = name.to_sym
target = type == :instance ? mod : mod.singleton_class
if type == :instance
target = mod
- label = "#{mod.name}##{name}"
+ method_name = "##{name}"
method = mod.instance_method(name)
else
target = mod.singleton_class
- label = "#{mod.name}.#{name}"
+ method_name = ".#{name}"
method = mod.method(name)
end
+ label = "#{mod.name}#{method_name}"
+
unless instrumented?(target)
target.instance_variable_set(PROXY_IVAR, Module.new)
end
@@ -153,7 +155,8 @@ module Gitlab
proxy_module.class_eval <<-EOF, __FILE__, __LINE__ + 1
def #{name}(#{args_signature})
if trans = Gitlab::Metrics::Instrumentation.transaction
- trans.method_call_for(#{label.to_sym.inspect}).measure { super }
+ trans.method_call_for(#{label.to_sym.inspect}, #{mod.name.inspect}, "#{method_name}")
+ .measure { super }
else
super
end
diff --git a/lib/gitlab/metrics/method_call.rb b/lib/gitlab/metrics/method_call.rb
index d3465e5ec19..b11520a79bb 100644
--- a/lib/gitlab/metrics/method_call.rb
+++ b/lib/gitlab/metrics/method_call.rb
@@ -1,18 +1,31 @@
+# rubocop:disable Style/ClassVars
+
module Gitlab
module Metrics
# Class for tracking timing information about method calls
class MethodCall
- attr_reader :real_time, :cpu_time, :call_count
+ include Gitlab::Metrics::Methods
+ BASE_LABELS = { module: nil, method: nil }.freeze
+ attr_reader :real_time, :cpu_time, :call_count, :labels
+
+ define_histogram :gitlab_method_call_duration_seconds do
+ docstring 'Method calls real duration'
+ base_labels Transaction::BASE_LABELS.merge(BASE_LABELS)
+ buckets [0.01, 0.05, 0.1, 0.5, 1]
+ with_feature :prometheus_metrics_method_instrumentation
+ end
# name - The full name of the method (including namespace) such as
# `User#sign_in`.
#
- # series - The series to use for storing the data.
- def initialize(name, series)
+ def initialize(name, module_name, method_name, transaction)
+ @module_name = module_name
+ @method_name = method_name
+ @transaction = transaction
@name = name
- @series = series
- @real_time = 0
- @cpu_time = 0
+ @labels = { module: @module_name, method: @method_name }
+ @real_time = 0.0
+ @cpu_time = 0.0
@call_count = 0
end
@@ -22,21 +35,28 @@ module Gitlab
start_cpu = System.cpu_time
retval = yield
- @real_time += System.monotonic_time - start_real
- @cpu_time += System.cpu_time - start_cpu
+ real_time = System.monotonic_time - start_real
+ cpu_time = System.cpu_time - start_cpu
+
+ @real_time += real_time
+ @cpu_time += cpu_time
@call_count += 1
+ if above_threshold?
+ self.class.gitlab_method_call_duration_seconds.observe(@transaction.labels.merge(labels), real_time)
+ end
+
retval
end
# Returns a Metric instance of the current method call.
def to_metric
Metric.new(
- @series,
+ Instrumentation.series,
{
- duration: real_time,
- cpu_duration: cpu_time,
- call_count: call_count
+ duration: real_time.in_milliseconds.to_i,
+ cpu_duration: cpu_time.in_milliseconds.to_i,
+ call_count: call_count
},
method: @name
)
@@ -45,7 +65,7 @@ module Gitlab
# Returns true if the total runtime of this method exceeds the method call
# threshold.
def above_threshold?
- real_time >= Metrics.method_call_threshold
+ real_time.in_milliseconds >= Metrics.method_call_threshold
end
end
end
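Editor's note: MethodCall#measure now accumulates float-second timings and only records a Prometheus sample once the accumulated real time crosses the configured threshold, which stays in milliseconds. A self-contained sketch of that threshold logic, with the histogram call replaced by a print:

    class TimedCall
      THRESHOLD_MS = 10 # illustrative threshold; the real value comes from settings

      def initialize
        @real_time = 0.0
      end

      def measure
        start = Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_second)
        result = yield
        @real_time += Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_second) - start
        puts "would observe #{@real_time.round(4)}s" if above_threshold?
        result
      end

      def above_threshold?
        @real_time * 1000 >= THRESHOLD_MS
      end
    end

    TimedCall.new.measure { sleep 0.02 }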
diff --git a/lib/gitlab/metrics/methods.rb b/lib/gitlab/metrics/methods.rb
new file mode 100644
index 00000000000..cd7c1e507f7
--- /dev/null
+++ b/lib/gitlab/metrics/methods.rb
@@ -0,0 +1,129 @@
+# rubocop:disable Style/ClassVars
+
+module Gitlab
+ module Metrics
+ module Methods
+ extend ActiveSupport::Concern
+
+ included do
+ @@_metric_provider_mutex ||= Mutex.new
+ @@_metrics_provider_cache = {}
+ end
+
+ class_methods do
+ def reload_metric!(name)
+ @@_metrics_provider_cache.delete(name)
+ end
+
+ private
+
+ def define_metric(type, name, opts = {}, &block)
+ if respond_to?(name)
+ raise ArgumentError, "method #{name} already exists"
+ end
+
+ define_singleton_method(name) do
+ # inline the fetch_metric lookup to avoid method-call overhead when instrumenting hot spots
+ @@_metrics_provider_cache[name] || init_metric(type, name, opts, &block)
+ end
+ end
+
+ def fetch_metric(type, name, opts = {}, &block)
+ @@_metrics_provider_cache[name] || init_metric(type, name, opts, &block)
+ end
+
+ def init_metric(type, name, opts = {}, &block)
+ options = MetricOptions.new(opts)
+ options.evaluate(&block)
+
+ if disabled_by_feature(options)
+ synchronized_cache_fill(name) { NullMetric.instance }
+ else
+ synchronized_cache_fill(name) { build_metric!(type, name, options) }
+ end
+ end
+
+ def synchronized_cache_fill(key)
+ @@_metric_provider_mutex.synchronize do
+ @@_metrics_provider_cache[key] ||= yield
+ end
+ end
+
+ def disabled_by_feature(options)
+ options.with_feature && !Feature.get(options.with_feature).enabled?
+ end
+
+ def build_metric!(type, name, options)
+ case type
+ when :gauge
+ Gitlab::Metrics.gauge(name, options.docstring, options.base_labels, options.multiprocess_mode)
+ when :counter
+ Gitlab::Metrics.counter(name, options.docstring, options.base_labels)
+ when :histogram
+ Gitlab::Metrics.histogram(name, options.docstring, options.base_labels, options.buckets)
+ when :summary
+ raise NotImplementedError, "summary metrics are not currently supported"
+ else
+ raise ArgumentError, "unknown metric type #{type}"
+ end
+ end
+
+ # Fetch and/or initialize counter metric
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def fetch_counter(name, opts = {}, &block)
+ fetch_metric(:counter, name, opts, &block)
+ end
+
+ # Fetch and/or initialize gauge metric
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def fetch_gauge(name, opts = {}, &block)
+ fetch_metric(:gauge, name, opts, &block)
+ end
+
+ # Fetch and/or initialize histogram metric
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def fetch_histogram(name, opts = {}, &block)
+ fetch_metric(:histogram, name, opts, &block)
+ end
+
+ # Fetch and/or initialize summary metric
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def fetch_summary(name, opts = {}, &block)
+ fetch_metric(:summary, name, opts, &block)
+ end
+
+ # Define metric accessor method for a Counter
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def define_counter(name, opts = {}, &block)
+ define_metric(:counter, name, opts, &block)
+ end
+
+ # Define metric accessor method for a Gauge
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def define_gauge(name, opts = {}, &block)
+ define_metric(:gauge, name, opts, &block)
+ end
+
+ # Define metric accessor method for a Histogram
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def define_histogram(name, opts = {}, &block)
+ define_metric(:histogram, name, opts, &block)
+ end
+
+ # Define metric accessor method for a Summary
+ # @param [Symbol] name
+ # @param [Hash] opts
+ def define_summary(name, opts = {}, &block)
+ define_metric(:summary, name, opts, &block)
+ end
+ end
+ end
+ end
+end
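Editor's note: elsewhere in this diff the concern is consumed through define_histogram, fetch_histogram and friends. A hedged usage sketch (the class and metric names below are invented, and it assumes the surrounding GitLab code, so it is not runnable on its own):

    class SomeInstrumentedClass
      include Gitlab::Metrics::Methods

      define_histogram :gitlab_example_duration_seconds do
        docstring 'Duration of the example operation'
        base_labels controller: nil, action: nil
        buckets [0.1, 0.5, 1.0]
        with_feature :prometheus_metrics_example
      end
    end

    # The first call builds the metric (or a NullMetric when the feature flag is off);
    # later calls return the cached instance.
    SomeInstrumentedClass.gitlab_example_duration_seconds.observe({ controller: 'Foo', action: 'bar' }, 0.25)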
diff --git a/lib/gitlab/metrics/methods/metric_options.rb b/lib/gitlab/metrics/methods/metric_options.rb
new file mode 100644
index 00000000000..70e122d4e15
--- /dev/null
+++ b/lib/gitlab/metrics/methods/metric_options.rb
@@ -0,0 +1,61 @@
+module Gitlab
+ module Metrics
+ module Methods
+ class MetricOptions
+ SMALL_NETWORK_BUCKETS = [0.005, 0.01, 0.1, 1, 10].freeze
+
+ def initialize(options = {})
+ @multiprocess_mode = options[:multiprocess_mode] || :all
+ @buckets = options[:buckets] || SMALL_NETWORK_BUCKETS
+ @base_labels = options[:base_labels] || {}
+ @docstring = options[:docstring]
+ @with_feature = options[:with_feature]
+ end
+
+ # Documentation describing the metric in the metrics endpoint '/-/metrics'
+ def docstring(docstring = nil)
+ @docstring = docstring unless docstring.nil?
+
+ @docstring
+ end
+
+ # Gauge aggregation mode for multiprocess metrics
+ # - :all (default) returns each gauge for every process
+ # - :livesum all processes' gauges summed up
+ # - :max maximum value of the per-process gauges
+ # - :min minimum value of the per-process gauges
+ def multiprocess_mode(mode = nil)
+ @multiprocess_mode = mode unless mode.nil?
+
+ @multiprocess_mode
+ end
+
+ # Measurement buckets for histograms
+ def buckets(buckets = nil)
+ @buckets = buckets unless buckets.nil?
+
+ @buckets
+ end
+
+ # Base labels are merged with per-metric labels
+ def base_labels(base_labels = nil)
+ @base_labels = base_labels unless base_labels.nil?
+
+ @base_labels
+ end
+
+ # Use a feature toggle to control whether a given metric is enabled or disabled
+ def with_feature(name = nil)
+ @with_feature = name unless name.nil?
+
+ @with_feature
+ end
+
+ def evaluate(&block)
+ instance_eval(&block) if block_given?
+ self
+ end
+ end
+ end
+ end
+end
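Editor's note: the options object is filled in by instance_eval-ing the block passed to define_*/fetch_*, which is why each DSL method doubles as setter and getter. The same builder pattern, reduced to standalone Ruby:

    class Options
      def initialize
        @buckets = [0.005, 0.01, 0.1, 1, 10] # default, mirroring SMALL_NETWORK_BUCKETS
      end

      # With an argument this sets the value, without one it reads it back.
      def buckets(value = nil)
        @buckets = value unless value.nil?
        @buckets
      end

      def evaluate(&block)
        instance_eval(&block) if block_given?
        self
      end
    end

    opts = Options.new.evaluate { buckets [0.1, 1.0] }
    puts opts.buckets.inspect # => [0.1, 1.0]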
diff --git a/lib/gitlab/metrics/null_metric.rb b/lib/gitlab/metrics/null_metric.rb
index 3b5a2907195..aabada5c21a 100644
--- a/lib/gitlab/metrics/null_metric.rb
+++ b/lib/gitlab/metrics/null_metric.rb
@@ -2,6 +2,8 @@ module Gitlab
module Metrics
# Mocks ::Prometheus::Client::Metric and all derived metrics
class NullMetric
+ include Singleton
+
def method_missing(name, *args, &block)
nil
end
diff --git a/lib/gitlab/metrics/prometheus.rb b/lib/gitlab/metrics/prometheus.rb
index 460dab47276..d12ba0ec176 100644
--- a/lib/gitlab/metrics/prometheus.rb
+++ b/lib/gitlab/metrics/prometheus.rb
@@ -3,54 +3,76 @@ require 'prometheus/client'
module Gitlab
module Metrics
module Prometheus
- include Gitlab::CurrentSettings
+ extend ActiveSupport::Concern
- def metrics_folder_present?
- multiprocess_files_dir = ::Prometheus::Client.configuration.multiprocess_files_dir
+ REGISTRY_MUTEX = Mutex.new
+ PROVIDER_MUTEX = Mutex.new
- multiprocess_files_dir &&
- ::Dir.exist?(multiprocess_files_dir) &&
- ::File.writable?(multiprocess_files_dir)
- end
+ class_methods do
+ include Gitlab::Utils::StrongMemoize
- def prometheus_metrics_enabled?
- return @prometheus_metrics_enabled if defined?(@prometheus_metrics_enabled)
+ def metrics_folder_present?
+ multiprocess_files_dir = ::Prometheus::Client.configuration.multiprocess_files_dir
- @prometheus_metrics_enabled = prometheus_metrics_enabled_unmemoized
- end
+ multiprocess_files_dir &&
+ ::Dir.exist?(multiprocess_files_dir) &&
+ ::File.writable?(multiprocess_files_dir)
+ end
- def registry
- @registry ||= ::Prometheus::Client.registry
- end
+ def prometheus_metrics_enabled?
+ strong_memoize(:prometheus_metrics_enabled) do
+ prometheus_metrics_enabled_unmemoized
+ end
+ end
- def counter(name, docstring, base_labels = {})
- provide_metric(name) || registry.counter(name, docstring, base_labels)
- end
+ def registry
+ strong_memoize(:registry) do
+ REGISTRY_MUTEX.synchronize do
+ strong_memoize(:registry) do
+ ::Prometheus::Client.registry
+ end
+ end
+ end
+ end
- def summary(name, docstring, base_labels = {})
- provide_metric(name) || registry.summary(name, docstring, base_labels)
- end
+ def counter(name, docstring, base_labels = {})
+ safe_provide_metric(:counter, name, docstring, base_labels)
+ end
- def gauge(name, docstring, base_labels = {}, multiprocess_mode = :all)
- provide_metric(name) || registry.gauge(name, docstring, base_labels, multiprocess_mode)
- end
+ def summary(name, docstring, base_labels = {})
+ safe_provide_metric(:summary, name, docstring, base_labels)
+ end
- def histogram(name, docstring, base_labels = {}, buckets = ::Prometheus::Client::Histogram::DEFAULT_BUCKETS)
- provide_metric(name) || registry.histogram(name, docstring, base_labels, buckets)
- end
+ def gauge(name, docstring, base_labels = {}, multiprocess_mode = :all)
+ safe_provide_metric(:gauge, name, docstring, base_labels, multiprocess_mode)
+ end
- def provide_metric(name)
- if prometheus_metrics_enabled?
- registry.get(name)
- else
- NullMetric.new
+ def histogram(name, docstring, base_labels = {}, buckets = ::Prometheus::Client::Histogram::DEFAULT_BUCKETS)
+ safe_provide_metric(:histogram, name, docstring, base_labels, buckets)
end
- end
- private
+ private
+
+ def safe_provide_metric(method, name, *args)
+ metric = provide_metric(name)
+ return metric if metric
- def prometheus_metrics_enabled_unmemoized
- metrics_folder_present? && current_application_settings[:prometheus_metrics_enabled] || false
+ PROVIDER_MUTEX.synchronize do
+ provide_metric(name) || registry.method(method).call(name, *args)
+ end
+ end
+
+ def provide_metric(name)
+ if prometheus_metrics_enabled?
+ registry.get(name)
+ else
+ NullMetric.instance
+ end
+ end
+
+ def prometheus_metrics_enabled_unmemoized
+ metrics_folder_present? && Gitlab::CurrentSettings.prometheus_metrics_enabled || false
+ end
end
end
end
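Editor's note: safe_provide_metric and the memoized registry both follow a check-lock-check pattern so a metric is registered at most once under concurrent access. A minimal standalone sketch of that pattern:

    REGISTRY = {}
    REGISTRY_MUTEX = Mutex.new

    def fetch_or_register(name)
      # Fast path without the lock, then re-check inside the lock before building.
      REGISTRY[name] || REGISTRY_MUTEX.synchronize { REGISTRY[name] ||= yield }
    end

    metric = fetch_or_register(:gitlab_example_total) { Object.new }
    puts metric.equal?(fetch_or_register(:gitlab_example_total) { Object.new }) # => true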
diff --git a/lib/gitlab/metrics/rack_middleware.rb b/lib/gitlab/metrics/rack_middleware.rb
index adc0db1a874..2d45765df3f 100644
--- a/lib/gitlab/metrics/rack_middleware.rb
+++ b/lib/gitlab/metrics/rack_middleware.rb
@@ -2,20 +2,6 @@ module Gitlab
module Metrics
# Rack middleware for tracking Rails and Grape requests.
class RackMiddleware
- CONTROLLER_KEY = 'action_controller.instance'.freeze
- ENDPOINT_KEY = 'api.endpoint'.freeze
- CONTENT_TYPES = {
- 'text/html' => :html,
- 'text/plain' => :txt,
- 'application/json' => :json,
- 'text/js' => :js,
- 'application/atom+xml' => :atom,
- 'image/png' => :png,
- 'image/jpeg' => :jpeg,
- 'image/gif' => :gif,
- 'image/svg+xml' => :svg
- }.freeze
-
def initialize(app)
@app = app
end
@@ -35,12 +21,6 @@ module Gitlab
# Even in the event of an error we want to submit any metrics we
# might've gathered up to this point.
ensure
- if env[CONTROLLER_KEY]
- tag_controller(trans, env)
- elsif env[ENDPOINT_KEY]
- tag_endpoint(trans, env)
- end
-
trans.finish
end
@@ -48,60 +28,19 @@ module Gitlab
end
def transaction_from_env(env)
- trans = Transaction.new
+ trans = WebTransaction.new(env)
- trans.set(:request_uri, filtered_path(env))
- trans.set(:request_method, env['REQUEST_METHOD'])
+ trans.set(:request_uri, filtered_path(env), false)
+ trans.set(:request_method, env['REQUEST_METHOD'], false)
trans
end
- def tag_controller(trans, env)
- controller = env[CONTROLLER_KEY]
- action = "#{controller.class.name}##{controller.action_name}"
- suffix = CONTENT_TYPES[controller.content_type]
-
- if suffix && suffix != :html
- action += ".#{suffix}"
- end
-
- trans.action = action
- end
-
- def tag_endpoint(trans, env)
- endpoint = env[ENDPOINT_KEY]
-
- begin
- route = endpoint.route
- rescue
- # endpoint.route is calling env[Grape::Env::GRAPE_ROUTING_ARGS][:route_info]
- # but env[Grape::Env::GRAPE_ROUTING_ARGS] is nil in the case of a 405 response
- # so we're rescuing exceptions and bailing out
- end
-
- if route
- path = endpoint_paths_cache[route.request_method][route.path]
- trans.action = "Grape##{route.request_method} #{path}"
- end
- end
-
private
def filtered_path(env)
ActionDispatch::Request.new(env).filtered_path.presence || env['REQUEST_URI']
end
-
- def endpoint_paths_cache
- @endpoint_paths_cache ||= Hash.new do |hash, http_method|
- hash[http_method] = Hash.new do |inner_hash, raw_path|
- inner_hash[raw_path] = endpoint_instrumentable_path(raw_path)
- end
- end
- end
-
- def endpoint_instrumentable_path(raw_path)
- raw_path.sub('(.:format)', '').sub('/:version', '')
- end
end
end
end
diff --git a/lib/gitlab/metrics/samplers/base_sampler.rb b/lib/gitlab/metrics/samplers/base_sampler.rb
new file mode 100644
index 00000000000..37f90c4673d
--- /dev/null
+++ b/lib/gitlab/metrics/samplers/base_sampler.rb
@@ -0,0 +1,64 @@
+require 'logger'
+
+module Gitlab
+ module Metrics
+ module Samplers
+ class BaseSampler < Daemon
+ # interval - The sampling interval in seconds.
+ def initialize(interval)
+ interval_half = interval.to_f / 2
+
+ @interval = interval
+ @interval_steps = (-interval_half..interval_half).step(0.1).to_a
+
+ super()
+ end
+
+ def safe_sample
+ sample
+ rescue => e
+ Rails.logger.warn("#{self.class}: #{e}, stopping")
+ stop
+ end
+
+ def sample
+ raise NotImplementedError
+ end
+
+ # Returns the sleep interval with a random adjustment.
+ #
+ # The random adjustment is put in place to ensure we:
+ #
+ # 1. Don't generate samples at the exact same interval every time (thus
+ # potentially missing anything that happens in between samples).
+ # 2. Don't sample data at the same interval two times in a row.
+ def sleep_interval
+ while step = @interval_steps.sample
+ if step != @last_step
+ @last_step = step
+
+ return @interval + @last_step
+ end
+ end
+ end
+
+ private
+
+ attr_reader :running
+
+ def start_working
+ @running = true
+ sleep(sleep_interval)
+ while running
+ safe_sample
+ sleep(sleep_interval)
+ end
+ end
+
+ def stop_working
+ @running = false
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/metrics/samplers/influx_sampler.rb b/lib/gitlab/metrics/samplers/influx_sampler.rb
new file mode 100644
index 00000000000..5a0f7f28fc8
--- /dev/null
+++ b/lib/gitlab/metrics/samplers/influx_sampler.rb
@@ -0,0 +1,79 @@
+module Gitlab
+ module Metrics
+ module Samplers
+ # Class that sends certain metrics to InfluxDB at a specific interval.
+ #
+ # This class is used to gather statistics that can't be directly associated
+ # with a transaction such as system memory usage, garbage collection
+ # statistics, etc.
+ class InfluxSampler < BaseSampler
+ # interval - The sampling interval in seconds.
+ def initialize(interval = Metrics.settings[:sample_interval])
+ super(interval)
+ @last_step = nil
+
+ @metrics = []
+
+ @last_minor_gc = Delta.new(GC.stat[:minor_gc_count])
+ @last_major_gc = Delta.new(GC.stat[:major_gc_count])
+
+ if Gitlab::Metrics.mri?
+ require 'allocations'
+
+ Allocations.start
+ end
+ end
+
+ def sample
+ sample_memory_usage
+ sample_file_descriptors
+ sample_gc
+
+ flush
+ ensure
+ GC::Profiler.clear
+ @metrics.clear
+ end
+
+ def flush
+ Metrics.submit_metrics(@metrics.map(&:to_hash))
+ end
+
+ def sample_memory_usage
+ add_metric('memory_usage', value: System.memory_usage)
+ end
+
+ def sample_file_descriptors
+ add_metric('file_descriptors', value: System.file_descriptor_count)
+ end
+
+ def sample_gc
+ time = GC::Profiler.total_time * 1000.0
+ stats = GC.stat.merge(total_time: time)
+
+ # We want the difference of GC runs compared to the last sample, not the
+ # total amount since the process started.
+ stats[:minor_gc_count] =
+ @last_minor_gc.compared_with(stats[:minor_gc_count])
+
+ stats[:major_gc_count] =
+ @last_major_gc.compared_with(stats[:major_gc_count])
+
+ stats[:count] = stats[:minor_gc_count] + stats[:major_gc_count]
+
+ add_metric('gc_statistics', stats)
+ end
+
+ def add_metric(series, values, tags = {})
+ prefix = sidekiq? ? 'sidekiq_' : 'rails_'
+
+ @metrics << Metric.new("#{prefix}#{series}", values, tags)
+ end
+
+ def sidekiq?
+ Sidekiq.server?
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/metrics/samplers/ruby_sampler.rb b/lib/gitlab/metrics/samplers/ruby_sampler.rb
new file mode 100644
index 00000000000..4e1ea62351f
--- /dev/null
+++ b/lib/gitlab/metrics/samplers/ruby_sampler.rb
@@ -0,0 +1,84 @@
+require 'prometheus/client/support/unicorn'
+
+module Gitlab
+ module Metrics
+ module Samplers
+ class RubySampler < BaseSampler
+ def metrics
+ @metrics ||= init_metrics
+ end
+
+ def with_prefix(prefix, name)
+ "ruby_#{prefix}_#{name}".to_sym
+ end
+
+ def to_doc_string(name)
+ name.to_s.humanize
+ end
+
+ def labels
+ {}
+ end
+
+ def initialize(interval)
+ super(interval)
+
+ if Metrics.mri?
+ require 'allocations'
+
+ Allocations.start
+ end
+ end
+
+ def init_metrics
+ metrics = {}
+ metrics[:sampler_duration] = Metrics.histogram(with_prefix(:sampler_duration, :seconds), 'Sampler time', { worker: nil })
+ metrics[:total_time] = Metrics.gauge(with_prefix(:gc, :time_total), 'Total GC time', labels, :livesum)
+ GC.stat.keys.each do |key|
+ metrics[key] = Metrics.gauge(with_prefix(:gc, key), to_doc_string(key), labels, :livesum)
+ end
+
+ metrics[:objects_total] = Metrics.gauge(with_prefix(:objects, :total), 'Objects total', labels.merge(class: nil), :livesum)
+ metrics[:memory_usage] = Metrics.gauge(with_prefix(:memory, :usage_total), 'Memory used total', labels, :livesum)
+ metrics[:file_descriptors] = Metrics.gauge(with_prefix(:file, :descriptors_total), 'File descriptors total', labels, :livesum)
+
+ metrics
+ end
+
+ def sample
+ start_time = System.monotonic_time
+ sample_gc
+
+ metrics[:memory_usage].set(labels, System.memory_usage)
+ metrics[:file_descriptors].set(labels, System.file_descriptor_count)
+
+ metrics[:sampler_duration].observe(labels.merge(worker_label), System.monotonic_time - start_time)
+ ensure
+ GC::Profiler.clear
+ end
+
+ private
+
+ def sample_gc
+ metrics[:total_time].set(labels, GC::Profiler.total_time * 1000)
+
+ GC.stat.each do |key, value|
+ metrics[key].set(labels, value)
+ end
+ end
+
+ def worker_label
+ return {} unless defined?(Unicorn::Worker)
+
+ worker_no = ::Prometheus::Client::Support::Unicorn.worker_id
+
+ if worker_no
+ { worker: worker_no }
+ else
+ { worker: 'master' }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/metrics/samplers/unicorn_sampler.rb b/lib/gitlab/metrics/samplers/unicorn_sampler.rb
new file mode 100644
index 00000000000..ea325651fbb
--- /dev/null
+++ b/lib/gitlab/metrics/samplers/unicorn_sampler.rb
@@ -0,0 +1,50 @@
+module Gitlab
+ module Metrics
+ module Samplers
+ class UnicornSampler < BaseSampler
+ def initialize(interval)
+ super(interval)
+ end
+
+ def unicorn_active_connections
+ @unicorn_active_connections ||= Gitlab::Metrics.gauge(:unicorn_active_connections, 'Unicorn active connections', {}, :max)
+ end
+
+ def unicorn_queued_connections
+ @unicorn_queued_connections ||= Gitlab::Metrics.gauge(:unicorn_queued_connections, 'Unicorn queued connections', {}, :max)
+ end
+
+ def enabled?
+ # Raindrops::Linux.tcp_listener_stats is only present on Linux
+ unicorn_with_listeners? && Raindrops::Linux.respond_to?(:tcp_listener_stats)
+ end
+
+ def sample
+ Raindrops::Linux.tcp_listener_stats(tcp_listeners).each do |addr, stats|
+ unicorn_active_connections.set({ type: 'tcp', address: addr }, stats.active)
+ unicorn_queued_connections.set({ type: 'tcp', address: addr }, stats.queued)
+ end
+
+ Raindrops::Linux.unix_listener_stats(unix_listeners).each do |addr, stats|
+ unicorn_active_connections.set({ type: 'unix', address: addr }, stats.active)
+ unicorn_queued_connections.set({ type: 'unix', address: addr }, stats.queued)
+ end
+ end
+
+ private
+
+ def tcp_listeners
+ @tcp_listeners ||= Unicorn.listener_names.grep(%r{\A[^/]+:\d+\z})
+ end
+
+ def unix_listeners
+ @unix_listeners ||= Unicorn.listener_names - tcp_listeners
+ end
+
+ def unicorn_with_listeners?
+ defined?(Unicorn) && Unicorn.listener_names.any?
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/metrics/sidekiq_metrics_exporter.rb b/lib/gitlab/metrics/sidekiq_metrics_exporter.rb
index 5980a4ded2b..db8bdde74b2 100644
--- a/lib/gitlab/metrics/sidekiq_metrics_exporter.rb
+++ b/lib/gitlab/metrics/sidekiq_metrics_exporter.rb
@@ -23,7 +23,7 @@ module Gitlab
end
def stop_working
- server.shutdown
+ server.shutdown if server
@server = nil
end
diff --git a/lib/gitlab/metrics/sidekiq_middleware.rb b/lib/gitlab/metrics/sidekiq_middleware.rb
index f9dd8e41912..df4bdf16847 100644
--- a/lib/gitlab/metrics/sidekiq_middleware.rb
+++ b/lib/gitlab/metrics/sidekiq_middleware.rb
@@ -5,7 +5,7 @@ module Gitlab
# This middleware is intended to be used as a server-side middleware.
class SidekiqMiddleware
def call(worker, message, queue)
- trans = Transaction.new("#{worker.class.name}#perform")
+ trans = BackgroundTransaction.new(worker.class)
begin
# Old gitlab-shell messages don't provide enqueued_at/created_at attributes
diff --git a/lib/gitlab/metrics/subscribers/action_view.rb b/lib/gitlab/metrics/subscribers/action_view.rb
index d435a33e9c7..b600e8a2a50 100644
--- a/lib/gitlab/metrics/subscribers/action_view.rb
+++ b/lib/gitlab/metrics/subscribers/action_view.rb
@@ -3,6 +3,14 @@ module Gitlab
module Subscribers
# Class for tracking the rendering timings of views.
class ActionView < ActiveSupport::Subscriber
+ include Gitlab::Metrics::Methods
+ define_histogram :gitlab_view_rendering_duration_seconds do
+ docstring 'View rendering time'
+ base_labels Transaction::BASE_LABELS.merge({ path: nil })
+ buckets [0.001, 0.01, 0.1, 1, 10.0]
+ with_feature :prometheus_metrics_view_instrumentation
+ end
+
attach_to :action_view
SERIES = 'views'.freeze
@@ -19,12 +27,14 @@ module Gitlab
values = values_for(event)
tags = tags_for(event)
+ self.class.gitlab_view_rendering_duration_seconds.observe(current_transaction.labels.merge(tags), event.duration)
+
current_transaction.increment(:view_duration, event.duration)
current_transaction.add_metric(SERIES, values, tags)
end
def relative_path(path)
- path.gsub(/^#{Rails.root.to_s}\/?/, '')
+ path.gsub(%r{^#{Rails.root.to_s}/?}, '')
end
def values_for(event)
diff --git a/lib/gitlab/metrics/subscribers/active_record.rb b/lib/gitlab/metrics/subscribers/active_record.rb
index 96cad941d5c..4b3e8d0a6a0 100644
--- a/lib/gitlab/metrics/subscribers/active_record.rb
+++ b/lib/gitlab/metrics/subscribers/active_record.rb
@@ -3,17 +3,26 @@ module Gitlab
module Subscribers
# Class for tracking the total query duration of a transaction.
class ActiveRecord < ActiveSupport::Subscriber
+ include Gitlab::Metrics::Methods
attach_to :active_record
def sql(event)
return unless current_transaction
- current_transaction.increment(:sql_duration, event.duration)
- current_transaction.increment(:sql_count, 1)
+ self.class.gitlab_sql_duration_seconds.observe(current_transaction.labels, event.duration / 1000.0)
+
+ current_transaction.increment(:sql_duration, event.duration, false)
+ current_transaction.increment(:sql_count, 1, false)
end
private
+ define_histogram :gitlab_sql_duration_seconds do
+ docstring 'SQL time'
+ base_labels Transaction::BASE_LABELS
+ buckets [0.001, 0.01, 0.1, 1.0, 10.0]
+ end
+
def current_transaction
Transaction.current
end
diff --git a/lib/gitlab/metrics/subscribers/rails_cache.rb b/lib/gitlab/metrics/subscribers/rails_cache.rb
index aaed2184f44..250897a79c2 100644
--- a/lib/gitlab/metrics/subscribers/rails_cache.rb
+++ b/lib/gitlab/metrics/subscribers/rails_cache.rb
@@ -7,28 +7,29 @@ module Gitlab
attach_to :active_support
def cache_read(event)
- increment(:cache_read, event.duration)
+ observe(:read, event.duration)
return unless current_transaction
return if event.payload[:super_operation] == :fetch
if event.payload[:hit]
- current_transaction.increment(:cache_read_hit_count, 1)
+ current_transaction.increment(:cache_read_hit_count, 1, false)
else
- current_transaction.increment(:cache_read_miss_count, 1)
+ metric_cache_misses_total.increment(current_transaction.labels)
+ current_transaction.increment(:cache_read_miss_count, 1, false)
end
end
def cache_write(event)
- increment(:cache_write, event.duration)
+ observe(:write, event.duration)
end
def cache_delete(event)
- increment(:cache_delete, event.duration)
+ observe(:delete, event.duration)
end
def cache_exist?(event)
- increment(:cache_exists, event.duration)
+ observe(:exists, event.duration)
end
def cache_fetch_hit(event)
@@ -40,16 +41,18 @@ module Gitlab
def cache_generate(event)
return unless current_transaction
+ metric_cache_misses_total.increment(current_transaction.labels)
current_transaction.increment(:cache_read_miss_count, 1)
end
- def increment(key, duration)
+ def observe(key, duration)
return unless current_transaction
- current_transaction.increment(:cache_duration, duration)
- current_transaction.increment(:cache_count, 1)
- current_transaction.increment("#{key}_duration".to_sym, duration)
- current_transaction.increment("#{key}_count".to_sym, 1)
+ metric_cache_operation_duration_seconds.observe(current_transaction.labels.merge({ operation: key }), duration / 1000.0)
+ current_transaction.increment(:cache_duration, duration, false)
+ current_transaction.increment(:cache_count, 1, false)
+ current_transaction.increment("cache_#{key}_duration".to_sym, duration, false)
+ current_transaction.increment("cache_#{key}_count".to_sym, 1, false)
end
private
@@ -57,6 +60,23 @@ module Gitlab
def current_transaction
Transaction.current
end
+
+ def metric_cache_operation_duration_seconds
+ @metric_cache_operation_duration_seconds ||= Gitlab::Metrics.histogram(
+ :gitlab_cache_operation_duration_seconds,
+ 'Cache access time',
+ Transaction::BASE_LABELS.merge({ action: nil }),
+ [0.001, 0.01, 0.1, 1, 10]
+ )
+ end
+
+ def metric_cache_misses_total
+ @metric_cache_misses_total ||= Gitlab::Metrics.counter(
+ :gitlab_cache_misses_total,
+ 'Cache read miss',
+ Transaction::BASE_LABELS
+ )
+ end
end
end
end
diff --git a/lib/gitlab/metrics/system.rb b/lib/gitlab/metrics/system.rb
index aba3e0df382..e60e245cf89 100644
--- a/lib/gitlab/metrics/system.rb
+++ b/lib/gitlab/metrics/system.rb
@@ -35,27 +35,27 @@ module Gitlab
if Process.const_defined?(:CLOCK_THREAD_CPUTIME_ID)
def self.cpu_time
Process
- .clock_gettime(Process::CLOCK_THREAD_CPUTIME_ID, :millisecond)
+ .clock_gettime(Process::CLOCK_THREAD_CPUTIME_ID, :float_second)
end
else
def self.cpu_time
Process
- .clock_gettime(Process::CLOCK_PROCESS_CPUTIME_ID, :millisecond)
+ .clock_gettime(Process::CLOCK_PROCESS_CPUTIME_ID, :float_second)
end
end
# Returns the current real time in a given precision.
#
- # Returns the time as a Float.
- def self.real_time(precision = :millisecond)
+ # Returns the time as a Float for precision = :float_second.
+ def self.real_time(precision = :float_second)
Process.clock_gettime(Process::CLOCK_REALTIME, precision)
end
- # Returns the current monotonic clock time in a given precision.
+ # Returns the current monotonic clock time as seconds with microseconds precision.
#
# Returns the time as a Float.
- def self.monotonic_time(precision = :millisecond)
- Process.clock_gettime(Process::CLOCK_MONOTONIC, precision)
+ def self.monotonic_time
+ Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_second)
end
end
end
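Editor's note: with the switch to :float_second these clocks now return plain seconds as a Float, so durations can be fed to Prometheus without conversion. A quick standalone check:

    start = Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_second)
    sleep 0.05
    elapsed = Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_second) - start
    puts format('%.3f seconds', elapsed) # roughly 0.050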
diff --git a/lib/gitlab/metrics/transaction.rb b/lib/gitlab/metrics/transaction.rb
index 4f9fb1c7853..f3e48083c19 100644
--- a/lib/gitlab/metrics/transaction.rb
+++ b/lib/gitlab/metrics/transaction.rb
@@ -2,6 +2,11 @@ module Gitlab
module Metrics
# Class for storing metrics information of a single transaction.
class Transaction
+ include Gitlab::Metrics::Methods
+
+ # base labels shared among all transactions
+ BASE_LABELS = { controller: nil, action: nil }.freeze
+
THREAD_KEY = :_gitlab_metrics_transaction
# The series to store events (e.g. Git pushes) in.
@@ -9,33 +14,32 @@ module Gitlab
attr_reader :tags, :values, :method, :metrics
- attr_accessor :action
-
def self.current
Thread.current[THREAD_KEY]
end
- # action - A String describing the action performed, usually the class
- # plus method name.
- def initialize(action = nil)
+ def initialize
@metrics = []
@methods = {}
- @started_at = nil
+ @started_at = nil
@finished_at = nil
@values = Hash.new(0)
- @tags = {}
- @action = action
+ @tags = {}
@memory_before = 0
- @memory_after = 0
+ @memory_after = 0
end
def duration
@finished_at ? (@finished_at - @started_at) : 0.0
end
+ def duration_milliseconds
+ duration.in_milliseconds.to_i
+ end
+
def allocated_memory
@memory_after - @memory_before
end
@@ -44,12 +48,15 @@ module Gitlab
Thread.current[THREAD_KEY] = self
@memory_before = System.memory_usage
- @started_at = System.monotonic_time
+ @started_at = System.monotonic_time
yield
ensure
@memory_after = System.memory_usage
- @finished_at = System.monotonic_time
+ @finished_at = System.monotonic_time
+
+ self.class.gitlab_transaction_duration_seconds.observe(labels, duration)
+ self.class.gitlab_transaction_allocated_memory_bytes.observe(labels, allocated_memory * 1024.0)
Thread.current[THREAD_KEY] = nil
end
@@ -66,40 +73,36 @@ module Gitlab
# event_name - The name of the event (e.g. "git_push").
# tags - A set of tags to attach to the event.
def add_event(event_name, tags = {})
- @metrics << Metric.new(EVENT_SERIES,
- { count: 1 },
- { event: event_name }.merge(tags),
- :event)
+ self.class.transaction_metric(event_name, :counter, prefix: 'event_', use_feature_flag: true, tags: tags).increment(tags.merge(labels))
+ @metrics << Metric.new(EVENT_SERIES, { count: 1 }, tags.merge(event: event_name), :event)
end
# Returns a MethodCall object for the given name.
- def method_call_for(name)
+ def method_call_for(name, module_name, method_name)
unless method = @methods[name]
- @methods[name] = method = MethodCall.new(name, Instrumentation.series)
+ @methods[name] = method = MethodCall.new(name, module_name, method_name, self)
end
method
end
- def increment(name, value)
+ def increment(name, value, use_prometheus = true)
+ self.class.transaction_metric(name, :counter).increment(labels, value) if use_prometheus
@values[name] += value
end
- def set(name, value)
+ def set(name, value, use_prometheus = true)
+ self.class.transaction_metric(name, :gauge).set(labels, value) if use_prometheus
@values[name] = value
end
- def add_tag(key, value)
- @tags[key] = value
- end
-
def finish
track_self
submit
end
def track_self
- values = { duration: duration, allocated_memory: allocated_memory }
+ values = { duration: duration_milliseconds, allocated_memory: allocated_memory }
@values.each do |name, value|
values[name] = value
@@ -117,14 +120,48 @@ module Gitlab
submit_hashes = submit.map do |metric|
hash = metric.to_hash
-
- hash[:tags][:action] ||= @action if @action && !metric.event?
+ hash[:tags][:action] ||= action if action && !metric.event?
hash
end
Metrics.submit_metrics(submit_hashes)
end
+
+ def labels
+ BASE_LABELS
+ end
+
+ # Returns a String describing the action performed, usually the class plus method name.
+ def action
+ "#{labels[:controller]}##{labels[:action]}" if labels && !labels.empty?
+ end
+
+ define_histogram :gitlab_transaction_duration_seconds do
+ docstring 'Transaction duration'
+ base_labels BASE_LABELS
+ buckets [0.001, 0.01, 0.1, 1.0, 10.0]
+ end
+
+ define_histogram :gitlab_transaction_allocated_memory_bytes do
+ docstring 'Transaction allocated memory bytes'
+ base_labels BASE_LABELS
+ buckets [100, 1000, 10000, 100000, 1000000, 10000000]
+ with_feature :prometheus_metrics_transaction_allocated_memory
+ end
+
+ def self.transaction_metric(name, type, prefix: nil, use_feature_flag: false, tags: {})
+ metric_name = "gitlab_transaction_#{prefix}#{name}_total".to_sym
+ fetch_metric(type, metric_name) do
+ docstring "Transaction #{prefix}#{name} #{type}"
+ base_labels tags.merge(BASE_LABELS)
+ with_feature "prometheus_transaction_#{prefix}#{name}_total".to_sym if use_feature_flag
+
+ if type == :gauge
+ multiprocess_mode :livesum
+ end
+ end
+ end
end
end
end
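Editor's note: Transaction.transaction_metric derives a Prometheus metric name from the value being incremented or set, which keeps the old InfluxDB value names and the new counters aligned. A small illustrative helper that mirrors just the naming rule:

    def transaction_metric_name(name, prefix: nil)
      "gitlab_transaction_#{prefix}#{name}_total".to_sym
    end

    puts transaction_metric_name(:sql_count)                  # => :gitlab_transaction_sql_count_total
    puts transaction_metric_name(:git_push, prefix: 'event_') # => :gitlab_transaction_event_git_push_total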
diff --git a/lib/gitlab/metrics/unicorn_sampler.rb b/lib/gitlab/metrics/unicorn_sampler.rb
deleted file mode 100644
index f6987252039..00000000000
--- a/lib/gitlab/metrics/unicorn_sampler.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-module Gitlab
- module Metrics
- class UnicornSampler < BaseSampler
- def initialize(interval)
- super(interval)
- end
-
- def unicorn_active_connections
- @unicorn_active_connections ||= Gitlab::Metrics.gauge(:unicorn_active_connections, 'Unicorn active connections', {}, :max)
- end
-
- def unicorn_queued_connections
- @unicorn_queued_connections ||= Gitlab::Metrics.gauge(:unicorn_queued_connections, 'Unicorn queued connections', {}, :max)
- end
-
- def enabled?
- # Raindrops::Linux.tcp_listener_stats is only present on Linux
- unicorn_with_listeners? && Raindrops::Linux.respond_to?(:tcp_listener_stats)
- end
-
- def sample
- Raindrops::Linux.tcp_listener_stats(tcp_listeners).each do |addr, stats|
- unicorn_active_connections.set({ type: 'tcp', address: addr }, stats.active)
- unicorn_queued_connections.set({ type: 'tcp', address: addr }, stats.queued)
- end
-
- Raindrops::Linux.unix_listener_stats(unix_listeners).each do |addr, stats|
- unicorn_active_connections.set({ type: 'unix', address: addr }, stats.active)
- unicorn_queued_connections.set({ type: 'unix', address: addr }, stats.queued)
- end
- end
-
- private
-
- def tcp_listeners
- @tcp_listeners ||= Unicorn.listener_names.grep(%r{\A[^/]+:\d+\z})
- end
-
- def unix_listeners
- @unix_listeners ||= Unicorn.listener_names - tcp_listeners
- end
-
- def unicorn_with_listeners?
- defined?(Unicorn) && Unicorn.listener_names.any?
- end
- end
- end
-end
diff --git a/lib/gitlab/metrics/web_transaction.rb b/lib/gitlab/metrics/web_transaction.rb
new file mode 100644
index 00000000000..89ff02a96d6
--- /dev/null
+++ b/lib/gitlab/metrics/web_transaction.rb
@@ -0,0 +1,82 @@
+module Gitlab
+ module Metrics
+ class WebTransaction < Transaction
+ CONTROLLER_KEY = 'action_controller.instance'.freeze
+ ENDPOINT_KEY = 'api.endpoint'.freeze
+
+ CONTENT_TYPES = {
+ 'text/html' => :html,
+ 'text/plain' => :txt,
+ 'application/json' => :json,
+ 'text/js' => :js,
+ 'application/atom+xml' => :atom,
+ 'image/png' => :png,
+ 'image/jpeg' => :jpeg,
+ 'image/gif' => :gif,
+ 'image/svg+xml' => :svg
+ }.freeze
+
+ def initialize(env)
+ super()
+ @env = env
+ end
+
+ def labels
+ return @labels if @labels
+
+      # Memoize transaction labels, but only if the source env variables were present
+ @labels = if @env[CONTROLLER_KEY]
+ labels_from_controller || {}
+ elsif @env[ENDPOINT_KEY]
+ labels_from_endpoint || {}
+ end
+
+ @labels || {}
+ end
+
+ private
+
+ def labels_from_controller
+ controller = @env[CONTROLLER_KEY]
+
+ action = "#{controller.action_name}"
+ suffix = CONTENT_TYPES[controller.content_type]
+
+ if suffix && suffix != :html
+ action += ".#{suffix}"
+ end
+
+ { controller: controller.class.name, action: action }
+ end
+
+ def labels_from_endpoint
+ endpoint = @env[ENDPOINT_KEY]
+
+ begin
+ route = endpoint.route
+ rescue
+ # endpoint.route is calling env[Grape::Env::GRAPE_ROUTING_ARGS][:route_info]
+ # but env[Grape::Env::GRAPE_ROUTING_ARGS] is nil in the case of a 405 response
+ # so we're rescuing exceptions and bailing out
+ end
+
+ if route
+ path = endpoint_paths_cache[route.request_method][route.path]
+ { controller: 'Grape', action: "#{route.request_method} #{path}" }
+ end
+ end
+
+ def endpoint_paths_cache
+ @endpoint_paths_cache ||= Hash.new do |hash, http_method|
+ hash[http_method] = Hash.new do |inner_hash, raw_path|
+ inner_hash[raw_path] = endpoint_instrumentable_path(raw_path)
+ end
+ end
+ end
+
+ def endpoint_instrumentable_path(raw_path)
+ raw_path.sub('(.:format)', '').sub('/:version', '')
+ end
+ end
+ end
+end
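For illustration, the labels WebTransaction derives for the two request types would look roughly like this (controller and route names are made up):

# Rails controller request rendering JSON:
#   { controller: 'ProjectsController', action: 'show.json' }
#
# Grape API request, with '(.:format)' and '/:version' stripped from the route path:
#   { controller: 'Grape', action: 'GET /projects/:id' }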
diff --git a/lib/gitlab/middleware/go.rb b/lib/gitlab/middleware/go.rb
index f42168c720e..1fd8f147b44 100644
--- a/lib/gitlab/middleware/go.rb
+++ b/lib/gitlab/middleware/go.rb
@@ -37,24 +37,34 @@ module Gitlab
end
def go_body(path)
- project_url = URI.join(Gitlab.config.gitlab.url, path)
+ config = Gitlab.config
+ project_url = URI.join(config.gitlab.url, path)
import_prefix = strip_url(project_url.to_s)
- meta_tag = tag :meta, name: 'go-import', content: "#{import_prefix} git #{project_url}.git"
+ repository_url = if Gitlab::CurrentSettings.enabled_git_access_protocol == 'ssh'
+ shell = config.gitlab_shell
+ port = ":#{shell.ssh_port}" unless shell.ssh_port == 22
+ "ssh://#{shell.ssh_user}@#{shell.ssh_host}#{port}/#{path}.git"
+ else
+ "#{project_url}.git"
+ end
+
+ meta_tag = tag :meta, name: 'go-import', content: "#{import_prefix} git #{repository_url}"
head_tag = content_tag :head, meta_tag
content_tag :html, head_tag
end
def strip_url(url)
- url.gsub(/\Ahttps?:\/\//, '')
+ url.gsub(%r{\Ahttps?://}, '')
end
def project_path(request)
path_info = request.env["PATH_INFO"]
- path_info.sub!(/^\//, '')
+ path_info.sub!(%r{^/}, '')
project_path_match = "#{path_info}/".match(PROJECT_PATH_REGEX)
return unless project_path_match
+
path = project_path_match[1]
# Go subpackages may be in the form of `namespace/project/path1/path2/../pathN`.
@@ -104,7 +114,15 @@ module Gitlab
end
def current_user(request)
- request.env['warden']&.authenticate
+ authenticator = Gitlab::Auth::RequestAuthenticator.new(request)
+ user = authenticator.find_user_from_access_token || authenticator.find_user_from_warden
+
+ return unless user&.can?(:access_api)
+
+ # Right now, the `api` scope is the only one that should be able to determine private project existence.
+ return unless authenticator.valid_access_token?(scopes: [:api])
+
+ user
end
end
end
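For reference, when SSH is the enabled Git access protocol the generated go-import tag would look roughly like this (hostname and path are illustrative):

# <meta name="go-import"
#       content="gitlab.example.com/group/project git ssh://git@gitlab.example.com/group/project.git">
# With HTTP(S) access the repository URL falls back to "#{project_url}.git" as before.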
diff --git a/lib/gitlab/middleware/multipart.rb b/lib/gitlab/middleware/multipart.rb
index fee741b47be..d4c54049b74 100644
--- a/lib/gitlab/middleware/multipart.rb
+++ b/lib/gitlab/middleware/multipart.rb
@@ -42,11 +42,12 @@ module Gitlab
key, value = parsed_field.first
if value.nil?
- value = open_file(tmp_path)
+ value = open_file(tmp_path, @request.params["#{key}.name"])
@open_files << value
else
value = decorate_params_value(value, @request.params[key], tmp_path)
end
+
@request.update_param(key, value)
end
@@ -60,6 +61,7 @@ module Gitlab
unless path_hash.is_a?(Hash) && path_hash.count == 1
raise "invalid path: #{path_hash.inspect}"
end
+
path_key, path_value = path_hash.first
unless value_hash.is_a?(Hash) && value_hash[path_key]
@@ -68,7 +70,7 @@ module Gitlab
case path_value
when nil
- value_hash[path_key] = open_file(tmp_path)
+ value_hash[path_key] = open_file(tmp_path, value_hash.dig(path_key, '.name'))
@open_files << value_hash[path_key]
value_hash
when Hash
@@ -79,8 +81,8 @@ module Gitlab
end
end
- def open_file(path)
- ::UploadedFile.new(path, File.basename(path), 'application/octet-stream')
+ def open_file(path, name)
+ ::UploadedFile.new(path, name || File.basename(path), 'application/octet-stream')
end
end
diff --git a/lib/gitlab/middleware/rails_queue_duration.rb b/lib/gitlab/middleware/rails_queue_duration.rb
index 63c3372da51..bc70b2459ef 100644
--- a/lib/gitlab/middleware/rails_queue_duration.rb
+++ b/lib/gitlab/middleware/rails_queue_duration.rb
@@ -14,11 +14,22 @@ module Gitlab
proxy_start = env['HTTP_GITLAB_WORKHORSE_PROXY_START'].presence
if trans && proxy_start
# Time in milliseconds since gitlab-workhorse started the request
- trans.set(:rails_queue_duration, Time.now.to_f * 1_000 - proxy_start.to_f / 1_000_000)
+ duration = Time.now.to_f * 1_000 - proxy_start.to_f / 1_000_000
+ trans.set(:rails_queue_duration, duration)
+ metric_rails_queue_duration_seconds.observe(trans.labels, duration / 1_000)
end
@app.call(env)
end
+
+ private
+
+ def metric_rails_queue_duration_seconds
+ @metric_rails_queue_duration_seconds ||= Gitlab::Metrics.histogram(
+ :gitlab_rails_queue_duration_seconds,
+ Gitlab::Metrics::Transaction::BASE_LABELS
+ )
+ end
end
end
end
diff --git a/lib/gitlab/middleware/read_only.rb b/lib/gitlab/middleware/read_only.rb
new file mode 100644
index 00000000000..d9d5f90596f
--- /dev/null
+++ b/lib/gitlab/middleware/read_only.rb
@@ -0,0 +1,20 @@
+module Gitlab
+ module Middleware
+ class ReadOnly
+ API_VERSIONS = (3..4)
+
+ def self.internal_routes
+ @internal_routes ||=
+ API_VERSIONS.map { |version| "api/v#{version}/internal" }
+ end
+
+ def initialize(app)
+ @app = app
+ end
+
+ def call(env)
+ ReadOnly::Controller.new(@app, env).call
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/middleware/read_only/controller.rb b/lib/gitlab/middleware/read_only/controller.rb
new file mode 100644
index 00000000000..45b644e6510
--- /dev/null
+++ b/lib/gitlab/middleware/read_only/controller.rb
@@ -0,0 +1,86 @@
+module Gitlab
+ module Middleware
+ class ReadOnly
+ class Controller
+ DISALLOWED_METHODS = %w(POST PATCH PUT DELETE).freeze
+ APPLICATION_JSON = 'application/json'.freeze
+ ERROR_MESSAGE = 'You cannot perform write operations on a read-only instance'.freeze
+
+ def initialize(app, env)
+ @app = app
+ @env = env
+ end
+
+ def call
+ if disallowed_request? && Gitlab::Database.read_only?
+ Rails.logger.debug('GitLab ReadOnly: preventing possible non read-only operation')
+
+ if json_request?
+ return [403, { 'Content-Type' => APPLICATION_JSON }, [{ 'message' => ERROR_MESSAGE }.to_json]]
+ else
+ rack_flash.alert = ERROR_MESSAGE
+ rack_session['flash'] = rack_flash.to_session_value
+
+ return [301, { 'Location' => last_visited_url }, []]
+ end
+ end
+
+ @app.call(@env)
+ end
+
+ private
+
+ def disallowed_request?
+ DISALLOWED_METHODS.include?(@env['REQUEST_METHOD']) &&
+ !whitelisted_routes
+ end
+
+ def json_request?
+ request.media_type == APPLICATION_JSON
+ end
+
+ def rack_flash
+ @rack_flash ||= ActionDispatch::Flash::FlashHash.from_session_value(rack_session)
+ end
+
+ def rack_session
+ @env['rack.session']
+ end
+
+ def request
+ @env['rack.request'] ||= Rack::Request.new(@env)
+ end
+
+ def last_visited_url
+ @env['HTTP_REFERER'] || rack_session['user_return_to'] || Gitlab::Routing.url_helpers.root_url
+ end
+
+ def route_hash
+ @route_hash ||= Rails.application.routes.recognize_path(request.url, { method: request.request_method }) rescue {}
+ end
+
+ def whitelisted_routes
+ grack_route || ReadOnly.internal_routes.any? { |path| request.path.include?(path) } || lfs_route || sidekiq_route
+ end
+
+ def sidekiq_route
+ request.path.start_with?('/admin/sidekiq')
+ end
+
+ def grack_route
+ # Calling route_hash may be expensive. Only do it if we think there's a possible match
+ return false unless request.path.end_with?('.git/git-upload-pack')
+
+ route_hash[:controller] == 'projects/git_http' && route_hash[:action] == 'git_upload_pack'
+ end
+
+ def lfs_route
+ # Calling route_hash may be expensive. Only do it if we think there's a possible match
+ return false unless request.path.end_with?('/info/lfs/objects/batch')
+
+ route_hash[:controller] == 'projects/lfs_api' && route_hash[:action] == 'batch'
+ end
+ end
+ end
+ end
+end
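A sketch of the resulting behaviour on a read-only instance (paths and payloads are illustrative):

# JSON write request:
#   POST /api/v4/projects => [403, { 'Content-Type' => 'application/json' },
#                             [{ 'message' => ERROR_MESSAGE }.to_json]]
# Browser form submission:
#   => [301, { 'Location' => last_visited_url }, []] with ERROR_MESSAGE stored as a flash alert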
diff --git a/lib/gitlab/middleware/release_env.rb b/lib/gitlab/middleware/release_env.rb
new file mode 100644
index 00000000000..f8d0a135965
--- /dev/null
+++ b/lib/gitlab/middleware/release_env.rb
@@ -0,0 +1,14 @@
+module Gitlab
+ module Middleware
+    # Some middleware holds onto the env for no good reason even after the
+    # request has been processed, which prevents it from being garbage
+    # collected. Put this middleware first in the stack so that it clears
+    # the env once the request is done, giving GC a chance to release the
+    # memory used by the last request.
+ ReleaseEnv = Struct.new(:app) do
+ def call(env)
+ app.call(env).tap { env.clear }
+ end
+ end
+ end
+end
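A minimal sketch of the middleware in isolation; the Rack app and the way it is mounted are illustrative, not taken from this diff:

require 'stringio'

inner_app = ->(env) { [200, { 'Content-Type' => 'text/plain' }, ['OK']] }
app = Gitlab::Middleware::ReleaseEnv.new(inner_app)

env = { 'rack.input' => StringIO.new }
status, _headers, _body = app.call(env)

env.empty? # => true, so nothing referenced from the env can outlive the request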
diff --git a/lib/gitlab/middleware/static.rb b/lib/gitlab/middleware/static.rb
index 85ffa8aca68..aa1e9dc0fdb 100644
--- a/lib/gitlab/middleware/static.rb
+++ b/lib/gitlab/middleware/static.rb
@@ -1,7 +1,7 @@
module Gitlab
module Middleware
class Static < ActionDispatch::Static
- UPLOADS_REGEX = /\A\/uploads(\/|\z)/.freeze
+ UPLOADS_REGEX = %r{\A/uploads(/|\z)}.freeze
def call(env)
return @app.call(env) if env['PATH_INFO'] =~ UPLOADS_REGEX
diff --git a/lib/gitlab/multi_collection_paginator.rb b/lib/gitlab/multi_collection_paginator.rb
new file mode 100644
index 00000000000..43921a8c1c0
--- /dev/null
+++ b/lib/gitlab/multi_collection_paginator.rb
@@ -0,0 +1,64 @@
+module Gitlab
+ class MultiCollectionPaginator
+ attr_reader :first_collection, :second_collection, :per_page
+
+ def initialize(*collections, per_page: nil)
+ raise ArgumentError.new('Only 2 collections are supported') if collections.size != 2
+
+ @per_page = per_page || Kaminari.config.default_per_page
+ @first_collection, @second_collection = collections
+ end
+
+ def paginate(page)
+ page = page.to_i
+ paginated_first_collection(page) + paginated_second_collection(page)
+ end
+
+ def total_count
+ @total_count ||= first_collection.size + second_collection.size
+ end
+
+ private
+
+ def paginated_first_collection(page)
+ @first_collection_pages ||= Hash.new do |hash, page|
+ hash[page] = first_collection.page(page).per(per_page)
+ end
+
+ @first_collection_pages[page]
+ end
+
+ def paginated_second_collection(page)
+ @second_collection_pages ||= Hash.new do |hash, page|
+ second_collection_page = page - first_collection_page_count
+
+ offset = if second_collection_page < 1 || first_collection_page_count.zero?
+ 0
+ else
+ per_page - first_collection_last_page_size
+ end
+
+ hash[page] = second_collection.page(second_collection_page)
+ .per(per_page - paginated_first_collection(page).size)
+ .padding(offset)
+ end
+
+ @second_collection_pages[page]
+ end
+
+ def first_collection_page_count
+ return @first_collection_page_count if defined?(@first_collection_page_count)
+
+ first_collection_page = paginated_first_collection(0)
+ @first_collection_page_count = first_collection_page.total_pages
+ end
+
+ def first_collection_last_page_size
+ return @first_collection_last_page_size if defined?(@first_collection_last_page_size)
+
+ @first_collection_last_page_size = paginated_first_collection(first_collection_page_count)
+ .except(:select)
+ .size
+ end
+ end
+end
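A usage sketch, assuming two Kaminari-paginatable relations (the variable names are illustrative):

paginator = Gitlab::MultiCollectionPaginator.new(projects, groups, per_page: 20)

paginator.total_count # => projects.size + groups.size
paginator.paginate(1) # => up to 20 records, drawn from `projects` first
paginator.paginate(2) # => the rest of `projects`, padded out with records from `groups`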
diff --git a/lib/gitlab/o_auth/auth_hash.rb b/lib/gitlab/o_auth/auth_hash.rb
deleted file mode 100644
index 5b5ed449f94..00000000000
--- a/lib/gitlab/o_auth/auth_hash.rb
+++ /dev/null
@@ -1,90 +0,0 @@
-# Class to parse and transform the info provided by omniauth
-#
-module Gitlab
- module OAuth
- class AuthHash
- attr_reader :auth_hash
- def initialize(auth_hash)
- @auth_hash = auth_hash
- end
-
- def uid
- @uid ||= Gitlab::Utils.force_utf8(auth_hash.uid.to_s)
- end
-
- def provider
- @provider ||= auth_hash.provider.to_s
- end
-
- def name
- @name ||= get_info(:name) || "#{get_info(:first_name)} #{get_info(:last_name)}"
- end
-
- def username
- @username ||= username_and_email[:username].to_s
- end
-
- def email
- @email ||= username_and_email[:email].to_s
- end
-
- def password
- @password ||= Gitlab::Utils.force_utf8(Devise.friendly_token[0, 8].downcase)
- end
-
- def location
- location = get_info(:address)
- if location.is_a?(Hash)
- [location.locality.presence, location.country.presence].compact.join(', ')
- else
- location
- end
- end
-
- def has_attribute?(attribute)
- if attribute == :location
- get_info(:address).present?
- else
- get_info(attribute).present?
- end
- end
-
- private
-
- def info
- auth_hash.info
- end
-
- def get_info(key)
- value = info[key]
- Gitlab::Utils.force_utf8(value) if value
- value
- end
-
- def username_and_email
- @username_and_email ||= begin
- username = get_info(:username).presence || get_info(:nickname).presence
- email = get_info(:email).presence
-
- username ||= generate_username(email) if email
- email ||= generate_temporarily_email(username) if username
-
- {
- username: username,
- email: email
- }
- end
- end
-
- # Get the first part of the email address (before @)
- # In addtion in removes illegal characters
- def generate_username(email)
- email.match(/^[^@]*/)[0].mb_chars.normalize(:kd).gsub(/[^\x00-\x7F]/, '').to_s
- end
-
- def generate_temporarily_email(username)
- "temp-email-for-oauth-#{username}@gitlab.localhost"
- end
- end
- end
-end
diff --git a/lib/gitlab/o_auth/provider.rb b/lib/gitlab/o_auth/provider.rb
deleted file mode 100644
index ac9d66c836d..00000000000
--- a/lib/gitlab/o_auth/provider.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-module Gitlab
- module OAuth
- class Provider
- LABELS = {
- "github" => "GitHub",
- "gitlab" => "GitLab.com",
- "google_oauth2" => "Google"
- }.freeze
-
- def self.providers
- Devise.omniauth_providers
- end
-
- def self.enabled?(name)
- providers.include?(name.to_sym)
- end
-
- def self.ldap_provider?(name)
- name.to_s.start_with?('ldap')
- end
-
- def self.config_for(name)
- name = name.to_s
- if ldap_provider?(name)
- if Gitlab::LDAP::Config.valid_provider?(name)
- Gitlab::LDAP::Config.new(name).options
- else
- nil
- end
- else
- Gitlab.config.omniauth.providers.find { |provider| provider.name == name }
- end
- end
-
- def self.label_for(name)
- name = name.to_s
- config = config_for(name)
- (config && config['label']) || LABELS[name] || name.titleize
- end
- end
- end
-end
diff --git a/lib/gitlab/o_auth/session.rb b/lib/gitlab/o_auth/session.rb
deleted file mode 100644
index 30739f2a2c5..00000000000
--- a/lib/gitlab/o_auth/session.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# :nocov:
-module Gitlab
- module OAuth
- module Session
- def self.create(provider, ticket)
- Rails.cache.write("gitlab:#{provider}:#{ticket}", ticket, expires_in: Gitlab.config.omniauth.cas3.session_duration)
- end
-
- def self.destroy(provider, ticket)
- Rails.cache.delete("gitlab:#{provider}:#{ticket}")
- end
-
- def self.valid?(provider, ticket)
- Rails.cache.read("gitlab:#{provider}:#{ticket}").present?
- end
- end
- end
-end
-# :nocov:
diff --git a/lib/gitlab/o_auth/user.rb b/lib/gitlab/o_auth/user.rb
deleted file mode 100644
index 7704bf715e4..00000000000
--- a/lib/gitlab/o_auth/user.rb
+++ /dev/null
@@ -1,222 +0,0 @@
-# OAuth extension for User model
-#
-# * Find GitLab user based on omniauth uid and provider
-# * Create new user from omniauth data
-#
-module Gitlab
- module OAuth
- SignupDisabledError = Class.new(StandardError)
-
- class User
- attr_accessor :auth_hash, :gl_user
-
- def initialize(auth_hash)
- self.auth_hash = auth_hash
- update_profile if sync_profile_from_provider?
- end
-
- def persisted?
- gl_user.try(:persisted?)
- end
-
- def new?
- !persisted?
- end
-
- def valid?
- gl_user.try(:valid?)
- end
-
- def save(provider = 'OAuth')
- unauthorized_to_create unless gl_user
-
- block_after_save = needs_blocking?
-
- Users::UpdateService.new(gl_user).execute!
-
- gl_user.block if block_after_save
-
- log.info "(#{provider}) saving user #{auth_hash.email} from login with extern_uid => #{auth_hash.uid}"
- gl_user
- rescue ActiveRecord::RecordInvalid => e
- log.info "(#{provider}) Error saving user #{auth_hash.uid} (#{auth_hash.email}): #{gl_user.errors.full_messages}"
- return self, e.record.errors
- end
-
- def gl_user
- @user ||= find_by_uid_and_provider
-
- if auto_link_ldap_user?
- @user ||= find_or_create_ldap_user
- end
-
- if signup_enabled?
- @user ||= build_new_user
- end
-
- if external_provider? && @user
- @user.external = true
- end
-
- @user
- end
-
- protected
-
- def find_or_create_ldap_user
- return unless ldap_person
-
- # If a corresponding person exists with same uid in a LDAP server,
- # check if the user already has a GitLab account.
- user = Gitlab::LDAP::User.find_by_uid_and_provider(ldap_person.dn, ldap_person.provider)
- if user
- # Case when a LDAP user already exists in Gitlab. Add the OAuth identity to existing account.
- log.info "LDAP account found for user #{user.username}. Building new #{auth_hash.provider} identity."
- user.identities.find_or_initialize_by(extern_uid: auth_hash.uid, provider: auth_hash.provider)
- else
- log.info "No existing LDAP account was found in GitLab. Checking for #{auth_hash.provider} account."
- user = find_by_uid_and_provider
- if user.nil?
- log.info "No user found using #{auth_hash.provider} provider. Creating a new one."
- user = build_new_user
- end
- log.info "Correct account has been found. Adding LDAP identity to user: #{user.username}."
- user.identities.new(provider: ldap_person.provider, extern_uid: ldap_person.dn)
- end
-
- user
- end
-
- def auto_link_ldap_user?
- Gitlab.config.omniauth.auto_link_ldap_user
- end
-
- def creating_linked_ldap_user?
- auto_link_ldap_user? && ldap_person
- end
-
- def ldap_person
- return @ldap_person if defined?(@ldap_person)
-
- # Look for a corresponding person with same uid in any of the configured LDAP providers
- Gitlab::LDAP::Config.providers.each do |provider|
- adapter = Gitlab::LDAP::Adapter.new(provider)
- @ldap_person = find_ldap_person(auth_hash, adapter)
- break if @ldap_person
- end
- @ldap_person
- end
-
- def find_ldap_person(auth_hash, adapter)
- by_uid = Gitlab::LDAP::Person.find_by_uid(auth_hash.uid, adapter)
- # The `uid` might actually be a DN. Try it next.
- by_uid || Gitlab::LDAP::Person.find_by_dn(auth_hash.uid, adapter)
- end
-
- def ldap_config
- Gitlab::LDAP::Config.new(ldap_person.provider) if ldap_person
- end
-
- def needs_blocking?
- new? && block_after_signup?
- end
-
- def signup_enabled?
- providers = Gitlab.config.omniauth.allow_single_sign_on
- if providers.is_a?(Array)
- providers.include?(auth_hash.provider)
- else
- providers
- end
- end
-
- def external_provider?
- Gitlab.config.omniauth.external_providers.include?(auth_hash.provider)
- end
-
- def block_after_signup?
- if creating_linked_ldap_user?
- ldap_config.block_auto_created_users
- else
- Gitlab.config.omniauth.block_auto_created_users
- end
- end
-
- def auth_hash=(auth_hash)
- @auth_hash = AuthHash.new(auth_hash)
- end
-
- def find_by_uid_and_provider
- identity = Identity.find_by(provider: auth_hash.provider, extern_uid: auth_hash.uid)
- identity && identity.user
- end
-
- def build_new_user
- user_params = user_attributes.merge(extern_uid: auth_hash.uid, provider: auth_hash.provider, skip_confirmation: true)
- Users::BuildService.new(nil, user_params).execute(skip_authorization: true)
- end
-
- def user_attributes
- # Give preference to LDAP for sensitive information when creating a linked account
- if creating_linked_ldap_user?
- username = ldap_person.username.presence
- email = ldap_person.email.first.presence
- end
-
- username ||= auth_hash.username
- email ||= auth_hash.email
-
- valid_username = ::Namespace.clean_path(username)
-
- uniquify = Uniquify.new
- valid_username = uniquify.string(valid_username) { |s| !DynamicPathValidator.valid_user_path?(s) }
-
- name = auth_hash.name
- name = valid_username if name.strip.empty?
-
- {
- name: name,
- username: valid_username,
- email: email,
- password: auth_hash.password,
- password_confirmation: auth_hash.password,
- password_automatically_set: true
- }
- end
-
- def sync_profile_from_provider?
- providers = Gitlab.config.omniauth.sync_profile_from_provider
-
- if providers.is_a?(Array)
- providers.include?(auth_hash.provider)
- else
- providers
- end
- end
-
- def update_profile
- user_synced_attributes_metadata = gl_user.user_synced_attributes_metadata || gl_user.build_user_synced_attributes_metadata
-
- UserSyncedAttributesMetadata::SYNCABLE_ATTRIBUTES.each do |key|
- if auth_hash.has_attribute?(key) && gl_user.sync_attribute?(key)
- gl_user[key] = auth_hash.public_send(key) # rubocop:disable GitlabSecurity/PublicSend
- user_synced_attributes_metadata.set_attribute_synced(key, true)
- else
- user_synced_attributes_metadata.set_attribute_synced(key, false)
- end
- end
-
- user_synced_attributes_metadata.provider = auth_hash.provider
- gl_user.user_synced_attributes_metadata = user_synced_attributes_metadata
- end
-
- def log
- Gitlab::AppLogger
- end
-
- def unauthorized_to_create
- raise SignupDisabledError
- end
- end
- end
-end
diff --git a/lib/gitlab/optimistic_locking.rb b/lib/gitlab/optimistic_locking.rb
index 962ff4d3985..1d9a5d1a20a 100644
--- a/lib/gitlab/optimistic_locking.rb
+++ b/lib/gitlab/optimistic_locking.rb
@@ -11,6 +11,7 @@ module Gitlab
rescue ActiveRecord::StaleObjectError
retries -= 1
raise unless retries >= 0
+
subject.reload
end
end
diff --git a/lib/gitlab/path_regex.rb b/lib/gitlab/path_regex.rb
index 7c02c9c5c48..4dc38aae61e 100644
--- a/lib/gitlab/path_regex.rb
+++ b/lib/gitlab/path_regex.rb
@@ -26,7 +26,6 @@ module Gitlab
apple-touch-icon.png
assets
autocomplete
- boards
ci
dashboard
deploy.html
@@ -52,7 +51,6 @@ module Gitlab
slash-command-logo.png
snippets
u
- unicorn_test
unsubscribes
uploads
users
@@ -113,23 +111,6 @@ module Gitlab
# this would map to the activity-page of its parent.
GROUP_ROUTES = %w[
-
- activity
- analytics
- audit_events
- avatar
- edit
- group_members
- hooks
- issues
- labels
- ldap
- ldap_group_links
- merge_requests
- milestones
- notification_setting
- pipeline_quota
- projects
- subgroups
].freeze
ILLEGAL_PROJECT_PATH_WORDS = PROJECT_WILDCARD_ROUTES
@@ -190,24 +171,16 @@ module Gitlab
@project_git_route_regex ||= /#{project_route_regex}\.git/.freeze
end
- def root_namespace_path_regex
- @root_namespace_path_regex ||= %r{\A#{root_namespace_route_regex}/\z}
- end
-
def full_namespace_path_regex
@full_namespace_path_regex ||= %r{\A#{full_namespace_route_regex}/\z}
end
- def project_path_regex
- @project_path_regex ||= %r{\A#{project_route_regex}/\z}
- end
-
def full_project_path_regex
@full_project_path_regex ||= %r{\A#{full_namespace_route_regex}/#{project_route_regex}/\z}
end
- def full_namespace_format_regex
- @namespace_format_regex ||= /A#{FULL_NAMESPACE_FORMAT_REGEX}\z/.freeze
+ def full_project_git_path_regex
+ @full_project_git_path_regex ||= %r{\A\/?(?<namespace_path>#{full_namespace_route_regex})\/(?<project_path>#{project_route_regex})\.git\z}
end
def namespace_format_regex
diff --git a/lib/gitlab/performance_bar.rb b/lib/gitlab/performance_bar.rb
index e73245b82c1..6c2b2036074 100644
--- a/lib/gitlab/performance_bar.rb
+++ b/lib/gitlab/performance_bar.rb
@@ -1,18 +1,17 @@
module Gitlab
module PerformanceBar
- extend Gitlab::CurrentSettings
-
ALLOWED_USER_IDS_KEY = 'performance_bar_allowed_user_ids:v2'.freeze
EXPIRY_TIME = 5.minutes
def self.enabled?(user = nil)
+ return true if Rails.env.development?
return false unless user && allowed_group_id
allowed_user_ids.include?(user.id)
end
def self.allowed_group_id
- current_application_settings.performance_bar_allowed_group_id
+ Gitlab::CurrentSettings.performance_bar_allowed_group_id
end
def self.allowed_user_ids
diff --git a/lib/gitlab/performance_bar/peek_query_tracker.rb b/lib/gitlab/performance_bar/peek_query_tracker.rb
index 67fee8c227d..f2825db59ae 100644
--- a/lib/gitlab/performance_bar/peek_query_tracker.rb
+++ b/lib/gitlab/performance_bar/peek_query_tracker.rb
@@ -36,8 +36,8 @@ module Gitlab
end
def track_query(raw_query, bindings, start, finish)
- query = Gitlab::Sherlock::Query.new(raw_query, start, finish)
- query_info = { duration: query.duration.round(3), sql: query.formatted_query }
+ duration = (finish - start) * 1000.0
+ query_info = { duration: duration.round(3), sql: raw_query }
PEEK_DB_CLIENT.query_details << query_info
end
diff --git a/lib/gitlab/plugin.rb b/lib/gitlab/plugin.rb
new file mode 100644
index 00000000000..0d1cb16b378
--- /dev/null
+++ b/lib/gitlab/plugin.rb
@@ -0,0 +1,26 @@
+module Gitlab
+ module Plugin
+ def self.files
+ Dir.glob(Rails.root.join('plugins/*')).select do |entry|
+ File.file?(entry)
+ end
+ end
+
+ def self.execute_all_async(data)
+ args = files.map { |file| [file, data] }
+
+ PluginWorker.bulk_perform_async(args)
+ end
+
+ def self.execute(file, data)
+ result = Gitlab::Popen.popen_with_detail([file]) do |stdin|
+ stdin.write(data.to_json)
+ end
+
+ exit_status = result.status&.exitstatus
+ [exit_status.zero?, result.stderr]
+ rescue => e
+ [false, e.message]
+ end
+ end
+end
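A usage sketch (the plugin path and payload are hypothetical): each plugin file receives the event data as JSON on stdin and signals success through its exit status:

data = { event_name: 'project_create', path_with_namespace: 'group/project' }

success, message = Gitlab::Plugin.execute('/opt/gitlab/plugins/notify', data)
puts "plugin failed: #{message}" unless success

# Or fan the event out to every file under plugins/ asynchronously:
Gitlab::Plugin.execute_all_async(data)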
diff --git a/lib/gitlab/plugin_logger.rb b/lib/gitlab/plugin_logger.rb
new file mode 100644
index 00000000000..c4f6ec3e21d
--- /dev/null
+++ b/lib/gitlab/plugin_logger.rb
@@ -0,0 +1,7 @@
+module Gitlab
+ class PluginLogger < Gitlab::Logger
+ def self.file_name_noext
+ 'plugin'
+ end
+ end
+end
diff --git a/lib/gitlab/polling_interval.rb b/lib/gitlab/polling_interval.rb
index 4780675a492..fe4bdfe3831 100644
--- a/lib/gitlab/polling_interval.rb
+++ b/lib/gitlab/polling_interval.rb
@@ -1,12 +1,10 @@
module Gitlab
class PollingInterval
- extend Gitlab::CurrentSettings
-
HEADER_NAME = 'Poll-Interval'.freeze
def self.set_header(response, interval:)
if polling_enabled?
- multiplier = current_application_settings.polling_interval_multiplier
+ multiplier = Gitlab::CurrentSettings.polling_interval_multiplier
value = (interval * multiplier).to_i
else
value = -1
@@ -16,7 +14,7 @@ module Gitlab
end
def self.polling_enabled?
- !current_application_settings.polling_interval_multiplier.zero?
+ !Gitlab::CurrentSettings.polling_interval_multiplier.zero?
end
end
end
diff --git a/lib/gitlab/popen.rb b/lib/gitlab/popen.rb
index 4bc5cda8cb5..b9832a724c4 100644
--- a/lib/gitlab/popen.rb
+++ b/lib/gitlab/popen.rb
@@ -5,7 +5,17 @@ module Gitlab
module Popen
extend self
- def popen(cmd, path = nil, vars = {})
+ Result = Struct.new(:cmd, :stdout, :stderr, :status, :duration)
+
+ # Returns [stdout + stderr, status]
+ def popen(cmd, path = nil, vars = {}, &block)
+ result = popen_with_detail(cmd, path, vars, &block)
+
+ [result.stdout << result.stderr, result.status&.exitstatus]
+ end
+
+ # Returns Result
+ def popen_with_detail(cmd, path = nil, vars = {})
unless cmd.is_a?(Array)
raise "System commands must be given as an array of strings"
end
@@ -18,18 +28,21 @@ module Gitlab
FileUtils.mkdir_p(path)
end
- cmd_output = ""
- cmd_status = 0
+ cmd_stdout = ''
+ cmd_stderr = ''
+ cmd_status = nil
+ start = Time.now
+
Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|
yield(stdin) if block_given?
stdin.close
- cmd_output << stdout.read
- cmd_output << stderr.read
- cmd_status = wait_thr.value.exitstatus
+ cmd_stdout = stdout.read
+ cmd_stderr = stderr.read
+ cmd_status = wait_thr.value
end
- [cmd_output, cmd_status]
+ Result.new(cmd, cmd_stdout, cmd_stderr, cmd_status, Time.now - start)
end
end
end
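A usage sketch of the reworked helper (the command is arbitrary):

output, status = Gitlab::Popen.popen(%w[git --version])
# output => combined stdout + stderr, status => integer exit status (nil if the process never ran)

result = Gitlab::Popen.popen_with_detail(%w[git --version])
result.stdout   # stdout only
result.stderr   # stderr only
result.status   # Process::Status
result.duration # wall-clock time in seconds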
diff --git a/lib/gitlab/popen/runner.rb b/lib/gitlab/popen/runner.rb
new file mode 100644
index 00000000000..f44035a48bb
--- /dev/null
+++ b/lib/gitlab/popen/runner.rb
@@ -0,0 +1,46 @@
+module Gitlab
+ module Popen
+ class Runner
+ attr_reader :results
+
+ def initialize
+ @results = []
+ end
+
+ def run(commands, &block)
+ commands.each do |cmd|
+ # yield doesn't support blocks, so we need to use a block variable
+ block.call(cmd) do # rubocop:disable Performance/RedundantBlockCall
+ cmd_result = Gitlab::Popen.popen_with_detail(cmd)
+
+ results << cmd_result
+
+ cmd_result
+ end
+ end
+ end
+
+ def all_success_and_clean?
+ all_success? && all_stderr_empty?
+ end
+
+ def all_success?
+ results.all? { |result| result.status.success? }
+ end
+
+ def all_stderr_empty?
+ results.all? { |result| result.stderr.empty? }
+ end
+
+ def failed_results
+ results.reject { |result| result.status.success? }
+ end
+
+ def warned_results
+ results.select do |result|
+ result.status.success? && !result.stderr.empty?
+ end
+ end
+ end
+ end
+end
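A usage sketch; the caller's block receives each command plus an inner block that actually runs it, so progress can be printed around the execution (the commands are arbitrary):

runner = Gitlab::Popen::Runner.new

runner.run([%w[git --version], %w[ruby --version]]) do |cmd, &run|
  print "Running #{cmd.join(' ')}... "
  result = run.call
  puts(result.status.success? ? 'OK' : 'FAILED')
end

runner.all_success_and_clean? # => true when every command exited 0 with empty stderr
runner.failed_results         # => Result structs for the commands that failed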
diff --git a/lib/gitlab/profiler.rb b/lib/gitlab/profiler.rb
new file mode 100644
index 00000000000..98a168b43bb
--- /dev/null
+++ b/lib/gitlab/profiler.rb
@@ -0,0 +1,143 @@
+# coding: utf-8
+module Gitlab
+ module Profiler
+ FILTERED_STRING = '[FILTERED]'.freeze
+
+ IGNORE_BACKTRACES = %w[
+ lib/gitlab/i18n.rb
+ lib/gitlab/request_context.rb
+ config/initializers
+ lib/gitlab/database/load_balancing/
+ lib/gitlab/etag_caching/
+ lib/gitlab/metrics/
+ lib/gitlab/middleware/
+ lib/gitlab/performance_bar/
+ lib/gitlab/request_profiler/
+ lib/gitlab/profiler.rb
+ ].freeze
+
+ # Takes a URL to profile (can be a fully-qualified URL, or an absolute path)
+ # and returns the ruby-prof profile result. Formatting that result is the
+ # caller's responsibility. Requests are GET requests unless post_data is
+ # passed.
+ #
+ # Optional arguments:
+ # - logger: will be used for SQL logging, including a summary at the end of
+ # the log file of the total time spent per model class.
+ #
+ # - post_data: a string of raw POST data to use. Changes the HTTP verb to
+ # POST.
+ #
+ # - user: a user to authenticate as. Only works if the user has a valid
+ # personal access token.
+ #
+ # - private_token: instead of providing a user instance, the token can be
+ # given as a string. Takes precedence over the user option.
+ def self.profile(url, logger: nil, post_data: nil, user: nil, private_token: nil)
+ app = ActionDispatch::Integration::Session.new(Rails.application)
+ verb = :get
+ headers = {}
+
+ if post_data
+ verb = :post
+ headers['Content-Type'] = 'application/json'
+ end
+
+ if user
+ private_token ||= user.personal_access_tokens.active.pluck(:token).first
+ raise 'Your user must have a personal_access_token' unless private_token
+ end
+
+ headers['Private-Token'] = private_token if private_token
+ logger = create_custom_logger(logger, private_token: private_token)
+
+ RequestStore.begin!
+
+ # Make an initial call for an asset path in development mode to avoid
+ # sprockets dominating the profiler output.
+ ActionController::Base.helpers.asset_path('katex.css') if Rails.env.development?
+
+ # Rails loads internationalization files lazily the first time a
+ # translation is needed. Running this prevents this overhead from showing
+ # up in profiles.
+ ::I18n.t('.')[:test_string]
+
+ # Remove API route mounting from the profile.
+ app.get('/api/v4/users')
+
+ result = with_custom_logger(logger) do
+ RubyProf.profile { app.public_send(verb, url, post_data, headers) } # rubocop:disable GitlabSecurity/PublicSend
+ end
+
+ RequestStore.end!
+
+ log_load_times_by_model(logger)
+
+ result
+ end
+
+ def self.create_custom_logger(logger, private_token: nil)
+ return unless logger
+
+ logger.dup.tap do |new_logger|
+ new_logger.instance_variable_set(:@private_token, private_token)
+
+ class << new_logger
+ attr_reader :load_times_by_model, :private_token
+
+ def debug(message, *)
+ message.gsub!(private_token, FILTERED_STRING) if private_token
+
+ _, type, time = *message.match(/(\w+) Load \(([0-9.]+)ms\)/)
+
+ if type && time
+ @load_times_by_model ||= {}
+ @load_times_by_model[type] ||= 0
+ @load_times_by_model[type] += time.to_f
+ end
+
+ super
+
+ backtrace = Rails.backtrace_cleaner.clean(caller)
+
+ backtrace.each do |caller_line|
+ next if caller_line.match(Regexp.union(IGNORE_BACKTRACES))
+
+ stripped_caller_line = caller_line.sub("#{Rails.root}/", '')
+
+ super(" ↳ #{stripped_caller_line}")
+ end
+ end
+ end
+ end
+ end
+
+ def self.with_custom_logger(logger)
+ original_colorize_logging = ActiveSupport::LogSubscriber.colorize_logging
+ original_activerecord_logger = ActiveRecord::Base.logger
+ original_actioncontroller_logger = ActionController::Base.logger
+
+ if logger
+ ActiveSupport::LogSubscriber.colorize_logging = false
+ ActiveRecord::Base.logger = logger
+ ActionController::Base.logger = logger
+ end
+
+ result = yield
+
+ ActiveSupport::LogSubscriber.colorize_logging = original_colorize_logging
+ ActiveRecord::Base.logger = original_activerecord_logger
+ ActionController::Base.logger = original_actioncontroller_logger
+
+ result
+ end
+
+ def self.log_load_times_by_model(logger)
+ return unless logger.respond_to?(:load_times_by_model)
+
+ logger.load_times_by_model.to_a.sort_by(&:last).reverse.each do |(model, time)|
+ logger.info("#{model} total: #{time.round(2)}ms")
+ end
+ end
+ end
+end
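A usage sketch, assuming a Rails console and a user with an active personal access token (the URL, log path and printer choice are illustrative):

result = Gitlab::Profiler.profile('/dashboard/issues',
                                  logger: Logger.new('/tmp/profile.log'),
                                  user: User.find_by(username: 'root'))

printer = RubyProf::FlatPrinter.new(result)
printer.print($stdout, min_percent: 1)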
diff --git a/lib/gitlab/project_search_results.rb b/lib/gitlab/project_search_results.rb
index 561aa9e162c..29277ec6481 100644
--- a/lib/gitlab/project_search_results.rb
+++ b/lib/gitlab/project_search_results.rb
@@ -2,11 +2,12 @@ module Gitlab
class ProjectSearchResults < SearchResults
attr_reader :project, :repository_ref
- def initialize(current_user, project, query, repository_ref = nil)
+ def initialize(current_user, project, query, repository_ref = nil, per_page: 20)
@current_user = current_user
@project = project
@repository_ref = repository_ref.presence || project.default_branch
@query = query
+ @per_page = per_page
end
def objects(scope, page = nil)
@@ -20,7 +21,7 @@ module Gitlab
when 'commits'
Kaminari.paginate_array(commits).page(page).per(per_page)
else
- super
+ super(scope, page, false)
end
end
@@ -28,8 +29,18 @@ module Gitlab
@blobs_count ||= blobs.count
end
- def notes_count
- @notes_count ||= notes.count
+ def limited_notes_count
+ return @limited_notes_count if defined?(@limited_notes_count)
+
+ types = %w(issue merge_request commit snippet)
+ @limited_notes_count = 0
+
+ types.each do |type|
+ @limited_notes_count += notes_finder(type).limit(count_limit).count
+ break if @limited_notes_count >= count_limit
+ end
+
+ @limited_notes_count
end
def wiki_blobs_count
@@ -40,26 +51,24 @@ module Gitlab
@commits_count ||= commits.count
end
- def self.parse_search_result(result)
+ def self.parse_search_result(result, project = nil)
ref = nil
filename = nil
basename = nil
+ data = ""
startline = 0
- result.each_line.each_with_index do |line, index|
- if line =~ /^.*:.*:\d+:/
- ref, filename, startline = line.split(':')
+ result.strip.each_line.each_with_index do |line, index|
+ prefix ||= line.match(/^(?<ref>[^:]*):(?<filename>.*)\x00(?<startline>\d+)\x00/)&.tap do |matches|
+ ref = matches[:ref]
+ filename = matches[:filename]
+ startline = matches[:startline]
startline = startline.to_i - index
extname = Regexp.escape(File.extname(filename))
basename = filename.sub(/#{extname}$/, '')
- break
end
- end
- data = ""
-
- result.each_line do |line|
- data << line.sub(ref, '').sub(filename, '').sub(/^:-\d+-/, '').sub(/^::\d+:/, '')
+ data << line.sub(prefix.to_s, '')
end
FoundBlob.new(
@@ -67,16 +76,18 @@ module Gitlab
basename: basename,
ref: ref,
startline: startline,
- data: data
+ data: data,
+ project_id: project ? project.id : nil
)
end
def single_commit_result?
- commits_count == 1 && total_result_count == 1
- end
+ return false if commits_count != 1
- def total_result_count
- issues_count + merge_requests_count + milestones_count + notes_count + blobs_count + wiki_blobs_count + commits_count
+ counts = %i(limited_milestones_count limited_notes_count
+ limited_merge_requests_count limited_issues_count
+ blobs_count wiki_blobs_count)
+ counts.all? { |count_method| public_send(count_method).zero? } # rubocop:disable GitlabSecurity/PublicSend
end
private
@@ -106,7 +117,11 @@ module Gitlab
end
def notes
- @notes ||= NotesFinder.new(project, @current_user, search: query).execute.user.order('updated_at DESC')
+ @notes ||= notes_finder(nil)
+ end
+
+ def notes_finder(type)
+ NotesFinder.new(project, @current_user, search: query, target_type: type).execute.user.order('updated_at DESC')
end
def commits
diff --git a/lib/gitlab/project_template.rb b/lib/gitlab/project_template.rb
index 732fbf68dad..ae136202f0c 100644
--- a/lib/gitlab/project_template.rb
+++ b/lib/gitlab/project_template.rb
@@ -1,9 +1,9 @@
module Gitlab
class ProjectTemplate
- attr_reader :title, :name
+ attr_reader :title, :name, :description, :preview
- def initialize(name, title)
- @name, @title = name, title
+ def initialize(name, title, description, preview)
+ @name, @title, @description, @preview = name, title, description, preview
end
alias_method :logo, :name
@@ -25,9 +25,9 @@ module Gitlab
end
TEMPLATES_TABLE = [
- ProjectTemplate.new('rails', 'Ruby on Rails'),
- ProjectTemplate.new('spring', 'Spring'),
- ProjectTemplate.new('express', 'NodeJS Express')
+ ProjectTemplate.new('rails', 'Ruby on Rails', 'Includes an MVC structure, gemfile, rakefile, and .gitlab-ci.yml file, along with many others, to help you get started.', 'https://gitlab.com/gitlab-org/project-templates/rails'),
+ ProjectTemplate.new('spring', 'Spring', 'Includes an MVC structure, mvnw, pom.xml, and .gitlab-ci.yml file to help you get started.', 'https://gitlab.com/gitlab-org/project-templates/spring'),
+ ProjectTemplate.new('express', 'NodeJS Express', 'Includes an MVC structure and .gitlab-ci.yml file to help you get started.', 'https://gitlab.com/gitlab-org/project-templates/express')
].freeze
class << self
diff --git a/lib/gitlab/prometheus/additional_metrics_parser.rb b/lib/gitlab/prometheus/additional_metrics_parser.rb
index cb95daf2260..bb1172f82a1 100644
--- a/lib/gitlab/prometheus/additional_metrics_parser.rb
+++ b/lib/gitlab/prometheus/additional_metrics_parser.rb
@@ -1,10 +1,12 @@
module Gitlab
module Prometheus
module AdditionalMetricsParser
+ CONFIG_ROOT = 'config/prometheus'.freeze
+ MUTEX = Mutex.new
extend self
- def load_groups_from_yaml
- additional_metrics_raw.map(&method(:group_from_entry))
+ def load_groups_from_yaml(file_name = 'additional_metrics.yml')
+ yaml_metrics_raw(file_name).map(&method(:group_from_entry))
end
private
@@ -22,13 +24,20 @@ module Gitlab
MetricGroup.new(entry).tap(&method(:validate!))
end
- def additional_metrics_raw
- load_yaml_file&.map(&:deep_symbolize_keys).freeze
+ def yaml_metrics_raw(file_name)
+ load_yaml_file(file_name)&.map(&:deep_symbolize_keys).freeze
end
- def load_yaml_file
- @loaded_yaml_file ||= YAML.load_file(Rails.root.join('config/prometheus/additional_metrics.yml'))
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ def load_yaml_file(file_name)
+ return YAML.load_file(Rails.root.join(CONFIG_ROOT, file_name)) if Rails.env.development?
+
+ MUTEX.synchronize do
+ @loaded_yaml_cache ||= {}
+ @loaded_yaml_cache[file_name] ||= YAML.load_file(Rails.root.join(CONFIG_ROOT, file_name))
+ end
end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
end
end
end
diff --git a/lib/gitlab/prometheus/metric_group.rb b/lib/gitlab/prometheus/metric_group.rb
index 729fef34b35..e91c6fb2e27 100644
--- a/lib/gitlab/prometheus/metric_group.rb
+++ b/lib/gitlab/prometheus/metric_group.rb
@@ -6,9 +6,14 @@ module Gitlab
attr_accessor :name, :priority, :metrics
validates :name, :priority, :metrics, presence: true
- def self.all
+ def self.common_metrics
AdditionalMetricsParser.load_groups_from_yaml
end
+
+ # EE only
+ def self.for_project(_)
+ common_metrics
+ end
end
end
end
diff --git a/lib/gitlab/prometheus/queries/additional_metrics_deployment_query.rb b/lib/gitlab/prometheus/queries/additional_metrics_deployment_query.rb
index 69d055c901c..e677ec84cd4 100644
--- a/lib/gitlab/prometheus/queries/additional_metrics_deployment_query.rb
+++ b/lib/gitlab/prometheus/queries/additional_metrics_deployment_query.rb
@@ -7,6 +7,7 @@ module Gitlab
def query(deployment_id)
Deployment.find_by(id: deployment_id).try do |deployment|
query_metrics(
+ deployment.project,
common_query_context(
deployment.environment,
timeframe_start: (deployment.created_at - 30.minutes).to_f,
diff --git a/lib/gitlab/prometheus/queries/additional_metrics_environment_query.rb b/lib/gitlab/prometheus/queries/additional_metrics_environment_query.rb
index 32fe8201a8d..9273e69e158 100644
--- a/lib/gitlab/prometheus/queries/additional_metrics_environment_query.rb
+++ b/lib/gitlab/prometheus/queries/additional_metrics_environment_query.rb
@@ -7,6 +7,7 @@ module Gitlab
def query(environment_id)
::Environment.find_by(id: environment_id).try do |environment|
query_metrics(
+ environment.project,
common_query_context(environment, timeframe_start: 8.hours.ago.to_f, timeframe_end: Time.now.to_f)
)
end
diff --git a/lib/gitlab/prometheus/queries/base_query.rb b/lib/gitlab/prometheus/queries/base_query.rb
index c60828165bd..29cab6e9c15 100644
--- a/lib/gitlab/prometheus/queries/base_query.rb
+++ b/lib/gitlab/prometheus/queries/base_query.rb
@@ -20,6 +20,10 @@ module Gitlab
def query(*args)
raise NotImplementedError
end
+
+ def self.transform_reactive_result(result)
+ result
+ end
end
end
end
diff --git a/lib/gitlab/prometheus/queries/deployment_query.rb b/lib/gitlab/prometheus/queries/deployment_query.rb
index 170f483540e..c2626581897 100644
--- a/lib/gitlab/prometheus/queries/deployment_query.rb
+++ b/lib/gitlab/prometheus/queries/deployment_query.rb
@@ -25,6 +25,11 @@ module Gitlab
}
end
end
+
+ def self.transform_reactive_result(result)
+ result[:metrics] = result.delete :data
+ result
+ end
end
end
end
diff --git a/lib/gitlab/prometheus/queries/environment_query.rb b/lib/gitlab/prometheus/queries/environment_query.rb
index 1d17d3cfd56..b62910c8de6 100644
--- a/lib/gitlab/prometheus/queries/environment_query.rb
+++ b/lib/gitlab/prometheus/queries/environment_query.rb
@@ -19,6 +19,11 @@ module Gitlab
}
end
end
+
+ def self.transform_reactive_result(result)
+ result[:metrics] = result.delete :data
+ result
+ end
end
end
end
diff --git a/lib/gitlab/prometheus/queries/matched_metrics_query.rb b/lib/gitlab/prometheus/queries/matched_metric_query.rb
index 4c3edccc71a..d920e9a749f 100644
--- a/lib/gitlab/prometheus/queries/matched_metrics_query.rb
+++ b/lib/gitlab/prometheus/queries/matched_metric_query.rb
@@ -1,7 +1,7 @@
module Gitlab
module Prometheus
module Queries
- class MatchedMetricsQuery < BaseQuery
+ class MatchedMetricQuery < BaseQuery
MAX_QUERY_ITEMS = 40.freeze
def query
@@ -18,7 +18,7 @@ module Gitlab
private
def groups_data
- metrics_groups = groups_with_active_metrics(Gitlab::Prometheus::MetricGroup.all)
+ metrics_groups = groups_with_active_metrics(Gitlab::Prometheus::MetricGroup.common_metrics)
lookup = active_series_lookup(metrics_groups)
groups = {}
diff --git a/lib/gitlab/prometheus/queries/query_additional_metrics.rb b/lib/gitlab/prometheus/queries/query_additional_metrics.rb
index 7ac6162b54d..aad76e335af 100644
--- a/lib/gitlab/prometheus/queries/query_additional_metrics.rb
+++ b/lib/gitlab/prometheus/queries/query_additional_metrics.rb
@@ -2,10 +2,17 @@ module Gitlab
module Prometheus
module Queries
module QueryAdditionalMetrics
- def query_metrics(query_context)
+ def query_metrics(project, query_context)
+ matched_metrics(project).map(&query_group(query_context))
+ .select(&method(:group_with_any_metrics))
+ end
+
+ protected
+
+ def query_group(query_context)
query_processor = method(:process_query).curry[query_context]
- groups = matched_metrics.map do |group|
+ lambda do |group|
metrics = group.metrics.map do |metric|
{
title: metric.title,
@@ -21,8 +28,6 @@ module Gitlab
metrics: metrics.select(&method(:metric_with_any_queries))
}
end
-
- groups.select(&method(:group_with_any_metrics))
end
private
@@ -60,8 +65,8 @@ module Gitlab
@available_metrics ||= client_label_values || []
end
- def matched_metrics
- result = Gitlab::Prometheus::MetricGroup.all.map do |group|
+ def matched_metrics(project)
+ result = Gitlab::Prometheus::MetricGroup.for_project(project).map do |group|
group.metrics.select! do |metric|
metric.required_metrics.all?(&available_metrics.method(:include?))
end
@@ -72,12 +77,17 @@ module Gitlab
end
def common_query_context(environment, timeframe_start:, timeframe_end:)
- {
- timeframe_start: timeframe_start,
- timeframe_end: timeframe_end,
+ base_query_context(timeframe_start, timeframe_end).merge({
ci_environment_slug: environment.slug,
- kube_namespace: environment.project.kubernetes_service&.actual_namespace || '',
+ kube_namespace: environment.project.deployment_platform&.actual_namespace || '',
environment_filter: %{container_name!="POD",environment="#{environment.slug}"}
+ })
+ end
+
+ def base_query_context(timeframe_start, timeframe_end)
+ {
+ timeframe_start: timeframe_start,
+ timeframe_end: timeframe_end
}
end
end
diff --git a/lib/gitlab/prometheus_client.rb b/lib/gitlab/prometheus_client.rb
index aa94614bf18..b66253a10e0 100644
--- a/lib/gitlab/prometheus_client.rb
+++ b/lib/gitlab/prometheus_client.rb
@@ -1,12 +1,13 @@
module Gitlab
- PrometheusError = Class.new(StandardError)
-
# Helper methods to interact with Prometheus network services & resources
class PrometheusClient
- attr_reader :api_url
+ Error = Class.new(StandardError)
+ QueryError = Class.new(Gitlab::PrometheusClient::Error)
+
+ attr_reader :rest_client, :headers
- def initialize(api_url:)
- @api_url = api_url
+ def initialize(rest_client)
+ @rest_client = rest_client
end
def ping
@@ -22,10 +23,10 @@ module Gitlab
def query_range(query, start: 8.hours.ago, stop: Time.now)
get_result('matrix') do
json_api_get('query_range',
- query: query,
- start: start.to_f,
- end: stop.to_f,
- step: 1.minute.to_i)
+ query: query,
+ start: start.to_f,
+ end: stop.to_f,
+ step: 1.minute.to_i)
end
end
@@ -40,39 +41,48 @@ module Gitlab
private
def json_api_get(type, args = {})
- get(join_api_url(type, args))
+ path = ['api', 'v1', type].join('/')
+ get(path, args)
+ rescue JSON::ParserError
+ raise PrometheusClient::Error, 'Parsing response failed'
rescue Errno::ECONNREFUSED
- raise PrometheusError, 'Connection refused'
- end
-
- def join_api_url(type, args = {})
- url = URI.parse(api_url)
- rescue URI::Error
- raise PrometheusError, "Invalid API URL: #{api_url}"
- else
- url.path = [url.path.sub(%r{/+\z}, ''), 'api', 'v1', type].join('/')
- url.query = args.to_query
-
- url.to_s
+ raise PrometheusClient::Error, 'Connection refused'
end
- def get(url)
- handle_response(HTTParty.get(url))
+ def get(path, args)
+ response = rest_client[path].get(params: args)
+ handle_response(response)
rescue SocketError
- raise PrometheusError, "Can't connect to #{url}"
+ raise PrometheusClient::Error, "Can't connect to #{rest_client.url}"
rescue OpenSSL::SSL::SSLError
- raise PrometheusError, "#{url} contains invalid SSL data"
- rescue HTTParty::Error
- raise PrometheusError, "Network connection error"
+ raise PrometheusClient::Error, "#{rest_client.url} contains invalid SSL data"
+ rescue RestClient::ExceptionWithResponse => ex
+ if ex.response
+ handle_exception_response(ex.response)
+ else
+ raise PrometheusClient::Error, "Network connection error"
+ end
+ rescue RestClient::Exception
+ raise PrometheusClient::Error, "Network connection error"
end
def handle_response(response)
+ json_data = JSON.parse(response.body)
+ if response.code == 200 && json_data['status'] == 'success'
+ json_data['data'] || {}
+ else
+ raise PrometheusClient::Error, "#{response.code} - #{response.body}"
+ end
+ end
+
+ def handle_exception_response(response)
if response.code == 200 && response['status'] == 'success'
response['data'] || {}
elsif response.code == 400
- raise PrometheusError, response['error'] || 'Bad data received'
+ json_data = JSON.parse(response.body)
+ raise PrometheusClient::QueryError, json_data['error'] || 'Bad data received'
else
- raise PrometheusError, "#{response.code} - #{response.body}"
+ raise PrometheusClient::Error, "#{response.code} - #{response.body}"
end
end
diff --git a/lib/gitlab/protocol_access.rb b/lib/gitlab/protocol_access.rb
index 09fa14764e6..2819c7d062c 100644
--- a/lib/gitlab/protocol_access.rb
+++ b/lib/gitlab/protocol_access.rb
@@ -1,14 +1,12 @@
module Gitlab
module ProtocolAccess
- extend Gitlab::CurrentSettings
-
def self.allowed?(protocol)
if protocol == 'web'
true
- elsif current_application_settings.enabled_git_access_protocol.blank?
+ elsif Gitlab::CurrentSettings.enabled_git_access_protocol.blank?
true
else
- protocol == current_application_settings.enabled_git_access_protocol
+ protocol == Gitlab::CurrentSettings.enabled_git_access_protocol
end
end
end
diff --git a/lib/gitlab/query_limiting.rb b/lib/gitlab/query_limiting.rb
new file mode 100644
index 00000000000..9f69a9e4a39
--- /dev/null
+++ b/lib/gitlab/query_limiting.rb
@@ -0,0 +1,36 @@
+module Gitlab
+ module QueryLimiting
+ # Returns true if we should enable tracking of query counts.
+ #
+    # This is currently only enabled in development and test. This ensures we
+    # don't produce any errors that users can't do anything about themselves.
+ def self.enable?
+ Rails.env.development? || Rails.env.test?
+ end
+
+ # Allows the current request to execute any number of SQL queries.
+ #
+ # This method should _only_ be used when there's a corresponding issue to
+ # reduce the number of queries.
+ #
+ # The issue URL is only meant to push developers into creating an issue
+ # instead of blindly whitelisting offending blocks of code.
+ def self.whitelist(issue_url)
+ return unless enable_whitelist?
+
+ unless issue_url.start_with?('https://')
+ raise(
+ ArgumentError,
+ 'You must provide a valid issue URL in order to whitelist a block of code'
+ )
+ end
+
+ Transaction&.current&.whitelisted = true
+ end
+
+ def self.enable_whitelist?
+ Rails.env.development? || Rails.env.test?
+ end
+ end
+end
diff --git a/lib/gitlab/query_limiting/active_support_subscriber.rb b/lib/gitlab/query_limiting/active_support_subscriber.rb
new file mode 100644
index 00000000000..4c83581c4b1
--- /dev/null
+++ b/lib/gitlab/query_limiting/active_support_subscriber.rb
@@ -0,0 +1,13 @@
+module Gitlab
+ module QueryLimiting
+ class ActiveSupportSubscriber < ActiveSupport::Subscriber
+ attach_to :active_record
+
+ def sql(event)
+ unless event.payload[:name] == 'CACHE'
+ Transaction.current&.increment
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/query_limiting/middleware.rb b/lib/gitlab/query_limiting/middleware.rb
new file mode 100644
index 00000000000..949ae79a047
--- /dev/null
+++ b/lib/gitlab/query_limiting/middleware.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module QueryLimiting
+ # Middleware for reporting (or raising) when a request performs more than a
+    # certain number of database queries.
+ class Middleware
+ CONTROLLER_KEY = 'action_controller.instance'.freeze
+ ENDPOINT_KEY = 'api.endpoint'.freeze
+
+ def initialize(app)
+ @app = app
+ end
+
+ def call(env)
+ transaction, retval = Transaction.run do
+ @app.call(env)
+ end
+
+ transaction.action = action_name(env)
+ transaction.act_upon_results
+
+ retval
+ end
+
+ def action_name(env)
+ if env[CONTROLLER_KEY]
+ action_for_rails(env)
+ elsif env[ENDPOINT_KEY]
+ action_for_grape(env)
+ end
+ end
+
+ private
+
+ def action_for_rails(env)
+ controller = env[CONTROLLER_KEY]
+ action = "#{controller.class.name}##{controller.action_name}"
+
+ if controller.content_type == 'text/html'
+ action
+ else
+ "#{action} (#{controller.content_type})"
+ end
+ end
+
+ def action_for_grape(env)
+ endpoint = env[ENDPOINT_KEY]
+ route = endpoint.route rescue nil
+
+ "#{route.request_method} #{route.path}" if route
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/query_limiting/transaction.rb b/lib/gitlab/query_limiting/transaction.rb
new file mode 100644
index 00000000000..66d7d9275cf
--- /dev/null
+++ b/lib/gitlab/query_limiting/transaction.rb
@@ -0,0 +1,77 @@
+module Gitlab
+ module QueryLimiting
+ class Transaction
+ THREAD_KEY = :__gitlab_query_counts_transaction
+
+ attr_accessor :count, :whitelisted
+
+ # The name of the action (e.g. `UsersController#show`) that is being
+ # executed.
+ attr_accessor :action
+
+ # The maximum number of SQL queries that can be executed in a request. For
+      # the sake of keeping things simple we hardcode this value here; it's not
+ # supposed to be changed very often anyway.
+ THRESHOLD = 100
+
+ # Error that is raised whenever exceeding the maximum number of queries.
+ ThresholdExceededError = Class.new(StandardError)
+
+ def self.current
+ Thread.current[THREAD_KEY]
+ end
+
+      # Starts a new transaction and returns it along with the block's return value.
+ #
+ # Example:
+ #
+ # transaction, retval = Transaction.run do
+ # 10
+ # end
+ #
+ # retval # => 10
+ def self.run
+ transaction = new
+ Thread.current[THREAD_KEY] = transaction
+
+ [transaction, yield]
+ ensure
+ Thread.current[THREAD_KEY] = nil
+ end
+
+ def initialize
+ @action = nil
+ @count = 0
+ @whitelisted = false
+ end
+
+ # Sends a notification based on the number of executed SQL queries.
+ def act_upon_results
+ return unless threshold_exceeded?
+
+ error = ThresholdExceededError.new(error_message)
+
+ raise(error) if raise_error?
+ end
+
+ def increment
+ @count += 1 unless whitelisted
+ end
+
+ def raise_error?
+ Rails.env.test?
+ end
+
+ def threshold_exceeded?
+ count > THRESHOLD
+ end
+
+ def error_message
+ header = 'Too many SQL queries were executed'
+ header += " in #{action}" if action
+
+ "#{header}: a maximum of #{THRESHOLD} is allowed but #{count} SQL queries were executed"
+ end
+ end
+ end
+end
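A sketch of how the pieces fit together; the action name and issue URL are hypothetical:

transaction, response = Gitlab::QueryLimiting::Transaction.run do
  app.call(env) # whatever the middleware wraps
end

transaction.action = 'ProjectsController#show'
transaction.act_upon_results # raises ThresholdExceededError in test when more than 100 queries ran

# A known-noisy code path can be whitelisted, but only with an issue URL to track the cleanup:
Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-ce/issues/12345')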
diff --git a/lib/gitlab/quick_actions/command_definition.rb b/lib/gitlab/quick_actions/command_definition.rb
index 3937d9c153a..96415271316 100644
--- a/lib/gitlab/quick_actions/command_definition.rb
+++ b/lib/gitlab/quick_actions/command_definition.rb
@@ -24,15 +24,14 @@ module Gitlab
action_block.nil?
end
- def available?(opts)
+ def available?(context)
return true unless condition_block
- context = OpenStruct.new(opts)
context.instance_exec(&condition_block)
end
- def explain(context, opts, arg)
- return unless available?(opts)
+ def explain(context, arg)
+ return unless available?(context)
if explanation.respond_to?(:call)
execute_block(explanation, context, arg)
@@ -41,15 +40,13 @@ module Gitlab
end
end
- def execute(context, opts, arg)
- return if noop? || !available?(opts)
+ def execute(context, arg)
+ return if noop? || !available?(context)
execute_block(action_block, context, arg)
end
- def to_h(opts)
- context = OpenStruct.new(opts)
-
+ def to_h(context)
desc = description
if desc.respond_to?(:call)
desc = context.instance_exec(&desc) rescue ''
diff --git a/lib/gitlab/quick_actions/dsl.rb b/lib/gitlab/quick_actions/dsl.rb
index 536765305e1..d82dccd0db5 100644
--- a/lib/gitlab/quick_actions/dsl.rb
+++ b/lib/gitlab/quick_actions/dsl.rb
@@ -62,9 +62,8 @@ module Gitlab
# Allows to define conditions that must be met in order for the command
# to be returned by `.command_names` & `.command_definitions`.
- # It accepts a block that will be evaluated with the context given to
- # `CommandDefintion#to_h`.
- #
+ # It accepts a block that will be evaluated with the context
+ # of a QuickActions::InterpretService instance
# Example:
#
# condition do
diff --git a/lib/gitlab/quick_actions/extractor.rb b/lib/gitlab/quick_actions/extractor.rb
index 3ebfa3bd4b8..075ff91700c 100644
--- a/lib/gitlab/quick_actions/extractor.rb
+++ b/lib/gitlab/quick_actions/extractor.rb
@@ -29,7 +29,7 @@ module Gitlab
# commands = extractor.extract_commands(msg) #=> [['labels', '~foo ~"bar baz"']]
# msg #=> "hello\nworld"
# ```
- def extract_commands(content, opts = {})
+ def extract_commands(content)
return [content, []] unless content
content = content.dup
@@ -37,7 +37,7 @@ module Gitlab
commands = []
content.delete!("\r")
- content.gsub!(commands_regex(opts)) do
+ content.gsub!(commands_regex) do
if $~[:cmd]
commands << [$~[:cmd], $~[:arg]].reject(&:blank?)
''
@@ -60,8 +60,8 @@ module Gitlab
# It looks something like:
#
# /^\/(?<cmd>close|reopen|...)(?:( |$))(?<arg>[^\/\n]*)(?:\n|$)/
- def commands_regex(opts)
- names = command_names(opts).map(&:to_s)
+ def commands_regex
+ names = command_names.map(&:to_s)
@commands_regex ||= %r{
(?<code>
@@ -126,13 +126,14 @@ module Gitlab
command << match_data[1] unless match_data[1].empty?
commands << command
end
+
content = substitution.perform_substitution(self, content)
end
[content, commands]
end
- def command_names(opts)
+ def command_names
command_definitions.flat_map do |command|
next if command.noop?
diff --git a/lib/gitlab/quick_actions/spend_time_and_date_separator.rb b/lib/gitlab/quick_actions/spend_time_and_date_separator.rb
new file mode 100644
index 00000000000..7328c517a30
--- /dev/null
+++ b/lib/gitlab/quick_actions/spend_time_and_date_separator.rb
@@ -0,0 +1,54 @@
+module Gitlab
+ module QuickActions
+ # This class takes a spend command argument and, if a date is present,
+ # separates the date from the time portion.
+ # example:
+ # spend_command_time_and_date = "15m 2017-01-02"
+ # SpendTimeAndDateSeparator.new(spend_command_time_and_date).execute
+ # => [900, Mon, 02 Jan 2017]
+ # If no date is present, it returns the time with the current date;
+ # in other cases it returns nil.
+ class SpendTimeAndDateSeparator
+ DATE_REGEX = %r{(\d{2,4}[/\-.]\d{1,2}[/\-.]\d{1,2})}
+
+ def initialize(spend_command_arg)
+ @spend_arg = spend_command_arg
+ end
+
+ def execute
+ return if @spend_arg.blank?
+ return [get_time, DateTime.now.to_date] unless date_present?
+ return unless valid_date?
+
+ [get_time, get_date]
+ end
+
+ private
+
+ def get_time
+ raw_time = @spend_arg.gsub(DATE_REGEX, '')
+ Gitlab::TimeTrackingFormatter.parse(raw_time)
+ end
+
+ def get_date
+ string_date = @spend_arg.match(DATE_REGEX)[0]
+ Date.parse(string_date)
+ end
+
+ def date_present?
+ DATE_REGEX =~ @spend_arg
+ end
+
+ def valid_date?
+ string_date = @spend_arg.match(DATE_REGEX)[0]
+ date = Date.parse(string_date) rescue nil
+
+ date_past_or_today?(date)
+ end
+
+ def date_past_or_today?(date)
+ date&.past? || date&.today?
+ end
+ end
+ end
+end
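For clarity, a small sketch of the separator's three return shapes (assumes the GitLab Rails environment, since it relies on Gitlab::TimeTrackingFormatter):

Gitlab::QuickActions::SpendTimeAndDateSeparator.new('30m 2017-01-02').execute
# => [1800, Mon, 02 Jan 2017]   # time in seconds plus the parsed Date

Gitlab::QuickActions::SpendTimeAndDateSeparator.new('30m').execute
# => [1800, <today's date>]     # no date given, so the current date is used

Gitlab::QuickActions::SpendTimeAndDateSeparator.new('30m 2050-01-01').execute
# => nil                        # future dates are rejected by valid_date?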
diff --git a/lib/gitlab/recaptcha.rb b/lib/gitlab/recaptcha.rb
index c463dd487a0..c9efa28d7e7 100644
--- a/lib/gitlab/recaptcha.rb
+++ b/lib/gitlab/recaptcha.rb
@@ -1,12 +1,10 @@
module Gitlab
module Recaptcha
- extend Gitlab::CurrentSettings
-
def self.load_configurations!
- if current_application_settings.recaptcha_enabled
+ if Gitlab::CurrentSettings.recaptcha_enabled
::Recaptcha.configure do |config|
- config.public_key = current_application_settings.recaptcha_site_key
- config.private_key = current_application_settings.recaptcha_private_key
+ config.public_key = Gitlab::CurrentSettings.recaptcha_site_key
+ config.private_key = Gitlab::CurrentSettings.recaptcha_private_key
end
true
@@ -14,7 +12,7 @@ module Gitlab
end
def self.enabled?
- current_application_settings.recaptcha_enabled
+ Gitlab::CurrentSettings.recaptcha_enabled
end
end
end
diff --git a/lib/gitlab/redis/cache.rb b/lib/gitlab/redis/cache.rb
index 9bf019b72e6..a991933e910 100644
--- a/lib/gitlab/redis/cache.rb
+++ b/lib/gitlab/redis/cache.rb
@@ -1,5 +1,5 @@
# please require all dependencies below:
-require_relative 'wrapper' unless defined?(::Gitlab::Redis::Wrapper)
+require_relative 'wrapper' unless defined?(::Rails) && ::Rails.root.present?
module Gitlab
module Redis
diff --git a/lib/gitlab/redis/wrapper.rb b/lib/gitlab/redis/wrapper.rb
index 8ad06480575..4178b436acf 100644
--- a/lib/gitlab/redis/wrapper.rb
+++ b/lib/gitlab/redis/wrapper.rb
@@ -24,6 +24,7 @@ module Gitlab
# the pool will be used in a multi-threaded context
size += Sidekiq.options[:concurrency]
end
+
size
end
@@ -104,6 +105,7 @@ module Gitlab
db_numbers = queries["db"] if queries.key?("db")
config[:db] = db_numbers[0].to_i if db_numbers.any?
end
+
config
else
redis_hash = ::Redis::Store::Factory.extract_host_options_from_uri(redis_url)
diff --git a/lib/gitlab/reference_extractor.rb b/lib/gitlab/reference_extractor.rb
index bc836dcc08d..9ff82d628c0 100644
--- a/lib/gitlab/reference_extractor.rb
+++ b/lib/gitlab/reference_extractor.rb
@@ -1,7 +1,7 @@
module Gitlab
# Extract possible GFM references from an arbitrary String for further processing.
class ReferenceExtractor < Banzai::ReferenceExtractor
- REFERABLES = %i(user issue label milestone merge_request snippet commit commit_range directly_addressed_user).freeze
+ REFERABLES = %i(user issue label milestone merge_request snippet commit commit_range directly_addressed_user epic).freeze
attr_accessor :project, :current_user, :author
def initialize(project, current_user = nil)
diff --git a/lib/gitlab/regex.rb b/lib/gitlab/regex.rb
index 58f6245579a..ac3de2a8f71 100644
--- a/lib/gitlab/regex.rb
+++ b/lib/gitlab/regex.rb
@@ -25,7 +25,7 @@ module Gitlab
# See https://github.com/docker/distribution/blob/master/reference/regexp.go.
#
def container_repository_name_regex
- @container_repository_regex ||= %r{\A[a-z0-9]+(?:[-._/][a-z0-9]+)*\Z}
+ @container_repository_regex ||= %r{\A[a-z0-9]+((?:[._/]|__|[-])[a-z0-9]+)*\Z}
end
##
@@ -37,15 +37,19 @@ module Gitlab
end
def environment_name_regex_chars
- 'a-zA-Z0-9_/\\$\\{\\}\\. -'
+ 'a-zA-Z0-9_/\\$\\{\\}\\. \\-'
+ end
+
+ def environment_name_regex_chars_without_slash
+ 'a-zA-Z0-9_\\$\\{\\}\\. -'
end
def environment_name_regex
- @environment_name_regex ||= /\A[#{environment_name_regex_chars}]+\z/.freeze
+ @environment_name_regex ||= /\A[#{environment_name_regex_chars_without_slash}]([#{environment_name_regex_chars}]*[#{environment_name_regex_chars_without_slash}])?\z/.freeze
end
def environment_name_regex_message
- "can contain only letters, digits, '-', '_', '/', '$', '{', '}', '.', and spaces"
+ "can contain only letters, digits, '-', '_', '/', '$', '{', '}', '.', and spaces, but it cannot start or end with '/'"
end
def kubernetes_namespace_regex
@@ -65,5 +69,9 @@ module Gitlab
"can contain only lowercase letters, digits, and '-'. " \
"Must start with a letter, and cannot end with '-'"
end
+
+ def build_trace_section_regex
+ @build_trace_section_regexp ||= /section_((?:start)|(?:end)):(\d+):([a-zA-Z0-9_.-]+)\r\033\[0K/.freeze
+ end
end
end
diff --git a/lib/gitlab/repo_path.rb b/lib/gitlab/repo_path.rb
index 3591fa9145e..79265cf952d 100644
--- a/lib/gitlab/repo_path.rb
+++ b/lib/gitlab/repo_path.rb
@@ -30,7 +30,7 @@ module Gitlab
raise NotFoundError.new("No known storage path matches #{repo_path.inspect}")
end
- result.sub(/\A\/*/, '')
+ result.sub(%r{\A/*}, '')
end
def self.find_project(project_path)
diff --git a/lib/gitlab/routing.rb b/lib/gitlab/routing.rb
index e57890f1143..2c994536060 100644
--- a/lib/gitlab/routing.rb
+++ b/lib/gitlab/routing.rb
@@ -40,5 +40,24 @@ module Gitlab
def self.url_helpers
@url_helpers ||= Gitlab::Application.routes.url_helpers
end
+
+ def self.redirect_legacy_paths(router, *paths)
+ build_redirect_path = lambda do |request, _params, path|
+ # Only replace the last occurrence of `path`.
+ #
+ # `request.fullpath` includes the querystring
+ new_path = request.path.sub(%r{/#{path}(/*)(?!.*#{path})}, "/-/#{path}\\1")
+ new_path << "?#{request.query_string}" if request.query_string.present?
+
+ new_path
+ end
+
+ paths.each do |path|
+ router.match "/#{path}(/*rest)",
+ via: [:get, :post, :patch, :delete],
+ to: router.redirect { |params, request| build_redirect_path.call(request, params, path) },
+ as: "legacy_#{path}_redirect"
+ end
+ end
end
end
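A minimal sketch of how the new redirect helper might be wired into config/routes.rb; the scope and path names are illustrative, not taken from this patch:

# config/routes.rb (illustrative)
scope(path: '*namespace_id/:project_id') do
  # /group/project/settings/ci_cd?foo=bar is redirected to
  # /group/project/-/settings/ci_cd?foo=bar, keeping the query string.
  Gitlab::Routing.redirect_legacy_paths(self, :settings)
end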
diff --git a/lib/gitlab/saml/auth_hash.rb b/lib/gitlab/saml/auth_hash.rb
deleted file mode 100644
index 67a5f368bdb..00000000000
--- a/lib/gitlab/saml/auth_hash.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-module Gitlab
- module Saml
- class AuthHash < Gitlab::OAuth::AuthHash
- def groups
- get_raw(Gitlab::Saml::Config.groups)
- end
-
- private
-
- def get_raw(key)
- # Needs to call `all` because of https://git.io/vVo4u
- # otherwise just the first value is returned
- auth_hash.extra[:raw_info].all[key]
- end
- end
- end
-end
diff --git a/lib/gitlab/saml/config.rb b/lib/gitlab/saml/config.rb
deleted file mode 100644
index 574c3a4b28c..00000000000
--- a/lib/gitlab/saml/config.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-module Gitlab
- module Saml
- class Config
- class << self
- def options
- Gitlab.config.omniauth.providers.find { |provider| provider.name == 'saml' }
- end
-
- def groups
- options[:groups_attribute]
- end
-
- def external_groups
- options[:external_groups]
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/saml/user.rb b/lib/gitlab/saml/user.rb
deleted file mode 100644
index 0f323a9e8b2..00000000000
--- a/lib/gitlab/saml/user.rb
+++ /dev/null
@@ -1,70 +0,0 @@
-# SAML extension for User model
-#
-# * Find GitLab user based on SAML uid and provider
-# * Create new user from SAML data
-#
-module Gitlab
- module Saml
- class User < Gitlab::OAuth::User
- def save
- super('SAML')
- end
-
- def gl_user
- if auto_link_ldap_user?
- @user ||= find_or_create_ldap_user
- end
-
- @user ||= find_by_uid_and_provider
-
- if auto_link_saml_user?
- @user ||= find_by_email
- end
-
- if signup_enabled?
- @user ||= build_new_user
- end
-
- if external_users_enabled? && @user
- # Check if there is overlap between the user's groups and the external groups
- # setting then set user as external or internal.
- @user.external =
- if (auth_hash.groups & Gitlab::Saml::Config.external_groups).empty?
- false
- else
- true
- end
- end
-
- @user
- end
-
- def find_by_email
- if auth_hash.has_attribute?(:email)
- user = ::User.find_by(email: auth_hash.email.downcase)
- user.identities.new(extern_uid: auth_hash.uid, provider: auth_hash.provider) if user
- user
- end
- end
-
- def changed?
- return true unless gl_user
- gl_user.changed? || gl_user.identities.any?(&:changed?)
- end
-
- protected
-
- def auto_link_saml_user?
- Gitlab.config.omniauth.auto_link_saml_user
- end
-
- def external_users_enabled?
- !Gitlab::Saml::Config.external_groups.nil?
- end
-
- def auth_hash=(auth_hash)
- @auth_hash = Gitlab::Saml::AuthHash.new(auth_hash)
- end
- end
- end
-end
diff --git a/lib/gitlab/search_results.rb b/lib/gitlab/search_results.rb
index efe8095beea..757ef71b95a 100644
--- a/lib/gitlab/search_results.rb
+++ b/lib/gitlab/search_results.rb
@@ -1,7 +1,9 @@
module Gitlab
class SearchResults
class FoundBlob
- attr_reader :id, :filename, :basename, :ref, :startline, :data
+ include EncodingHelper
+
+ attr_reader :id, :filename, :basename, :ref, :startline, :data, :project_id
def initialize(opts = {})
@id = opts.fetch(:id, nil)
@@ -9,7 +11,9 @@ module Gitlab
@basename = opts.fetch(:basename, nil)
@ref = opts.fetch(:ref, nil)
@startline = opts.fetch(:startline, nil)
- @data = opts.fetch(:data, nil)
+ @data = encode_utf8(opts.fetch(:data, nil))
+ @per_page = opts.fetch(:per_page, 20)
+ @project_id = opts.fetch(:project_id, nil)
end
def path
@@ -21,61 +25,86 @@ module Gitlab
end
end
- attr_reader :current_user, :query
+ attr_reader :current_user, :query, :per_page
# Limit search results by passed projects
# It allows us to search only for projects user has access to
attr_reader :limit_projects
- def initialize(current_user, limit_projects, query)
+ # Whether a custom filter is used to restrict the scope of projects.
+ # If the default filter (which lists all projects the user has access to)
+ # is used, we can skip it when filtering merge requests and optimize the
+ # query.
+ attr_reader :default_project_filter
+
+ def initialize(current_user, limit_projects, query, default_project_filter: false, per_page: 20)
@current_user = current_user
@limit_projects = limit_projects || Project.all
- @query = Shellwords.shellescape(query) if query.present?
+ @query = query
+ @default_project_filter = default_project_filter
+ @per_page = per_page
end
- def objects(scope, page = nil)
- case scope
- when 'projects'
- projects.page(page).per(per_page)
- when 'issues'
- issues.page(page).per(per_page)
- when 'merge_requests'
- merge_requests.page(page).per(per_page)
- when 'milestones'
- milestones.page(page).per(per_page)
- else
- Kaminari.paginate_array([]).page(page).per(per_page)
- end
+ def objects(scope, page = nil, without_count = true)
+ collection = case scope
+ when 'projects'
+ projects.page(page).per(per_page)
+ when 'issues'
+ issues.page(page).per(per_page)
+ when 'merge_requests'
+ merge_requests.page(page).per(per_page)
+ when 'milestones'
+ milestones.page(page).per(per_page)
+ else
+ Kaminari.paginate_array([]).page(page).per(per_page)
+ end
+
+ without_count ? collection.without_count : collection
end
- def projects_count
- @projects_count ||= projects.count
+ def limited_projects_count
+ @limited_projects_count ||= projects.limit(count_limit).count
end
- def issues_count
- @issues_count ||= issues.count
+ def limited_issues_count
+ return @limited_issues_count if @limited_issues_count
+
+ # By default, getting a limited count (e.g. 1000+) is fast on issuable
+ # collections, except for issues: filtering both public issues and the
+ # confidential issues the user has access to is too complex.
+ # It's faster to try to fetch all public issues first, and only fetch
+ # all issues if necessary.
+ sum = issues(public_only: true).limit(count_limit).count
+ @limited_issues_count = sum < count_limit ? issues.limit(count_limit).count : sum
end
- def merge_requests_count
- @merge_requests_count ||= merge_requests.count
+ def limited_merge_requests_count
+ @limited_merge_requests_count ||= merge_requests.limit(count_limit).count
end
- def milestones_count
- @milestones_count ||= milestones.count
+ def limited_milestones_count
+ @limited_milestones_count ||= milestones.limit(count_limit).count
end
def single_commit_result?
false
end
+ def count_limit
+ 1001
+ end
+
private
def projects
limit_projects.search(query)
end
- def issues
- issues = IssuesFinder.new(current_user).execute.where(project_id: project_ids_relation)
+ def issues(finder_params = {})
+ issues = IssuesFinder.new(current_user, finder_params).execute
+ unless default_project_filter
+ issues = issues.where(project_id: project_ids_relation)
+ end
issues =
if query =~ /#(\d+)\z/
@@ -84,34 +113,35 @@ module Gitlab
issues.full_search(query)
end
- issues.order('updated_at DESC')
+ issues.reorder('updated_at DESC')
end
def milestones
milestones = Milestone.where(project_id: project_ids_relation)
milestones = milestones.search(query)
- milestones.order('updated_at DESC')
+ milestones.reorder('updated_at DESC')
end
def merge_requests
- merge_requests = MergeRequestsFinder.new(current_user).execute.in_projects(project_ids_relation)
+ merge_requests = MergeRequestsFinder.new(current_user).execute
+ unless default_project_filter
+ merge_requests = merge_requests.in_projects(project_ids_relation)
+ end
+
merge_requests =
if query =~ /[#!](\d+)\z/
merge_requests.where(iid: $1)
else
merge_requests.full_search(query)
end
- merge_requests.order('updated_at DESC')
+
+ merge_requests.reorder('updated_at DESC')
end
def default_scope
'projects'
end
- def per_page
- 20
- end
-
def project_ids_relation
limit_projects.select(:id).reorder(nil)
end
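A hedged sketch of the reworked SearchResults interface after this change; current_user is assumed to be a User in scope and the query string is arbitrary:

results = Gitlab::SearchResults.new(current_user, Project.all, 'wiki', per_page: 50)

results.objects('issues', 1)         # paginated, skips the expensive total count by default
results.objects('issues', 1, false)  # pass false to keep the counted collection
results.limited_issues_count         # capped at count_limit (1001) rather than a full COUNT(*)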
diff --git a/lib/gitlab/seeder.rb b/lib/gitlab/seeder.rb
index f9ab9bd466f..98f005cb61b 100644
--- a/lib/gitlab/seeder.rb
+++ b/lib/gitlab/seeder.rb
@@ -5,10 +5,17 @@ module DeliverNever
end
end
+module MuteNotifications
+ def new_note(note)
+ end
+end
+
module Gitlab
class Seeder
def self.quiet
+ mute_notifications
mute_mailer
+
SeedFu.quiet = true
yield
@@ -17,6 +24,10 @@ module Gitlab
puts "\nOK".color(:green)
end
+ def self.mute_notifications
+ NotificationService.prepend(MuteNotifications)
+ end
+
def self.mute_mailer
ActionMailer::MessageDelivery.prepend(DeliverNever)
end
diff --git a/lib/gitlab/sentry.rb b/lib/gitlab/sentry.rb
index 159d0e7952e..4a22fc80f75 100644
--- a/lib/gitlab/sentry.rb
+++ b/lib/gitlab/sentry.rb
@@ -1,9 +1,7 @@
module Gitlab
module Sentry
- extend Gitlab::CurrentSettings
-
def self.enabled?
- Rails.env.production? && current_application_settings.sentry_enabled?
+ Rails.env.production? && Gitlab::CurrentSettings.sentry_enabled?
end
def self.context(current_user = nil)
diff --git a/lib/gitlab/setup_helper.rb b/lib/gitlab/setup_helper.rb
new file mode 100644
index 00000000000..07d7c91cb5d
--- /dev/null
+++ b/lib/gitlab/setup_helper.rb
@@ -0,0 +1,61 @@
+module Gitlab
+ module SetupHelper
+ class << self
+ # We cannot create config.toml files for all possible Gitaly configurations.
+ # For instance, if Gitaly is running on another machine then it makes no
+ # sense to write a config.toml file on the current machine. This method will
+ # only generate a configuration for the most common and simplest case: when
+ # we have exactly one Gitaly process and we are sure it is running locally
+ # because it uses a Unix socket.
+ # For development and testing purposes, an extra storage is added to gitaly,
+ # which is not known to Rails, but must be explicitly stubbed.
+ def gitaly_configuration_toml(gitaly_dir, gitaly_ruby: true)
+ storages = []
+ address = nil
+
+ Gitlab.config.repositories.storages.each do |key, val|
+ if address
+ if address != val['gitaly_address']
+ raise ArgumentError, "Your gitlab.yml contains more than one gitaly_address."
+ end
+ elsif URI(val['gitaly_address']).scheme != 'unix'
+ raise ArgumentError, "Automatic config.toml generation only supports 'unix:' addresses."
+ else
+ address = val['gitaly_address']
+ end
+
+ storages << { name: key, path: val['path'] }
+ end
+
+ if Rails.env.test?
+ storages << { name: 'test_second_storage', path: Rails.root.join('tmp', 'tests', 'second_storage').to_s }
+ end
+
+ config = { socket_path: address.sub(/\Aunix:/, ''), storage: storages }
+ config[:auth] = { token: 'secret' } if Rails.env.test?
+ config[:'gitaly-ruby'] = { dir: File.join(gitaly_dir, 'ruby') } if gitaly_ruby
+ config[:'gitlab-shell'] = { dir: Gitlab.config.gitlab_shell.path }
+ config[:bin_dir] = Gitlab.config.gitaly.client_path
+
+ TomlRB.dump(config)
+ end
+
+ # rubocop:disable Rails/Output
+ def create_gitaly_configuration(dir, force: false)
+ config_path = File.join(dir, 'config.toml')
+ FileUtils.rm_f(config_path) if force
+
+ File.open(config_path, File::WRONLY | File::CREAT | File::EXCL) do |f|
+ f.puts gitaly_configuration_toml(dir)
+ end
+ rescue Errno::EEXIST
+ puts "Skipping config.toml generation:"
+ puts "A configuration file already exists."
+ rescue ArgumentError => e
+ puts "Skipping config.toml generation:"
+ puts e.message
+ end
+ # rubocop:enable Rails/Output
+ end
+ end
+end
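A sketch of invoking the new setup helper from a rake task or console in development; the Gitaly directory path is illustrative:

# Writes <dir>/config.toml unless one already exists; force: true removes any existing file first.
Gitlab::SetupHelper.create_gitaly_configuration('/home/git/gitaly', force: false)

# Or inspect the generated TOML without touching the file system:
puts Gitlab::SetupHelper.gitaly_configuration_toml('/home/git/gitaly', gitaly_ruby: false)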
diff --git a/lib/gitlab/shell.rb b/lib/gitlab/shell.rb
index 81ecdf43ef9..dda7afc0999 100644
--- a/lib/gitlab/shell.rb
+++ b/lib/gitlab/shell.rb
@@ -65,15 +65,26 @@ module Gitlab
# Init new repository
#
- # storage - project's storage path
- # name - project path with namespace
+ # storage - project's storage name
+ # name - project disk path
#
# Ex.
# add_repository("/path/to/storage", "gitlab/gitlab-ci")
#
- # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/387
def add_repository(storage, name)
- Gitlab::Git::Repository.create(storage, name, bare: true, symlink_hooks_to: gitlab_shell_hooks_path)
+ relative_path = name.dup
+ relative_path << '.git' unless relative_path.end_with?('.git')
+
+ gitaly_migrate(:create_repository) do |is_enabled|
+ if is_enabled
+ repository = Gitlab::Git::Repository.new(storage, relative_path, '')
+ repository.gitaly_repository_client.create_repository
+ true
+ else
+ repo_path = File.join(Gitlab.config.repositories.storages[storage]['path'], relative_path)
+ Gitlab::Git::Repository.create(repo_path, bare: true, symlink_hooks_to: gitlab_shell_hooks_path)
+ end
+ end
rescue => err
Rails.logger.error("Failed to add repository #{storage}/#{name}: #{err}")
false
@@ -82,84 +93,100 @@ module Gitlab
# Import repository
#
# storage - project's storage path
- # name - project path with namespace
+ # name - project disk path
+ # url - URL to import from
#
# Ex.
- # import_repository("/path/to/storage", "gitlab/gitlab-ci", "https://github.com/randx/six.git")
+ # import_repository("/path/to/storage", "gitlab/gitlab-ci", "https://gitlab.com/gitlab-org/gitlab-test.git")
#
- # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/387
+ # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/874
def import_repository(storage, name, url)
- # Timeout should be less than 900 ideally, to prevent the memory killer
- # to silently kill the process without knowing we are timing out here.
- cmd = [gitlab_shell_projects_path, 'import-project',
- storage, "#{name}.git", url, "#{Gitlab.config.gitlab_shell.git_timeout}"]
- gitlab_shell_fast_execute_raise_error(cmd)
+ if url.start_with?('.', '/')
+ raise Error.new("don't use disk paths with import_repository: #{url.inspect}")
+ end
+
+ # The timeout ensures the subprocess won't hang forever
+ cmd = gitlab_projects(storage, "#{name}.git")
+ success = cmd.import_project(url, git_timeout)
+
+ raise Error, cmd.output unless success
+
+ success
end
# Fetch remote for repository
#
# repository - an instance of Git::Repository
# remote - remote name
+ # ssh_auth - SSH known_hosts data and a private key to use for public-key authentication
# forced - should we use --force flag?
# no_tags - should we use --no-tags flag?
#
# Ex.
# fetch_remote(my_repo, "upstream")
#
- # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/387
- def fetch_remote(repository, remote, ssh_auth: nil, forced: false, no_tags: false)
+ def fetch_remote(repository, remote, ssh_auth: nil, forced: false, no_tags: false, prune: true)
gitaly_migrate(:fetch_remote) do |is_enabled|
if is_enabled
- repository.gitaly_repository_client.fetch_remote(remote, ssh_auth: ssh_auth, forced: forced, no_tags: no_tags)
+ repository.gitaly_repository_client.fetch_remote(remote, ssh_auth: ssh_auth, forced: forced, no_tags: no_tags, timeout: git_timeout, prune: prune)
else
storage_path = Gitlab.config.repositories.storages[repository.storage]["path"]
- local_fetch_remote(storage_path, repository.relative_path, remote, ssh_auth: ssh_auth, forced: forced, no_tags: no_tags)
+ local_fetch_remote(storage_path, repository.relative_path, remote, ssh_auth: ssh_auth, forced: forced, no_tags: no_tags, prune: prune)
end
end
end
- # Move repository
+ # Moving a repository reroutes to mv_directory, which is an alias for
+ # mv_namespace, since the underlying implementation is a move action,
+ # indiscriminate of what the folders might be.
+ #
# storage - project's storage path
- # path - project path with namespace
- # new_path - new project path with namespace
+ # path - project disk path
+ # new_path - new project disk path
#
# Ex.
# mv_repository("/path/to/storage", "gitlab/gitlab-ci", "randx/gitlab-ci-new")
#
- # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/387
+ # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/873
def mv_repository(storage, path, new_path)
- gitlab_shell_fast_execute([gitlab_shell_projects_path, 'mv-project',
- storage, "#{path}.git", "#{new_path}.git"])
+ return false if path.empty? || new_path.empty?
+
+ !!mv_directory(storage, "#{path}.git", "#{new_path}.git")
end
- # Fork repository to new namespace
+ # Fork repository to new path
# forked_from_storage - forked-from project's storage path
- # path - project path with namespace
+ # forked_from_disk_path - project disk path
# forked_to_storage - forked-to project's storage path
- # fork_namespace - namespace for forked project
+ # forked_to_disk_path - forked project disk path
#
# Ex.
- # fork_repository("/path/to/forked_from/storage", "gitlab/gitlab-ci", "/path/to/forked_to/storage", "randx")
+ # fork_repository("/path/to/forked_from/storage", "gitlab/gitlab-ci", "/path/to/forked_to/storage", "new-namespace/gitlab-ci")
#
- # Gitaly note: JV: not easy to migrate because this involves two Gitaly servers, not one.
- def fork_repository(forked_from_storage, path, forked_to_storage, fork_namespace)
- gitlab_shell_fast_execute([gitlab_shell_projects_path, 'fork-project',
- forked_from_storage, "#{path}.git", forked_to_storage,
- fork_namespace])
+ # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/817
+ def fork_repository(forked_from_storage, forked_from_disk_path, forked_to_storage, forked_to_disk_path)
+ gitlab_projects(forked_from_storage, "#{forked_from_disk_path}.git")
+ .fork_repository(forked_to_storage, "#{forked_to_disk_path}.git")
end
- # Remove repository from file system
+ # Removes a repository from the file system, using rm_directory, which is an
+ # alias for rm_namespace, since the underlying implementation removes the name
+ # passed as the second argument from the given storage.
#
# storage - project's storage path
- # name - project path with namespace
+ # name - project disk path
#
# Ex.
# remove_repository("/path/to/storage", "gitlab/gitlab-ci")
#
- # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/387
+ # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/873
def remove_repository(storage, name)
- gitlab_shell_fast_execute([gitlab_shell_projects_path,
- 'rm-project', storage, "#{name}.git"])
+ return false if name.empty?
+
+ !!rm_directory(storage, "#{name}.git")
+ rescue ArgumentError => e
+ Rails.logger.warn("Repository does not exist: #{e} at: #{name}.git")
+ false
end
# Add new key to gitlab-shell
@@ -168,6 +195,8 @@ module Gitlab
# add_key("key-42", "sha-rsa ...")
#
def add_key(key_id, key_content)
+ return unless self.authorized_keys_enabled?
+
gitlab_shell_fast_execute([gitlab_shell_keys_path,
'add-key', key_id, self.class.strip_key(key_content)])
end
@@ -177,6 +206,8 @@ module Gitlab
# Ex.
# batch_add_keys { |adder| adder.add_key("key-42", "sha-rsa ...") }
def batch_add_keys(&block)
+ return unless self.authorized_keys_enabled?
+
IO.popen(%W(#{gitlab_shell_path}/bin/gitlab-keys batch-add-keys), 'w') do |io|
yield(KeyAdder.new(io))
end
@@ -187,10 +218,11 @@ module Gitlab
# Ex.
# remove_key("key-342", "sha-rsa ...")
#
- def remove_key(key_id, key_content)
+ def remove_key(key_id, key_content = nil)
+ return unless self.authorized_keys_enabled?
+
args = [gitlab_shell_keys_path, 'rm-key', key_id]
args << key_content if key_content
-
gitlab_shell_fast_execute(args)
end
@@ -200,20 +232,81 @@ module Gitlab
# remove_all_keys
#
def remove_all_keys
+ return unless self.authorized_keys_enabled?
+
gitlab_shell_fast_execute([gitlab_shell_keys_path, 'clear'])
end
+ # Remove ssh keys from gitlab shell that are not in the DB
+ #
+ # Ex.
+ # remove_keys_not_found_in_db
+ #
+ def remove_keys_not_found_in_db
+ return unless self.authorized_keys_enabled?
+
+ Rails.logger.info("Removing keys not found in DB")
+
+ batch_read_key_ids do |ids_in_file|
+ ids_in_file.uniq!
+ keys_in_db = Key.where(id: ids_in_file)
+
+ next unless ids_in_file.size > keys_in_db.count # optimization
+
+ ids_to_remove = ids_in_file - keys_in_db.pluck(:id)
+ ids_to_remove.each do |id|
+ Rails.logger.info("Removing key-#{id} not found in DB")
+ remove_key("key-#{id}")
+ end
+ end
+ end
+
+ # Iterate over all ssh key IDs from gitlab shell, in batches
+ #
+ # Ex.
+ # batch_read_key_ids { |batch| keys = Key.where(id: batch) }
+ #
+ def batch_read_key_ids(batch_size: 100, &block)
+ return unless self.authorized_keys_enabled?
+
+ list_key_ids do |key_id_stream|
+ key_id_stream.lazy.each_slice(batch_size) do |lines|
+ key_ids = lines.map { |l| l.chomp.to_i }
+ yield(key_ids)
+ end
+ end
+ end
+
+ # Stream all ssh key IDs from gitlab shell, separated by newlines
+ #
+ # Ex.
+ # list_key_ids
+ #
+ def list_key_ids(&block)
+ return unless self.authorized_keys_enabled?
+
+ IO.popen(%W(#{gitlab_shell_path}/bin/gitlab-keys list-key-ids), &block)
+ end
+
# Add empty directory for storing repositories
#
# Ex.
# add_namespace("/path/to/storage", "gitlab")
#
- # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385
def add_namespace(storage, name)
- path = full_path(storage, name)
- FileUtils.mkdir_p(path, mode: 0770) unless exists?(storage, name)
+ Gitlab::GitalyClient.migrate(:add_namespace,
+ status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
+ if enabled
+ gitaly_namespace_client(storage).add(name)
+ else
+ path = full_path(storage, name)
+ FileUtils.mkdir_p(path, mode: 0770) unless exists?(storage, name)
+ end
+ end
rescue Errno::EEXIST => e
Rails.logger.warn("Directory exists as a file: #{e} at: #{path}")
+ rescue GRPC::InvalidArgument => e
+ raise ArgumentError, e.message
end
# Remove directory from repositories storage
@@ -222,22 +315,40 @@ module Gitlab
# Ex.
# rm_namespace("/path/to/storage", "gitlab")
#
- # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385
def rm_namespace(storage, name)
- FileUtils.rm_r(full_path(storage, name), force: true)
+ Gitlab::GitalyClient.migrate(:remove_namespace,
+ status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
+ if enabled
+ gitaly_namespace_client(storage).remove(name)
+ else
+ FileUtils.rm_r(full_path(storage, name), force: true)
+ end
+ end
+ rescue GRPC::InvalidArgument => e
+ raise ArgumentError, e.message
end
+ alias_method :rm_directory, :rm_namespace
# Move namespace directory inside repositories storage
#
# Ex.
# mv_namespace("/path/to/storage", "gitlab", "gitlabhq")
#
- # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385
def mv_namespace(storage, old_name, new_name)
- return false if exists?(storage, new_name) || !exists?(storage, old_name)
+ Gitlab::GitalyClient.migrate(:rename_namespace,
+ status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
+ if enabled
+ gitaly_namespace_client(storage).rename(old_name, new_name)
+ else
+ return false if exists?(storage, new_name) || !exists?(storage, old_name)
- FileUtils.mv(full_path(storage, old_name), full_path(storage, new_name))
+ FileUtils.mv(full_path(storage, old_name), full_path(storage, new_name))
+ end
+ end
+ rescue GRPC::InvalidArgument
+ false
end
+ alias_method :mv_directory, :mv_namespace
def url_to_repo(path)
Gitlab.config.gitlab_shell.ssh_path_prefix + "#{path}.git"
@@ -260,7 +371,14 @@ module Gitlab
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385
def exists?(storage, dir_name)
- File.exist?(full_path(storage, dir_name))
+ Gitlab::GitalyClient.migrate(:namespace_exists,
+ status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
+ if enabled
+ gitaly_namespace_client(storage).exists?(dir_name)
+ else
+ File.exist?(full_path(storage, dir_name))
+ end
+ end
end
protected
@@ -291,26 +409,45 @@ module Gitlab
File.join(gitlab_shell_path, 'bin', 'gitlab-keys')
end
+ def authorized_keys_enabled?
+ # Return true if nil to ensure the authorized_keys methods work while
+ # fixing the authorized_keys file during migration.
+ return true if Gitlab::CurrentSettings.current_application_settings.authorized_keys_enabled.nil?
+
+ Gitlab::CurrentSettings.current_application_settings.authorized_keys_enabled
+ end
+
private
- def local_fetch_remote(storage, name, remote, ssh_auth: nil, forced: false, no_tags: false)
- args = [gitlab_shell_projects_path, 'fetch-remote', storage, name, remote, "#{Gitlab.config.gitlab_shell.git_timeout}"]
- args << '--force' if forced
- args << '--no-tags' if no_tags
+ def gitlab_projects(shard_path, disk_path)
+ Gitlab::Git::GitlabProjects.new(
+ shard_path,
+ disk_path,
+ global_hooks_path: Gitlab.config.gitlab_shell.hooks_path,
+ logger: Rails.logger
+ )
+ end
- vars = {}
+ def local_fetch_remote(storage_path, repository_relative_path, remote, ssh_auth: nil, forced: false, no_tags: false, prune: true)
+ vars = { force: forced, tags: !no_tags, prune: prune }
if ssh_auth&.ssh_import?
if ssh_auth.ssh_key_auth? && ssh_auth.ssh_private_key.present?
- vars['GITLAB_SHELL_SSH_KEY'] = ssh_auth.ssh_private_key
+ vars[:ssh_key] = ssh_auth.ssh_private_key
end
if ssh_auth.ssh_known_hosts.present?
- vars['GITLAB_SHELL_KNOWN_HOSTS'] = ssh_auth.ssh_known_hosts
+ vars[:known_hosts] = ssh_auth.ssh_known_hosts
end
end
- gitlab_shell_fast_execute_raise_error(args, vars)
+ cmd = gitlab_projects(storage_path, repository_relative_path)
+
+ success = cmd.fetch_remote(remote, git_timeout, vars)
+
+ raise Error, cmd.output unless success
+
+ success
end
def gitlab_shell_fast_execute(cmd)
@@ -326,6 +463,7 @@ module Gitlab
output, status = gitlab_shell_fast_execute_helper(cmd, vars)
raise Error, output unless status.zero?
+
true
end
@@ -337,6 +475,18 @@ module Gitlab
Bundler.with_original_env { Popen.popen(cmd, nil, vars) }
end
+ def gitaly_namespace_client(storage_path)
+ storage, _value = Gitlab.config.repositories.storages.find do |storage, value|
+ value['path'] == storage_path
+ end
+
+ Gitlab::GitalyClient::NamespaceService.new(storage)
+ end
+
+ def git_timeout
+ Gitlab.config.gitlab_shell.git_timeout
+ end
+
def gitaly_migrate(method, &block)
Gitlab::GitalyClient.migrate(method, &block)
rescue GRPC::NotFound, GRPC::BadStatus => e
diff --git a/lib/gitlab/shell_adapter.rb b/lib/gitlab/shell_adapter.rb
index fbe2a7a0d72..053dd4ab9e0 100644
--- a/lib/gitlab/shell_adapter.rb
+++ b/lib/gitlab/shell_adapter.rb
@@ -5,7 +5,7 @@
module Gitlab
module ShellAdapter
def gitlab_shell
- Gitlab::Shell.new
+ @gitlab_shell ||= Gitlab::Shell.new
end
end
end
diff --git a/lib/gitlab/sherlock/file_sample.rb b/lib/gitlab/sherlock/file_sample.rb
index 8a3e1a5e5bf..89072b01f2e 100644
--- a/lib/gitlab/sherlock/file_sample.rb
+++ b/lib/gitlab/sherlock/file_sample.rb
@@ -16,7 +16,7 @@ module Gitlab
end
def relative_path
- @relative_path ||= @file.gsub(/^#{Rails.root.to_s}\/?/, '')
+ @relative_path ||= @file.gsub(%r{^#{Rails.root.to_s}/?}, '')
end
def to_param
diff --git a/lib/gitlab/sherlock/middleware.rb b/lib/gitlab/sherlock/middleware.rb
index 687332fc5fc..4c88e33699a 100644
--- a/lib/gitlab/sherlock/middleware.rb
+++ b/lib/gitlab/sherlock/middleware.rb
@@ -2,7 +2,7 @@ module Gitlab
module Sherlock
# Rack middleware used for tracking request metrics.
class Middleware
- CONTENT_TYPES = /text\/html|application\/json/i
+ CONTENT_TYPES = %r{text/html|application/json}i
IGNORE_PATHS = %r{^/sherlock}
diff --git a/lib/gitlab/sherlock/query.rb b/lib/gitlab/sherlock/query.rb
index 948bf5e6528..02ddc3f47eb 100644
--- a/lib/gitlab/sherlock/query.rb
+++ b/lib/gitlab/sherlock/query.rb
@@ -4,7 +4,7 @@ module Gitlab
attr_reader :id, :query, :started_at, :finished_at, :backtrace
# SQL identifiers that should be prefixed with newlines.
- PREFIX_NEWLINE = /
+ PREFIX_NEWLINE = %r{
\s+(FROM
|(LEFT|RIGHT)?INNER\s+JOIN
|(LEFT|RIGHT)?OUTER\s+JOIN
@@ -13,7 +13,7 @@ module Gitlab
|GROUP\s+BY
|ORDER\s+BY
|LIMIT
- |OFFSET)\s+/ix # Vim indent breaks when this is on a newline :<
+ |OFFSET)\s+}ix # Vim indent breaks when this is on a newline :<
# Creates a new Query using a String and a separate Array of bindings.
#
diff --git a/lib/gitlab/sherlock/transaction.rb b/lib/gitlab/sherlock/transaction.rb
index 3489fb251b6..400a552bf99 100644
--- a/lib/gitlab/sherlock/transaction.rb
+++ b/lib/gitlab/sherlock/transaction.rb
@@ -89,7 +89,9 @@ module Gitlab
ActiveSupport::Notifications.subscribe('sql.active_record') do |_, start, finish, _, data|
next unless same_thread?
- track_query(data[:sql].strip, data[:binds], start, finish)
+ unless data.fetch(:cached, data[:name] == 'CACHE')
+ track_query(data[:sql].strip, data[:binds], start, finish)
+ end
end
end
diff --git a/lib/gitlab/sidekiq_config.rb b/lib/gitlab/sidekiq_config.rb
new file mode 100644
index 00000000000..c3d7814551c
--- /dev/null
+++ b/lib/gitlab/sidekiq_config.rb
@@ -0,0 +1,61 @@
+require 'yaml'
+require 'set'
+
+module Gitlab
+ module SidekiqConfig
+ # This method is called by `bin/sidekiq-cluster` in EE, which runs outside
+ # of bundler/Rails context, so we cannot use any gem or Rails methods.
+ def self.worker_queues(rails_path = Rails.root.to_s)
+ @worker_queues ||= {}
+ @worker_queues[rails_path] ||= YAML.load_file(File.join(rails_path, 'app/workers/all_queues.yml'))
+ end
+
+ # This method is called by `bin/sidekiq-cluster` in EE, which runs outside
+ # of bundler/Rails context, so we cannot use any gem or Rails methods.
+ def self.expand_queues(queues, all_queues = self.worker_queues)
+ return [] if queues.empty?
+
+ queues_set = all_queues.to_set
+
+ queues.flat_map do |queue|
+ [queue, *queues_set.grep(/\A#{queue}:/)]
+ end
+ end
+
+ def self.redis_queues
+ # Not memoized, because this can change during the life of the application
+ Sidekiq::Queue.all.map(&:name)
+ end
+
+ def self.config_queues
+ @config_queues ||= begin
+ config = YAML.load_file(Rails.root.join('config/sidekiq_queues.yml'))
+ config[:queues].map(&:first)
+ end
+ end
+
+ def self.cron_workers
+ @cron_workers ||= Settings.cron_jobs.map { |job_name, options| options['job_class'].constantize }
+ end
+
+ def self.workers
+ @workers ||= find_workers(Rails.root.join('app', 'workers'))
+ end
+
+ def self.find_workers(root)
+ concerns = root.join('concerns').to_s
+
+ workers = Dir[root.join('**', '*.rb')]
+ .reject { |path| path.start_with?(concerns) }
+
+ workers.map! do |path|
+ ns = Pathname.new(path).relative_path_from(root).to_s.gsub('.rb', '')
+
+ ns.camelize.constantize
+ end
+
+ # Skip things that aren't workers
+ workers.select { |w| w < Sidekiq::Worker }
+ end
+ end
+end
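A short sketch of the queue expansion helper; the queue names are illustrative but follow the namespace:queue convention the code greps for:

all_queues = ['post_receive', 'pipeline_processing:build_finished']

Gitlab::SidekiqConfig.expand_queues(['pipeline_processing'], all_queues)
# => ["pipeline_processing", "pipeline_processing:build_finished"]

Gitlab::SidekiqConfig.expand_queues(['post_receive'], all_queues)
# => ["post_receive"]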
diff --git a/lib/gitlab/sidekiq_middleware/memory_killer.rb b/lib/gitlab/sidekiq_middleware/memory_killer.rb
deleted file mode 100644
index 104280f520a..00000000000
--- a/lib/gitlab/sidekiq_middleware/memory_killer.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-module Gitlab
- module SidekiqMiddleware
- class MemoryKiller
- # Default the RSS limit to 0, meaning the MemoryKiller is disabled
- MAX_RSS = (ENV['SIDEKIQ_MEMORY_KILLER_MAX_RSS'] || 0).to_s.to_i
- # Give Sidekiq 15 minutes of grace time after exceeding the RSS limit
- GRACE_TIME = (ENV['SIDEKIQ_MEMORY_KILLER_GRACE_TIME'] || 15 * 60).to_s.to_i
- # Wait 30 seconds for running jobs to finish during graceful shutdown
- SHUTDOWN_WAIT = (ENV['SIDEKIQ_MEMORY_KILLER_SHUTDOWN_WAIT'] || 30).to_s.to_i
- SHUTDOWN_SIGNAL = (ENV['SIDEKIQ_MEMORY_KILLER_SHUTDOWN_SIGNAL'] || 'SIGKILL').to_s
-
- # Create a mutex used to ensure there will be only one thread waiting to
- # shut Sidekiq down
- MUTEX = Mutex.new
-
- def call(worker, job, queue)
- yield
- current_rss = get_rss
-
- return unless MAX_RSS > 0 && current_rss > MAX_RSS
-
- Thread.new do
- # Return if another thread is already waiting to shut Sidekiq down
- return unless MUTEX.try_lock
-
- Sidekiq.logger.warn "current RSS #{current_rss} exceeds maximum RSS "\
- "#{MAX_RSS}"
- Sidekiq.logger.warn "this thread will shut down PID #{Process.pid} - Worker #{worker.class} - JID-#{job['jid']}"\
- "in #{GRACE_TIME} seconds"
- sleep(GRACE_TIME)
-
- Sidekiq.logger.warn "sending SIGTERM to PID #{Process.pid} - Worker #{worker.class} - JID-#{job['jid']}"
- Process.kill('SIGTERM', Process.pid)
-
- Sidekiq.logger.warn "waiting #{SHUTDOWN_WAIT} seconds before sending "\
- "#{SHUTDOWN_SIGNAL} to PID #{Process.pid} - Worker #{worker.class} - JID-#{job['jid']}"
- sleep(SHUTDOWN_WAIT)
-
- Sidekiq.logger.warn "sending #{SHUTDOWN_SIGNAL} to PID #{Process.pid} - Worker #{worker.class} - JID-#{job['jid']}"
- Process.kill(SHUTDOWN_SIGNAL, Process.pid)
- end
- end
-
- private
-
- def get_rss
- output, status = Gitlab::Popen.popen(%W(ps -o rss= -p #{Process.pid}))
- return 0 unless status.zero?
-
- output.to_i
- end
- end
- end
-end
diff --git a/lib/gitlab/sidekiq_middleware/shutdown.rb b/lib/gitlab/sidekiq_middleware/shutdown.rb
new file mode 100644
index 00000000000..c2b8d6de66e
--- /dev/null
+++ b/lib/gitlab/sidekiq_middleware/shutdown.rb
@@ -0,0 +1,133 @@
+require 'mutex_m'
+
+module Gitlab
+ module SidekiqMiddleware
+ class Shutdown
+ extend Mutex_m
+
+ # Default the RSS limit to 0, meaning the MemoryKiller is disabled
+ MAX_RSS = (ENV['SIDEKIQ_MEMORY_KILLER_MAX_RSS'] || 0).to_s.to_i
+ # Give Sidekiq 15 minutes of grace time after exceeding the RSS limit
+ GRACE_TIME = (ENV['SIDEKIQ_MEMORY_KILLER_GRACE_TIME'] || 15 * 60).to_s.to_i
+ # Wait 30 seconds for running jobs to finish during graceful shutdown
+ SHUTDOWN_WAIT = (ENV['SIDEKIQ_MEMORY_KILLER_SHUTDOWN_WAIT'] || 30).to_s.to_i
+
+ # This exception can be used to request that the middleware start shutting down Sidekiq
+ WantShutdown = Class.new(StandardError)
+
+ ShutdownWithoutRaise = Class.new(WantShutdown)
+ private_constant :ShutdownWithoutRaise
+
+ # For testing only, to avoid race conditions (?) in RSpec mocks.
+ attr_reader :trace
+
+ # We store the shutdown thread in a class variable to ensure that there
+ # can be only one shutdown thread in the process.
+ def self.create_shutdown_thread
+ mu_synchronize do
+ return unless @shutdown_thread.nil?
+
+ @shutdown_thread = Thread.new { yield }
+ end
+ end
+
+ # For testing only: so we can wait for the shutdown thread to finish.
+ def self.shutdown_thread
+ mu_synchronize { @shutdown_thread }
+ end
+
+ # For testing only: so that we can reset the global state before each test.
+ def self.clear_shutdown_thread
+ mu_synchronize { @shutdown_thread = nil }
+ end
+
+ def initialize
+ @trace = Queue.new if Rails.env.test?
+ end
+
+ def call(worker, job, queue)
+ shutdown_exception = nil
+
+ begin
+ yield
+ check_rss!
+ rescue WantShutdown => ex
+ shutdown_exception = ex
+ end
+
+ return unless shutdown_exception
+
+ self.class.create_shutdown_thread do
+ do_shutdown(worker, job, shutdown_exception)
+ end
+
+ raise shutdown_exception unless shutdown_exception.is_a?(ShutdownWithoutRaise)
+ end
+
+ private
+
+ def do_shutdown(worker, job, shutdown_exception)
+ Sidekiq.logger.warn "Sidekiq worker PID-#{pid} shutting down because of #{shutdown_exception} after job "\
+ "#{worker.class} JID-#{job['jid']}"
+ Sidekiq.logger.warn "Sidekiq worker PID-#{pid} will stop fetching new jobs in #{GRACE_TIME} seconds, and will be shut down #{SHUTDOWN_WAIT} seconds later"
+
+ # Wait `GRACE_TIME` to give the memory intensive job time to finish.
+ # Then, tell Sidekiq to stop fetching new jobs.
+ wait_and_signal(GRACE_TIME, 'SIGTSTP', 'stop fetching new jobs')
+
+ # Wait `SHUTDOWN_WAIT` to give already fetched jobs time to finish.
+ # Then, tell Sidekiq to gracefully shut down by giving jobs a few more
+ # moments to finish, killing and requeuing them if they didn't, and
+ # then terminating itself.
+ wait_and_signal(SHUTDOWN_WAIT, 'SIGTERM', 'gracefully shut down')
+
+ # Wait for Sidekiq to shutdown gracefully, and kill it if it didn't.
+ wait_and_signal(Sidekiq.options[:timeout] + 2, 'SIGKILL', 'die')
+ end
+
+ def check_rss!
+ return unless MAX_RSS > 0
+
+ current_rss = get_rss
+ return unless current_rss > MAX_RSS
+
+ raise ShutdownWithoutRaise.new("current RSS #{current_rss} exceeds maximum RSS #{MAX_RSS}")
+ end
+
+ def get_rss
+ output, status = Gitlab::Popen.popen(%W(ps -o rss= -p #{pid}), Rails.root.to_s)
+ return 0 unless status.zero?
+
+ output.to_i
+ end
+
+ def wait_and_signal(time, signal, explanation)
+ Sidekiq.logger.warn "waiting #{time} seconds before sending Sidekiq worker PID-#{pid} #{signal} (#{explanation})"
+ sleep(time)
+
+ Sidekiq.logger.warn "sending Sidekiq worker PID-#{pid} #{signal} (#{explanation})"
+ kill(signal, pid)
+ end
+
+ def pid
+ Process.pid
+ end
+
+ def sleep(time)
+ if Rails.env.test?
+ @trace << [:sleep, time]
+ else
+ Kernel.sleep(time)
+ end
+ end
+
+ def kill(signal, pid)
+ if Rails.env.test?
+ @trace << [:kill, signal, pid]
+ else
+ Process.kill(signal, pid)
+ end
+ end
+ end
+ end
+end
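The middleware is driven entirely by environment variables read when the class loads; illustrative values for a memory-constrained Sidekiq node, exported before the process boots:

#   SIDEKIQ_MEMORY_KILLER_MAX_RSS=1000000    # kilobytes of RSS; 0 (the default) disables the check
#   SIDEKIQ_MEMORY_KILLER_GRACE_TIME=900     # seconds before SIGTSTP stops fetching new jobs
#   SIDEKIQ_MEMORY_KILLER_SHUTDOWN_WAIT=30   # seconds before SIGTERM asks for a graceful shutdown
Gitlab::SidekiqMiddleware::Shutdown::MAX_RSS # => 1000000 with the variables above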
diff --git a/lib/gitlab/sidekiq_status.rb b/lib/gitlab/sidekiq_status.rb
index a0a2769cf9e..a1f689d94d9 100644
--- a/lib/gitlab/sidekiq_status.rb
+++ b/lib/gitlab/sidekiq_status.rb
@@ -51,6 +51,13 @@ module Gitlab
self.num_running(job_ids).zero?
end
+ # Returns true if the given job is running
+ #
+ # job_id - The Sidekiq job ID to check.
+ def self.running?(job_id)
+ num_running([job_id]) > 0
+ end
+
# Returns the number of jobs that are running.
#
# job_ids - The Sidekiq job IDs to check.
diff --git a/lib/gitlab/sidekiq_versioning.rb b/lib/gitlab/sidekiq_versioning.rb
new file mode 100644
index 00000000000..9683214ec18
--- /dev/null
+++ b/lib/gitlab/sidekiq_versioning.rb
@@ -0,0 +1,25 @@
+module Gitlab
+ module SidekiqVersioning
+ def self.install!
+ Sidekiq::Manager.prepend SidekiqVersioning::Manager
+
+ # The Sidekiq client API always adds the queue to the Sidekiq queue
+ # list, but mail_room and gitlab-shell do not. This is only necessary
+ # for monitoring.
+ begin
+ queues = SidekiqConfig.worker_queues
+
+ if queues.any?
+ Sidekiq.redis do |conn|
+ conn.pipelined do
+ queues.each do |queue|
+ conn.sadd('queues', queue)
+ end
+ end
+ end
+ end
+ rescue ::Redis::BaseError, SocketError, Errno::ENOENT, Errno::EADDRNOTAVAIL, Errno::EAFNOSUPPORT, Errno::ECONNRESET, Errno::ECONNREFUSED
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_versioning/manager.rb b/lib/gitlab/sidekiq_versioning/manager.rb
new file mode 100644
index 00000000000..308be0fdf76
--- /dev/null
+++ b/lib/gitlab/sidekiq_versioning/manager.rb
@@ -0,0 +1,12 @@
+module Gitlab
+ module SidekiqVersioning
+ module Manager
+ def initialize(options = {})
+ options[:strict] = false
+ options[:queues] = SidekiqConfig.expand_queues(options[:queues])
+ Sidekiq.logger.info "Listening on queues #{options[:queues].uniq.sort}"
+ super
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/slash_commands/base_command.rb b/lib/gitlab/slash_commands/base_command.rb
index cc3c9a50555..466554e398c 100644
--- a/lib/gitlab/slash_commands/base_command.rb
+++ b/lib/gitlab/slash_commands/base_command.rb
@@ -31,10 +31,11 @@ module Gitlab
raise NotImplementedError
end
- attr_accessor :project, :current_user, :params
+ attr_accessor :project, :current_user, :params, :chat_name
- def initialize(project, user, params = {})
- @project, @current_user, @params = project, user, params.dup
+ def initialize(project, chat_name, params = {})
+ @project, @current_user, @params = project, chat_name.user, params.dup
+ @chat_name = chat_name
end
private
diff --git a/lib/gitlab/slash_commands/command.rb b/lib/gitlab/slash_commands/command.rb
index a78408b0519..85aaa6b0eba 100644
--- a/lib/gitlab/slash_commands/command.rb
+++ b/lib/gitlab/slash_commands/command.rb
@@ -13,12 +13,13 @@ module Gitlab
if command
if command.allowed?(project, current_user)
- command.new(project, current_user, params).execute(match)
+ command.new(project, chat_name, params).execute(match)
else
Gitlab::SlashCommands::Presenters::Access.new.access_denied
end
else
- Gitlab::SlashCommands::Help.new(project, current_user, params).execute(available_commands, params[:text])
+ Gitlab::SlashCommands::Help.new(project, chat_name, params)
+ .execute(available_commands, params[:text])
end
end
diff --git a/lib/gitlab/slash_commands/presenters/issue_base.rb b/lib/gitlab/slash_commands/presenters/issue_base.rb
index 341f2aabdd0..31c1e97efba 100644
--- a/lib/gitlab/slash_commands/presenters/issue_base.rb
+++ b/lib/gitlab/slash_commands/presenters/issue_base.rb
@@ -11,32 +11,36 @@ module Gitlab
end
def project
- @resource.project
+ resource.project
end
def author
- @resource.author
+ resource.author
end
def fields
[
{
title: "Assignee",
- value: @resource.assignees.any? ? @resource.assignees.first.name : "_None_",
+ value: resource.assignees.any? ? resource.assignees.first.name : "_None_",
short: true
},
{
title: "Milestone",
- value: @resource.milestone ? @resource.milestone.title : "_None_",
+ value: resource.milestone ? resource.milestone.title : "_None_",
short: true
},
{
title: "Labels",
- value: @resource.labels.any? ? @resource.label_names.join(', ') : "_None_",
+ value: resource.labels.any? ? resource.label_names.join(', ') : "_None_",
short: true
}
]
end
+
+ private
+
+ attr_reader :resource
end
end
end
diff --git a/lib/gitlab/snippet_search_results.rb b/lib/gitlab/snippet_search_results.rb
index b85f70e450e..4f86b3e8f73 100644
--- a/lib/gitlab/snippet_search_results.rb
+++ b/lib/gitlab/snippet_search_results.rb
@@ -16,7 +16,7 @@ module Gitlab
when 'snippet_blobs'
snippet_blobs.page(page).per(per_page)
else
- super
+ super(scope, nil, false)
end
end
diff --git a/lib/gitlab/sql/pattern.rb b/lib/gitlab/sql/pattern.rb
index 7c2d1d8f887..53744bad1f4 100644
--- a/lib/gitlab/sql/pattern.rb
+++ b/lib/gitlab/sql/pattern.rb
@@ -4,9 +4,15 @@ module Gitlab
extend ActiveSupport::Concern
MIN_CHARS_FOR_PARTIAL_MATCHING = 3
- REGEX_QUOTED_WORD = /(?<=^| )"[^"]+"(?= |$)/
+ REGEX_QUOTED_WORD = /(?<=\A| )"[^"]+"(?= |\z)/
class_methods do
+ def fuzzy_search(query, columns)
+ matches = columns.map { |col| fuzzy_arel_match(col, query) }.compact.reduce(:or)
+
+ where(matches)
+ end
+
def to_pattern(query)
if partial_matching?(query)
"%#{sanitize_sql_like(query)}%"
@@ -19,12 +25,29 @@ module Gitlab
query.length >= MIN_CHARS_FOR_PARTIAL_MATCHING
end
- def to_fuzzy_arel(column, query)
- words = select_fuzzy_words(query)
+ # column - The column name to search in.
+ # query - The text to search for.
+ # lower_exact_match - When set to `true` we'll fall back to using
+ # `LOWER(column) = query` instead of using `ILIKE`.
+ def fuzzy_arel_match(column, query, lower_exact_match: false)
+ query = query.squish
+ return nil unless query.present?
- matches = words.map { |word| arel_table[column].matches(to_pattern(word)) }
+ words = select_fuzzy_words(query)
- matches.reduce { |result, match| result.and(match) }
+ if words.any?
+ words.map { |word| arel_table[column].matches(to_pattern(word)) }.reduce(:and)
+ else
+ # No words of at least 3 chars, but we can search for an exact
+ # case insensitive match with the query as a whole
+ if lower_exact_match
+ Arel::Nodes::NamedFunction
+ .new('LOWER', [arel_table[column]])
+ .eq(query)
+ else
+ arel_table[column].matches(sanitize_sql_like(query))
+ end
+ end
end
def select_fuzzy_words(query)
@@ -32,7 +55,7 @@ module Gitlab
query = quoted_words.reduce(query) { |q, quoted_word| q.sub(quoted_word, '') }
- words = query.split(/\s+/)
+ words = query.split
quoted_words.map! { |quoted_word| quoted_word[1..-2] }
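A sketch of how the new fuzzy_search class method could be used by a model that includes Gitlab::SQL::Pattern; the model and column names are illustrative:

class Project < ActiveRecord::Base
  include Gitlab::SQL::Pattern

  def self.search(query)
    # ORs a fuzzy ILIKE match across both columns; words shorter than
    # MIN_CHARS_FOR_PARTIAL_MATCHING fall back to matching the whole query.
    fuzzy_search(query, [:name, :path])
  end
end

Project.search('gitlab ce')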
diff --git a/lib/gitlab/sql/union.rb b/lib/gitlab/sql/union.rb
index 222021e8802..c99b262f1ca 100644
--- a/lib/gitlab/sql/union.rb
+++ b/lib/gitlab/sql/union.rb
@@ -12,8 +12,9 @@ module Gitlab
#
# Project.where("id IN (#{sql})")
class Union
- def initialize(relations)
+ def initialize(relations, remove_duplicates: true)
@relations = relations
+ @remove_duplicates = remove_duplicates
end
def to_sql
@@ -25,7 +26,15 @@ module Gitlab
@relations.map { |rel| rel.reorder(nil).to_sql }.reject(&:blank?)
end
- fragments.join("\nUNION\n")
+ if fragments.any?
+ fragments.join("\n#{union_keyword}\n")
+ else
+ 'NULL'
+ end
+ end
+
+ def union_keyword
+ @remove_duplicates ? 'UNION' : 'UNION ALL'
end
end
end
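A brief sketch of the new keyword argument; with remove_duplicates: false the relations are combined with UNION ALL, which skips the de-duplication work:

relations = [Project.where(visibility_level: 0), Project.where(archived: true)]

Gitlab::SQL::Union.new(relations).to_sql                           # => "... UNION ..."
Gitlab::SQL::Union.new(relations, remove_duplicates: false).to_sql # => "... UNION ALL ..."
Gitlab::SQL::Union.new([]).to_sql                                  # => "NULL"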
diff --git a/lib/gitlab/ssh_public_key.rb b/lib/gitlab/ssh_public_key.rb
index 89ca1298120..6f63ea91ae8 100644
--- a/lib/gitlab/ssh_public_key.rb
+++ b/lib/gitlab/ssh_public_key.rb
@@ -21,6 +21,22 @@ module Gitlab
technology(name)&.supported_sizes
end
+ def self.sanitize(key_content)
+ ssh_type, *parts = key_content.strip.split
+
+ return key_content if parts.empty?
+
+ parts.each_with_object("#{ssh_type} ").with_index do |(part, content), index|
+ content << part
+
+ if Gitlab::SSHPublicKey.new(content).valid?
+ break [content, parts[index + 1]].compact.join(' ') # Add the comment part if present
+ elsif parts.size == index + 1 # return original content if we've reached the last element
+ break key_content
+ end
+ end
+ end
+
attr_reader :key_text, :key
# Unqualified MD5 fingerprint for compatibility
@@ -37,23 +53,23 @@ module Gitlab
end
def valid?
- key.present?
+ SSHKey.valid_ssh_public_key?(key_text)
end
def type
- technology.name if valid?
+ technology.name if key.present?
end
def bits
- return unless valid?
+ return if key.blank?
case type
when :rsa
- key.n.num_bits
+ key.n&.num_bits
when :dsa
- key.p.num_bits
+ key.p&.num_bits
when :ecdsa
- key.group.order.num_bits
+ key.group.order&.num_bits
when :ed25519
256
else
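A hedged example of the new sanitize helper; it re-joins a key whose base64 body was split by stray whitespace and keeps the trailing comment, returning the input unchanged when no valid key can be reassembled:

mangled = 'ssh-rsa AAAAB3Nza C1yc2EAAAADAQAB... user@host'   # illustrative, not a real key

Gitlab::SSHPublicKey.sanitize(mangled)
# => "ssh-rsa AAAAB3NzaC1yc2EAAAADAQAB... user@host" once the re-joined body validates
# => mangled (unchanged) if no prefix of the parts forms a valid key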
diff --git a/lib/gitlab/storage_check.rb b/lib/gitlab/storage_check.rb
new file mode 100644
index 00000000000..fe81513c9ec
--- /dev/null
+++ b/lib/gitlab/storage_check.rb
@@ -0,0 +1,11 @@
+require_relative 'storage_check/cli'
+require_relative 'storage_check/gitlab_caller'
+require_relative 'storage_check/option_parser'
+require_relative 'storage_check/response'
+
+module Gitlab
+ module StorageCheck
+ ENDPOINT = '/-/storage_check'.freeze
+ Options = Struct.new(:target, :token, :interval, :dryrun)
+ end
+end
diff --git a/lib/gitlab/storage_check/cli.rb b/lib/gitlab/storage_check/cli.rb
new file mode 100644
index 00000000000..9b64c8e033a
--- /dev/null
+++ b/lib/gitlab/storage_check/cli.rb
@@ -0,0 +1,71 @@
+module Gitlab
+ module StorageCheck
+ class CLI
+ def self.start!(args)
+ runner = new(Gitlab::StorageCheck::OptionParser.parse!(args))
+ runner.start_loop
+ end
+
+ attr_reader :logger, :options
+
+ def initialize(options)
+ @options = options
+ @logger = Logger.new(STDOUT)
+ end
+
+ def start_loop
+ logger.info "Checking #{options.target} every #{options.interval} seconds"
+
+ if options.dryrun
+ logger.info "Dryrun, exiting..."
+ return
+ end
+
+ begin
+ loop do
+ response = GitlabCaller.new(options).call!
+ log_response(response)
+ update_settings(response)
+
+ sleep options.interval
+ end
+ rescue Interrupt
+ logger.info "Ending storage-check"
+ end
+ end
+
+ def update_settings(response)
+ previous_interval = options.interval
+
+ if response.valid?
+ options.interval = response.check_interval || previous_interval
+ end
+
+ if previous_interval != options.interval
+ logger.info "Interval changed: #{options.interval} seconds"
+ end
+ end
+
+ def log_response(response)
+ unless response.valid?
+ return logger.error("Invalid response checking nfs storage: #{response.http_response.inspect}")
+ end
+
+ if response.responsive_shards.any?
+ logger.debug("Responsive shards: #{response.responsive_shards.join(', ')}")
+ end
+
+ warnings = []
+ if response.skipped_shards.any?
+ warnings << "Skipped shards: #{response.skipped_shards.join(', ')}"
+ end
+
+ if response.failing_shards.any?
+ warnings << "Failing shards: #{response.failing_shards.join(', ')}"
+ end
+
+ logger.warn(warnings.join(' - ')) if warnings.any?
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/storage_check/gitlab_caller.rb b/lib/gitlab/storage_check/gitlab_caller.rb
new file mode 100644
index 00000000000..44952b68844
--- /dev/null
+++ b/lib/gitlab/storage_check/gitlab_caller.rb
@@ -0,0 +1,39 @@
+require 'excon'
+
+module Gitlab
+ module StorageCheck
+ class GitlabCaller
+ def initialize(options)
+ @options = options
+ end
+
+ def call!
+ Gitlab::StorageCheck::Response.new(get_response)
+ rescue Errno::ECONNREFUSED, Excon::Error
+ # Server not ready, treated as invalid response.
+ Gitlab::StorageCheck::Response.new(nil)
+ end
+
+ def get_response
+ scheme, *other_parts = URI.split(@options.target)
+ socket_path = if scheme == 'unix'
+ other_parts.compact.join
+ end
+
+ connection = Excon.new(@options.target, socket: socket_path)
+ connection.post(path: Gitlab::StorageCheck::ENDPOINT,
+ headers: headers)
+ end
+
+ def headers
+ @headers ||= begin
+ headers = {}
+ headers['Content-Type'] = headers['Accept'] = 'application/json'
+ headers['TOKEN'] = @options.token if @options.token
+
+ headers
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/storage_check/option_parser.rb b/lib/gitlab/storage_check/option_parser.rb
new file mode 100644
index 00000000000..66ed7906f97
--- /dev/null
+++ b/lib/gitlab/storage_check/option_parser.rb
@@ -0,0 +1,39 @@
+module Gitlab
+ module StorageCheck
+ class OptionParser
+ def self.parse!(args)
+ # Start out with some defaults
+ options = Gitlab::StorageCheck::Options.new(nil, nil, 1, false)
+
+ parser = ::OptionParser.new do |opts|
+ opts.banner = "Usage: bin/storage_check [options]"
+
+ opts.on('-t=string', '--target string', 'URL or socket to trigger storage check') do |value|
+ options.target = value
+ end
+
+ opts.on('-T=string', '--token string', 'Health token to use') { |value| options.token = value }
+
+ opts.on('-i=n', '--interval n', ::OptionParser::DecimalInteger, 'Seconds between checks') do |value|
+ options.interval = value
+ end
+
+ opts.on('-d', '--dryrun', "Output what will be performed, but don't start the process") do |value|
+ options.dryrun = value
+ end
+ end
+ parser.parse!(args)
+
+ unless options.target
+ raise ::OptionParser::InvalidArgument.new('Provide a URI to provide checks')
+ end
+
+ if URI.parse(options.target).scheme.nil?
+ raise ::OptionParser::InvalidArgument.new('Add the scheme to the target, `unix://`, `https://` or `http://` are supported')
+ end
+
+ options
+ end
+ end
+ end
+end
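A small sketch of how bin/storage_check presumably drives the option parser; the socket path is illustrative:

options = Gitlab::StorageCheck::OptionParser.parse!(
  %w[--target unix:///var/opt/gitlab/gitlab-rails/sockets/gitlab.socket --interval 60]
)

options.target    # => "unix:///var/opt/gitlab/gitlab-rails/sockets/gitlab.socket"
options.interval  # => 60
options.dryrun    # => false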
diff --git a/lib/gitlab/storage_check/response.rb b/lib/gitlab/storage_check/response.rb
new file mode 100644
index 00000000000..326ab236e3e
--- /dev/null
+++ b/lib/gitlab/storage_check/response.rb
@@ -0,0 +1,77 @@
+require 'json'
+
+module Gitlab
+ module StorageCheck
+ class Response
+ attr_reader :http_response
+
+ def initialize(http_response)
+ @http_response = http_response
+ end
+
+ def valid?
+ @http_response && (200...299).cover?(@http_response.status) &&
+ @http_response.headers['Content-Type'].include?('application/json') &&
+ parsed_response
+ end
+
+ def check_interval
+ return nil unless parsed_response
+
+ parsed_response['check_interval']
+ end
+
+ def responsive_shards
+ divided_results[:responsive_shards]
+ end
+
+ def skipped_shards
+ divided_results[:skipped_shards]
+ end
+
+ def failing_shards
+ divided_results[:failing_shards]
+ end
+
+ private
+
+ def results
+ return [] unless parsed_response
+
+ parsed_response['results']
+ end
+
+ def divided_results
+ return @divided_results if @divided_results
+
+ @divided_results = {}
+ @divided_results[:responsive_shards] = []
+ @divided_results[:skipped_shards] = []
+ @divided_results[:failing_shards] = []
+
+ results.each do |info|
+ name = info['storage']
+
+ case info['success']
+ when true
+ @divided_results[:responsive_shards] << name
+ when false
+ @divided_results[:failing_shards] << name
+ else
+ @divided_results[:skipped_shards] << name
+ end
+ end
+
+ @divided_results
+ end
+
+ def parsed_response
+ return @parsed_response if defined?(@parsed_response)
+
+ @parsed_response = JSON.parse(@http_response.body)
+ rescue JSON::JSONError
+ @parsed_response = nil
+ end
+ end
+ end
+end
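
[Editor's note, not part of the diff] A sketch of how the response wrapper divides shard results; the OpenStruct stands in for an Excon response and is an assumption made purely for illustration:

require 'json'
require 'ostruct'

body = {
  'check_interval' => 30,
  'results' => [
    { 'storage' => 'default', 'success' => true },
    { 'storage' => 'broken',  'success' => false },
    { 'storage' => 'skipped', 'success' => nil }
  ]
}.to_json

http_response = OpenStruct.new(status: 200,
                               headers: { 'Content-Type' => 'application/json' },
                               body: body)

response = Gitlab::StorageCheck::Response.new(http_response)
response.valid?            # => truthy
response.check_interval    # => 30
response.responsive_shards # => ["default"]
response.failing_shards    # => ["broken"]
response.skipped_shards    # => ["skipped"]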
diff --git a/lib/gitlab/string_placeholder_replacer.rb b/lib/gitlab/string_placeholder_replacer.rb
new file mode 100644
index 00000000000..9a2219b7d77
--- /dev/null
+++ b/lib/gitlab/string_placeholder_replacer.rb
@@ -0,0 +1,27 @@
+module Gitlab
+ class StringPlaceholderReplacer
+ # This method accepts the following params
+ # - string: the string to be analyzed
+ # - placeholder_regex: i.e. /%{project_path|project_id|default_branch|commit_sha}/
+ # - block: this block will be called with each placeholder found in the string using
+ # the placeholder regex. If the result of the block is nil, the original
+ # placeholder will be returned.
+
+ def self.replace_string_placeholders(string, placeholder_regex = nil, &block)
+ return string if string.blank? || placeholder_regex.blank? || !block_given?
+
+ replace_placeholders(string, placeholder_regex, &block)
+ end
+
+ class << self
+ private
+
+ # If the result of the block is nil, then the placeholder is returned
+ def replace_placeholders(string, placeholder_regex, &block)
+ string.gsub(/%{(#{placeholder_regex})}/) do |arg|
+ yield($~[1]) || arg
+ end
+ end
+ end
+ end
+end
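
[Editor's note, not part of the diff] An illustrative call, assuming ActiveSupport's String#blank? is loaded as it is in the Rails app:

template = 'https://ci.example.com/%{project_path}/%{default_branch}/badge.svg'
values   = { 'project_path' => 'group/project', 'default_branch' => 'master' }

Gitlab::StringPlaceholderReplacer.replace_string_placeholders(template, /project_path|default_branch/) do |name|
  values[name] # returning nil here would leave the original %{...} placeholder untouched
end
# => "https://ci.example.com/group/project/master/badge.svg"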
diff --git a/lib/gitlab/string_range_marker.rb b/lib/gitlab/string_range_marker.rb
index 11aeec1ebfa..c6ad997a4d4 100644
--- a/lib/gitlab/string_range_marker.rb
+++ b/lib/gitlab/string_range_marker.rb
@@ -14,7 +14,7 @@ module Gitlab
end
def mark(marker_ranges)
- return rich_line unless marker_ranges
+ return rich_line unless marker_ranges&.any?
if html_escaped
rich_marker_ranges = []
@@ -90,6 +90,7 @@ module Gitlab
# Takes an array of integers, and returns an array of ranges covering the same integers
def collapse_ranges(positions)
return [] if positions.empty?
+
ranges = []
start = prev = positions[0]
diff --git a/lib/gitlab/string_regex_marker.rb b/lib/gitlab/string_regex_marker.rb
index 7ebf1c0428c..b19aa6dea35 100644
--- a/lib/gitlab/string_regex_marker.rb
+++ b/lib/gitlab/string_regex_marker.rb
@@ -1,13 +1,15 @@
module Gitlab
class StringRegexMarker < StringRangeMarker
def mark(regex, group: 0, &block)
- regex_match = raw_line.match(regex)
- return rich_line unless regex_match
+ ranges = []
- begin_index, end_index = regex_match.offset(group)
- name_range = begin_index..(end_index - 1)
+ raw_line.scan(regex) do
+ begin_index, end_index = Regexp.last_match.offset(group)
- super([name_range], &block)
+ ranges << (begin_index..(end_index - 1))
+ end
+
+ super(ranges, &block)
end
end
end
diff --git a/lib/tasks/gitlab/task_helpers.rb b/lib/gitlab/task_helpers.rb
index 8a63f486fa3..34bee6fecbe 100644
--- a/lib/tasks/gitlab/task_helpers.rb
+++ b/lib/gitlab/task_helpers.rb
@@ -1,10 +1,14 @@
require 'rainbow/ext/string'
+require 'gitlab/utils/strong_memoize'
+# rubocop:disable Rails/Output
module Gitlab
TaskFailedError = Class.new(StandardError)
TaskAbortedByUserError = Class.new(StandardError)
module TaskHelpers
+ include Gitlab::Utils::StrongMemoize
+
extend self
# Ask if the user wants to continue
@@ -93,11 +97,9 @@ module Gitlab
end
def gid_for(group_name)
- begin
- Etc.getgrnam(group_name).gid
- rescue ArgumentError # no group
- "group #{group_name} doesn't exist"
- end
+ Etc.getgrnam(group_name).gid
+ rescue ArgumentError # no group
+ "group #{group_name} doesn't exist"
end
def gitlab_user
@@ -105,16 +107,16 @@ module Gitlab
end
def gitlab_user?
- return @is_gitlab_user unless @is_gitlab_user.nil?
-
- current_user = run_command(%w(whoami)).chomp
- @is_gitlab_user = current_user == gitlab_user
+ strong_memoize(:is_gitlab_user) do
+ current_user = run_command(%w(whoami)).chomp
+ current_user == gitlab_user
+ end
end
def warn_user_is_not_gitlab
- return if @warned_user_not_gitlab
+ return if gitlab_user?
- unless gitlab_user?
+ strong_memoize(:warned_user_not_gitlab) do
current_user = run_command(%w(whoami)).chomp
puts " Warning ".color(:black).background(:yellow)
@@ -122,14 +124,12 @@ module Gitlab
puts " Things may work\/fail for the wrong reasons."
puts " For correct results you should run this as user #{gitlab_user.color(:magenta)}."
puts ""
-
- @warned_user_not_gitlab = true
end
end
def all_repos
Gitlab.config.repositories.storages.each_value do |repository_storage|
- IO.popen(%W(find #{repository_storage['path']} -mindepth 2 -maxdepth 2 -type d -name *.git)) do |find|
+ IO.popen(%W(find #{repository_storage['path']} -mindepth 2 -type d -name *.git)) do |find|
find.each_line do |path|
yield path.chomp
end
diff --git a/lib/gitlab/tcp_checker.rb b/lib/gitlab/tcp_checker.rb
new file mode 100644
index 00000000000..6e24e46d0ea
--- /dev/null
+++ b/lib/gitlab/tcp_checker.rb
@@ -0,0 +1,45 @@
+module Gitlab
+ class TcpChecker
+ attr_reader :remote_host, :remote_port, :local_host, :local_port, :error
+
+ def initialize(remote_host, remote_port, local_host = nil, local_port = nil)
+ @remote_host = remote_host
+ @remote_port = remote_port
+ @local_host = local_host
+ @local_port = local_port
+ end
+
+ def local
+ join_host_port(local_host, local_port)
+ end
+
+ def remote
+ join_host_port(remote_host, remote_port)
+ end
+
+ def check(timeout: 10)
+ Socket.tcp(
+ remote_host, remote_port,
+ local_host, local_port,
+ connect_timeout: timeout
+ ) do |sock|
+ @local_port, @local_host = Socket.unpack_sockaddr_in(sock.local_address)
+ @remote_port, @remote_host = Socket.unpack_sockaddr_in(sock.remote_address)
+ end
+
+ true
+ rescue => err
+ @error = err
+
+ false
+ end
+
+ private
+
+ def join_host_port(host, port)
+ host = "[#{host}]" if host.include?(':')
+
+ "#{host}:#{port}"
+ end
+ end
+end
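
[Editor's note, not part of the diff] A possible console usage of the checker; host and port are placeholders:

checker = Gitlab::TcpChecker.new('db.example.com', 5432)

if checker.check(timeout: 5)
  # After a successful connect, local/remote are rewritten from the actual socket addresses.
  puts "connected #{checker.local} -> #{checker.remote}"
else
  puts "connection failed: #{checker.error.class}: #{checker.error.message}"
end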
diff --git a/lib/gitlab/template/finders/repo_template_finder.rb b/lib/gitlab/template/finders/repo_template_finder.rb
index cb7957e2af9..33f07fa0120 100644
--- a/lib/gitlab/template/finders/repo_template_finder.rb
+++ b/lib/gitlab/template/finders/repo_template_finder.rb
@@ -18,6 +18,7 @@ module Gitlab
def read(path)
blob = @repository.blob_at(@commit.id, path) if @commit
raise FileNotFoundError if blob.nil?
+
blob.data
end
diff --git a/lib/gitlab/testing/request_blocker_middleware.rb b/lib/gitlab/testing/request_blocker_middleware.rb
index aa67fa08577..53333b9b06b 100644
--- a/lib/gitlab/testing/request_blocker_middleware.rb
+++ b/lib/gitlab/testing/request_blocker_middleware.rb
@@ -7,6 +7,7 @@ module Gitlab
class RequestBlockerMiddleware
@@num_active_requests = Concurrent::AtomicFixnum.new(0)
@@block_requests = Concurrent::AtomicBoolean.new(false)
+ @@slow_requests = Concurrent::AtomicBoolean.new(false)
# Returns the number of requests the server is currently processing.
def self.num_active_requests
@@ -19,9 +20,15 @@ module Gitlab
@@block_requests.value = true
end
+ # Slows down incoming requests (useful for race conditions).
+ def self.slow_requests!
+ @@slow_requests.value = true
+ end
+
# Allows the server to accept requests again.
def self.allow_requests!
@@block_requests.value = false
+ @@slow_requests.value = false
end
def initialize(app)
@@ -30,11 +37,14 @@ module Gitlab
def call(env)
increment_active_requests
+
if block_requests?
block_request(env)
else
+ sleep 0.2 if slow_requests?
@app.call(env)
end
+
ensure
decrement_active_requests
end
@@ -45,6 +55,10 @@ module Gitlab
@@block_requests.true?
end
+ def slow_requests?
+ @@slow_requests.true?
+ end
+
def block_request(env)
[503, {}, []]
end
diff --git a/lib/gitlab/testing/request_inspector_middleware.rb b/lib/gitlab/testing/request_inspector_middleware.rb
new file mode 100644
index 00000000000..e387667480d
--- /dev/null
+++ b/lib/gitlab/testing/request_inspector_middleware.rb
@@ -0,0 +1,71 @@
+# rubocop:disable Style/ClassVars
+
+module Gitlab
+ module Testing
+ class RequestInspectorMiddleware
+ @@log_requests = Concurrent::AtomicBoolean.new(false)
+ @@logged_requests = Concurrent::Array.new
+ @@inject_headers = Concurrent::Hash.new
+
+ # Resets the current request log and starts logging requests
+ def self.log_requests!(headers = {})
+ @@inject_headers.replace(headers)
+ @@logged_requests.replace([])
+ @@log_requests.value = true
+ end
+
+ # Stops logging requests
+ def self.stop_logging!
+ @@log_requests.value = false
+ end
+
+ def self.requests
+ @@logged_requests
+ end
+
+ def initialize(app)
+ @app = app
+ end
+
+ def call(env)
+ return @app.call(env) unless @@log_requests.true?
+
+ url = env['REQUEST_URI']
+ env.merge! http_headers_env(@@inject_headers) if @@inject_headers.any?
+ request_headers = env_http_headers(env)
+ status, headers, body = @app.call(env)
+
+ request = OpenStruct.new(
+ url: url,
+ status_code: status,
+ request_headers: request_headers,
+ response_headers: headers
+ )
+ log_request request
+
+ [status, headers, body]
+ end
+
+ private
+
+ def env_http_headers(env)
+ Hash[*env.select { |k, v| k.start_with? 'HTTP_' }
+ .collect { |k, v| [k.sub(/^HTTP_/, ''), v] }
+ .collect { |k, v| [k.split('_').collect(&:capitalize).join('-'), v] }
+ .sort
+ .flatten]
+ end
+
+ def http_headers_env(headers)
+ Hash[*headers
+ .collect { |k, v| [k.split('-').collect(&:upcase).join('_'), v] }
+ .collect { |k, v| [k.prepend('HTTP_'), v] }
+ .flatten]
+ end
+
+ def log_request(response)
+ @@logged_requests.push(response)
+ end
+ end
+ end
+end
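
[Editor's note, not part of the diff] In a feature spec this middleware could be driven roughly as follows; the injected header and the Capybara call are illustrative assumptions:

Gitlab::Testing::RequestInspectorMiddleware.log_requests!('X-Sendfile-Type' => 'X-Sendfile')

visit project_path(project) # every request issued from here on is recorded

Gitlab::Testing::RequestInspectorMiddleware.stop_logging!

Gitlab::Testing::RequestInspectorMiddleware.requests.each do |request|
  puts "#{request.status_code} #{request.url}"
  puts request.request_headers.inspect
end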
diff --git a/lib/gitlab/timeless.rb b/lib/gitlab/timeless.rb
index b290c716f97..76a1808c8ac 100644
--- a/lib/gitlab/timeless.rb
+++ b/lib/gitlab/timeless.rb
@@ -9,6 +9,7 @@ module Gitlab
else
block.call
end
+
ensure
model.record_timestamps = original_record_timestamps
end
diff --git a/lib/gitlab/upgrader.rb b/lib/gitlab/upgrader.rb
index 961df0468a4..024be6aca44 100644
--- a/lib/gitlab/upgrader.rb
+++ b/lib/gitlab/upgrader.rb
@@ -1,6 +1,3 @@
-require_relative "popen"
-require_relative "version_info"
-
module Gitlab
class Upgrader
def execute
@@ -12,6 +9,7 @@ module Gitlab
puts "You are using the latest GitLab version"
else
puts "Newer GitLab version is available"
+
answer = if ARGV.first == "-y"
"yes"
else
@@ -51,7 +49,7 @@ module Gitlab
def fetch_git_tags
remote_tags, _ = Gitlab::Popen.popen(%W(#{Gitlab.config.git.bin_path} ls-remote --tags https://gitlab.com/gitlab-org/gitlab-ce.git))
- remote_tags.split("\n").grep(/tags\/v#{current_version.major}/)
+ remote_tags.split("\n").grep(%r{tags/v#{current_version.major}})
end
def update_commands
@@ -77,6 +75,7 @@ module Gitlab
update_commands.each do |title, cmd|
puts title
puts " -> #{cmd.join(' ')}"
+
if system(env, *cmd)
puts " -> OK"
else
diff --git a/lib/gitlab/uploads_transfer.rb b/lib/gitlab/uploads_transfer.rb
index b5f41240529..7d7400bdabf 100644
--- a/lib/gitlab/uploads_transfer.rb
+++ b/lib/gitlab/uploads_transfer.rb
@@ -1,7 +1,7 @@
module Gitlab
class UploadsTransfer < ProjectTransfer
def root_dir
- File.join(CarrierWave.root, FileUploader.base_dir)
+ FileUploader.root
end
end
end
diff --git a/lib/gitlab/url_blocker.rb b/lib/gitlab/url_blocker.rb
index fee1a127fd7..13150ddab67 100644
--- a/lib/gitlab/url_blocker.rb
+++ b/lib/gitlab/url_blocker.rb
@@ -22,10 +22,12 @@ module Gitlab
return true if blocked_user_or_hostname?(uri.user)
return true if blocked_user_or_hostname?(uri.hostname)
- server_ips = Resolv.getaddresses(uri.hostname)
+ server_ips = Addrinfo.getaddrinfo(uri.hostname, 80, nil, :STREAM).map(&:ip_address)
return true if (blocked_ips & server_ips).any?
rescue Addressable::URI::InvalidURIError
return true
+ rescue SocketError
+ return false
end
false
diff --git a/lib/gitlab/url_sanitizer.rb b/lib/gitlab/url_sanitizer.rb
index 4e1ec1402ea..59331c827af 100644
--- a/lib/gitlab/url_sanitizer.rb
+++ b/lib/gitlab/url_sanitizer.rb
@@ -1,7 +1,9 @@
module Gitlab
class UrlSanitizer
+ ALLOWED_SCHEMES = %w[http https ssh git].freeze
+
def self.sanitize(content)
- regexp = URI::Parser.new.make_regexp(%w(http https ssh git))
+ regexp = URI::Parser.new.make_regexp(ALLOWED_SCHEMES)
content.gsub(regexp) { |url| new(url).masked_url }
rescue Addressable::URI::InvalidURIError
@@ -11,9 +13,9 @@ module Gitlab
def self.valid?(url)
return false unless url.present?
- Addressable::URI.parse(url.strip)
+ uri = Addressable::URI.parse(url.strip)
- true
+ ALLOWED_SCHEMES.include?(uri.scheme)
rescue Addressable::URI::InvalidURIError
false
end
@@ -68,6 +70,7 @@ module Gitlab
def generate_full_url
return @url unless valid_credentials?
+
@full_url = @url.dup
@full_url.password = credentials[:password] if credentials[:password].present?
diff --git a/lib/gitlab/usage_data.rb b/lib/gitlab/usage_data.rb
index 36708078136..37d3512990e 100644
--- a/lib/gitlab/usage_data.rb
+++ b/lib/gitlab/usage_data.rb
@@ -1,20 +1,35 @@
module Gitlab
class UsageData
class << self
- include Gitlab::CurrentSettings
-
def data(force_refresh: false)
Rails.cache.fetch('usage_data', force: force_refresh, expires_in: 2.weeks) { uncached_data }
end
def uncached_data
license_usage_data.merge(system_usage_data)
+ .merge(features_usage_data)
+ .merge(components_usage_data)
+ .merge(cycle_analytics_usage_data)
end
def to_json(force_refresh: false)
data(force_refresh: force_refresh).to_json
end
+ def license_usage_data
+ usage_data = {
+ uuid: Gitlab::CurrentSettings.uuid,
+ hostname: Gitlab.config.gitlab.host,
+ version: Gitlab::VERSION,
+ active_user_count: User.active.count,
+ recorded_at: Time.now,
+ mattermost_enabled: Gitlab.config.mattermost.enabled,
+ edition: 'CE'
+ }
+
+ usage_data
+ end
+
def system_usage_data
{
counts: {
@@ -32,6 +47,9 @@ module Gitlab
deploy_keys: DeployKey.count,
deployments: Deployment.count,
environments: ::Environment.count,
+ clusters: ::Clusters::Cluster.count,
+ clusters_enabled: ::Clusters::Cluster.enabled.count,
+ clusters_disabled: ::Clusters::Cluster.disabled.count,
in_review_folder: ::Environment.in_review_folder.count,
groups: Group.count,
issues: Issue.count,
@@ -54,18 +72,32 @@ module Gitlab
}
end
- def license_usage_data
- usage_data = {
- uuid: current_application_settings.uuid,
- hostname: Gitlab.config.gitlab.host,
- version: Gitlab::VERSION,
- active_user_count: User.active.count,
- recorded_at: Time.now,
- mattermost_enabled: Gitlab.config.mattermost.enabled,
- edition: 'CE'
+ def cycle_analytics_usage_data
+ Gitlab::CycleAnalytics::UsageData.new.to_json
+ end
+
+ def features_usage_data
+ features_usage_data_ce
+ end
+
+ def features_usage_data_ce
+ {
+ signup: Gitlab::CurrentSettings.allow_signup?,
+ ldap: Gitlab.config.ldap.enabled,
+ gravatar: Gitlab::CurrentSettings.gravatar_enabled?,
+ omniauth: Gitlab.config.omniauth.enabled,
+ reply_by_email: Gitlab::IncomingEmail.enabled?,
+ container_registry: Gitlab.config.registry.enabled,
+ gitlab_shared_runners: Gitlab.config.gitlab_ci.shared_runners_enabled
}
+ end
- usage_data
+ def components_usage_data
+ {
+ gitlab_pages: { enabled: Gitlab.config.pages.enabled, version: Gitlab::Pages::VERSION },
+ git: { version: Gitlab::Git.version },
+ database: { adapter: Gitlab::Database.adapter_name, version: Gitlab::Database.version }
+ }
end
def services_usage
diff --git a/lib/gitlab/user_access.rb b/lib/gitlab/user_access.rb
index d9a5af09f08..91b8bb2a83f 100644
--- a/lib/gitlab/user_access.rb
+++ b/lib/gitlab/user_access.rb
@@ -6,7 +6,8 @@ module Gitlab
[user&.id, project&.id]
end
- attr_reader :user, :project
+ attr_reader :user
+ attr_accessor :project
def initialize(user, project: nil)
@user = user
@@ -16,8 +17,10 @@ module Gitlab
def can_do_action?(action)
return false unless can_access_git?
- @permission_cache ||= {}
- @permission_cache[action] ||= user.can?(action, project)
+ permission_cache[action] =
+ permission_cache.fetch(action) do
+ user.can?(action, project)
+ end
end
def cannot_do_action?(action)
@@ -28,7 +31,7 @@ module Gitlab
return false unless can_access_git?
if user.requires_ldap_check? && user.try_obtain_ldap_lease
- return false unless Gitlab::LDAP::Access.allowed?(user)
+ return false unless Gitlab::Auth::LDAP::Access.allowed?(user)
end
true
@@ -62,7 +65,7 @@ module Gitlab
return false unless can_access_git?
if protected?(ProtectedBranch, project, ref)
- return true if project.empty_repo? && project.user_can_push_to_empty_repo?(user)
+ return true if project.user_can_push_to_empty_repo?(user)
protected_branch_accessible_to?(ref, action: :push)
else
@@ -88,6 +91,10 @@ module Gitlab
private
+ def permission_cache
+ @permission_cache ||= {}
+ end
+
def can_access_git?
user && user.can?(:access_git)
end
diff --git a/lib/gitlab/utils.rb b/lib/gitlab/utils.rb
index abb3d3a02c3..dc9391f32cf 100644
--- a/lib/gitlab/utils.rb
+++ b/lib/gitlab/utils.rb
@@ -27,6 +27,10 @@ module Gitlab
.gsub(/(\A-+|-+\z)/, '')
end
+ def remove_line_breaks(str)
+ str.gsub(/\r?\n/, '')
+ end
+
def to_boolean(value)
return value if [true, false].include?(value)
return true if value =~ /^(true|t|yes|y|1|on)$/i
@@ -46,5 +50,30 @@ module Gitlab
def random_string
Random.rand(Float::MAX.to_i).to_s(36)
end
+
+ # See: http://stackoverflow.com/questions/2108727/which-in-ruby-checking-if-program-exists-in-path-from-ruby
+ # Cross-platform way of finding an executable in the $PATH.
+ #
+ # which('ruby') #=> /usr/bin/ruby
+ def which(cmd, env = ENV)
+ exts = env['PATHEXT'] ? env['PATHEXT'].split(';') : ['']
+
+ env['PATH'].split(File::PATH_SEPARATOR).each do |path|
+ exts.each do |ext|
+ exe = File.join(path, "#{cmd}#{ext}")
+ return exe if File.executable?(exe) && !File.directory?(exe)
+ end
+ end
+
+ nil
+ end
+
+ # Used in EE
+ # Accepts either an Array or a String and returns an array
+ def ensure_array_from_string(string_or_array)
+ return string_or_array if string_or_array.is_a?(Array)
+
+ string_or_array.split(',').map(&:strip)
+ end
end
end
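
[Editor's note, not part of the diff] Quick illustrations of the new helpers; return values assume a typical Linux PATH:

Gitlab::Utils.which('git')                        # => "/usr/bin/git", or nil when not on PATH
Gitlab::Utils.remove_line_breaks("a\r\nb\nc")     # => "abc"
Gitlab::Utils.ensure_array_from_string('a, b,c')  # => ["a", "b", "c"]
Gitlab::Utils.ensure_array_from_string(%w[a b])   # => ["a", "b"] (arrays pass through unchanged)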
diff --git a/lib/gitlab/utils/merge_hash.rb b/lib/gitlab/utils/merge_hash.rb
new file mode 100644
index 00000000000..385141d44d0
--- /dev/null
+++ b/lib/gitlab/utils/merge_hash.rb
@@ -0,0 +1,117 @@
+module Gitlab
+ module Utils
+ module MergeHash
+ extend self
+ # Deep merges an array of hashes
+ #
+ # [{ hello: ["world"] },
+ # { hello: "Everyone" },
+ # { hello: { greetings: ['Bonjour', 'Hello', 'Hallo', 'Dzien dobry'] } },
+ # "Goodbye", "Hallo"]
+ # => [
+ # {
+ # hello:
+ # [
+ # "world",
+ # "Everyone",
+ # { greetings: ['Bonjour', 'Hello', 'Hallo', 'Dzien dobry'] }
+ # ]
+ # },
+ # "Goodbye"
+ # ]
+ def merge(elements)
+ merged, *other_elements = elements
+
+ other_elements.each do |element|
+ merged = merge_hash_tree(merged, element)
+ end
+
+ merged
+ end
+
+ # This extracts all keys and values from a hash into an array
+ #
+ # { hello: "world", this: { crushes: ["an entire", "hash"] } }
+ # => [:hello, "world", :this, :crushes, "an entire", "hash"]
+ def crush(array_or_hash)
+ if array_or_hash.is_a?(Array)
+ crush_array(array_or_hash)
+ else
+ crush_hash(array_or_hash)
+ end
+ end
+
+ private
+
+ def merge_hash_into_array(array, new_hash)
+ crushed_new_hash = crush_hash(new_hash)
+ # Merge the hash into an existing element of the array if there is overlap
+ if mergeable_index = array.index { |element| crushable?(element) && (crush(element) & crushed_new_hash).any? }
+ array[mergeable_index] = merge_hash_tree(array[mergeable_index], new_hash)
+ else
+ array << new_hash
+ end
+
+ array
+ end
+
+ def merge_hash_tree(first_element, second_element)
+ # If one of the elements is an object, and the other is a Hash or Array
+ # we can check if the object is already included. If so, we don't need to do anything
+ #
+ # Handled cases
+ # [Hash, Object], [Array, Object]
+ if crushable?(first_element) && crush(first_element).include?(second_element)
+ first_element
+ elsif crushable?(second_element) && crush(second_element).include?(first_element)
+ second_element
+ # When the first is an array, we need to go over every element to see if
+ # we can merge deeper. If no match is found, we add the element to the array
+ #
+ # Handled cases:
+ # [Array, Hash]
+ elsif first_element.is_a?(Array) && second_element.is_a?(Hash)
+ merge_hash_into_array(first_element, second_element)
+ elsif first_element.is_a?(Hash) && second_element.is_a?(Array)
+ merge_hash_into_array(second_element, first_element)
+ # If both of them are hashes, we can deep_merge with the same logic
+ #
+ # Handled cases:
+ # [Hash, Hash]
+ elsif first_element.is_a?(Hash) && second_element.is_a?(Hash)
+ first_element.deep_merge(second_element) { |key, first, second| merge_hash_tree(first, second) }
+ # If both elements are arrays, we try to merge each element separately
+ #
+ # Handled cases
+ # [Array, Array]
+ elsif first_element.is_a?(Array) && second_element.is_a?(Array)
+ first_element.map { |child_element| merge_hash_tree(child_element, second_element) }
+ # If one or both elements are a GroupDescendant, we create an array
+ # combining them.
+ #
+ # Handled cases:
+ # [Object, Object], [Array, Array]
+ else
+ (Array.wrap(first_element) + Array.wrap(second_element)).uniq
+ end
+ end
+
+ def crushable?(element)
+ element.is_a?(Hash) || element.is_a?(Array)
+ end
+
+ def crush_hash(hash)
+ hash.flat_map do |key, value|
+ crushed_value = crushable?(value) ? crush(value) : value
+ Array.wrap(key) + Array.wrap(crushed_value)
+ end
+ end
+
+ def crush_array(array)
+ array.flat_map do |element|
+ crushable?(element) ? crush(element) : element
+ end
+ end
+ end
+ end
+end
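
[Editor's note, not part of the diff] Running the example from the comment above through the module; deep_merge and Array.wrap come from ActiveSupport, which is loaded in the app:

elements = [
  { hello: ["world"] },
  { hello: "Everyone" },
  { hello: { greetings: ['Bonjour', 'Hello', 'Hallo', 'Dzien dobry'] } },
  "Goodbye", "Hallo"
]

Gitlab::Utils::MergeHash.merge(elements)
# => [{ hello: ["world", "Everyone", { greetings: [...] }] }, "Goodbye"]

Gitlab::Utils::MergeHash.crush(hello: "world", this: { crushes: ["an entire", "hash"] })
# => [:hello, "world", :this, :crushes, "an entire", "hash"]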
diff --git a/lib/gitlab/utils/override.rb b/lib/gitlab/utils/override.rb
new file mode 100644
index 00000000000..8bf6bcb1fe2
--- /dev/null
+++ b/lib/gitlab/utils/override.rb
@@ -0,0 +1,111 @@
+module Gitlab
+ module Utils
+ module Override
+ class Extension
+ def self.verify_class!(klass, method_name)
+ instance_method_defined?(klass, method_name) ||
+ raise(
+ NotImplementedError.new(
+ "#{klass}\##{method_name} doesn't exist!"))
+ end
+
+ def self.instance_method_defined?(klass, name, include_super: true)
+ klass.instance_methods(include_super).include?(name) ||
+ klass.private_instance_methods(include_super).include?(name)
+ end
+
+ attr_reader :subject
+
+ def initialize(subject)
+ @subject = subject
+ end
+
+ def add_method_name(method_name)
+ method_names << method_name
+ end
+
+ def add_class(klass)
+ classes << klass
+ end
+
+ def verify!
+ classes.each do |klass|
+ index = klass.ancestors.index(subject)
+ parents = klass.ancestors.drop(index + 1)
+
+ method_names.each do |method_name|
+ parents.any? do |parent|
+ self.class.instance_method_defined?(
+ parent, method_name, include_super: false)
+ end ||
+ raise(
+ NotImplementedError.new(
+ "#{klass}\##{method_name} doesn't exist!"))
+ end
+ end
+ end
+
+ private
+
+ def method_names
+ @method_names ||= []
+ end
+
+ def classes
+ @classes ||= []
+ end
+ end
+
+ # Instead of writing patterns like this:
+ #
+ # def f
+ # raise NotImplementedError unless defined?(super)
+ #
+ # true
+ # end
+ #
+ # We could write it like:
+ #
+ # extend ::Gitlab::Utils::Override
+ #
+ # override :f
+ # def f
+ # true
+ # end
+ #
+ # This would make sure we're overriding something. See:
+ # https://gitlab.com/gitlab-org/gitlab-ee/issues/1819
+ def override(method_name)
+ return unless ENV['STATIC_VERIFICATION']
+
+ if is_a?(Class)
+ Extension.verify_class!(self, method_name)
+ else # We delay the check for modules
+ Override.extensions[self] ||= Extension.new(self)
+ Override.extensions[self].add_method_name(method_name)
+ end
+ end
+
+ def included(base = nil)
+ return super if base.nil? # Rails concern, ignoring it
+
+ super
+
+ if base.is_a?(Class) # We could check for Class in `override`
+ # This could be `nil` if `override` was never called
+ Override.extensions[self]&.add_class(base)
+ end
+ end
+
+ alias_method :prepended, :included
+
+ def self.extensions
+ @extensions ||= {}
+ end
+
+ def self.verify!
+ extensions.values.each(&:verify!)
+ end
+ end
+ end
+end
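
[Editor's note, not part of the diff] A sketch of the intended usage; verification is a no-op unless STATIC_VERIFICATION is set (for example in a dedicated CI job), and the class names below are made up:

class Base
  def f
    :base
  end
end

module Patch
  extend ::Gitlab::Utils::Override

  override :f        # fine: Base#f exists further down the ancestor chain
  def f
    :patched
  end

  override :missing  # verify! raises NotImplementedError for this one when STATIC_VERIFICATION is set
  def missing
  end
end

Base.prepend(Patch)
::Gitlab::Utils::Override.verify! if ENV['STATIC_VERIFICATION']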
diff --git a/lib/gitlab/utils/strong_memoize.rb b/lib/gitlab/utils/strong_memoize.rb
new file mode 100644
index 00000000000..fe091f4611b
--- /dev/null
+++ b/lib/gitlab/utils/strong_memoize.rb
@@ -0,0 +1,41 @@
+module Gitlab
+ module Utils
+ module StrongMemoize
+ # Instead of writing patterns like this:
+ #
+ # def trigger_from_token
+ # return @trigger if defined?(@trigger)
+ #
+ # @trigger = Ci::Trigger.find_by_token(params[:token].to_s)
+ # end
+ #
+ # We could write it like:
+ #
+ # include Gitlab::Utils::StrongMemoize
+ #
+ # def trigger_from_token
+ # strong_memoize(:trigger) do
+ # Ci::Trigger.find_by_token(params[:token].to_s)
+ # end
+ # end
+ #
+ def strong_memoize(name)
+ if instance_variable_defined?(ivar(name))
+ instance_variable_get(ivar(name))
+ else
+ instance_variable_set(ivar(name), yield)
+ end
+ end
+
+ def clear_memoization(name)
+ remove_instance_variable(ivar(name)) if instance_variable_defined?(ivar(name))
+ end
+
+ private
+
+ def ivar(name)
+ "@#{name}"
+ end
+ end
+ end
+end
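
[Editor's note, not part of the diff] Unlike `@x ||= ...`, this also caches falsy results; a small illustrative class:

class ExpensiveLookup
  include Gitlab::Utils::StrongMemoize

  def license
    strong_memoize(:license) do
      puts 'expensive call'
      nil # nil is cached too, so the block runs only once
    end
  end
end

lookup = ExpensiveLookup.new
lookup.license                      # prints 'expensive call', returns nil
lookup.license                      # returns nil without re-running the block
lookup.clear_memoization(:license)  # the next call will run the block again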
diff --git a/lib/gitlab/verify/batch_verifier.rb b/lib/gitlab/verify/batch_verifier.rb
new file mode 100644
index 00000000000..1ef369a4b67
--- /dev/null
+++ b/lib/gitlab/verify/batch_verifier.rb
@@ -0,0 +1,64 @@
+module Gitlab
+ module Verify
+ class BatchVerifier
+ attr_reader :batch_size, :start, :finish
+
+ def initialize(batch_size:, start: nil, finish: nil)
+ @batch_size = batch_size
+ @start = start
+ @finish = finish
+ end
+
+ # Yields a Range of IDs and a Hash of failed verifications (object => error)
+ def run_batches(&blk)
+ relation.in_batches(of: batch_size, start: start, finish: finish) do |relation| # rubocop: disable Cop/InBatches
+ range = relation.first.id..relation.last.id
+ failures = run_batch(relation)
+
+ yield(range, failures)
+ end
+ end
+
+ def name
+ raise NotImplementedError.new
+ end
+
+ def describe(_object)
+ raise NotImplementedError.new
+ end
+
+ private
+
+ def run_batch(relation)
+ relation.map { |upload| verify(upload) }.compact.to_h
+ end
+
+ def verify(object)
+ expected = expected_checksum(object)
+ actual = actual_checksum(object)
+
+ raise 'Checksum missing' unless expected.present?
+ raise 'Checksum mismatch' unless expected == actual
+
+ nil
+ rescue => err
+ [object, err]
+ end
+
+ # This should return an ActiveRecord::Relation suitable for calling #in_batches on
+ def relation
+ raise NotImplementedError.new
+ end
+
+ # The checksum we expect the object to have
+ def expected_checksum(_object)
+ raise NotImplementedError.new
+ end
+
+ # The freshly-recalculated checksum of the object
+ def actual_checksum(_object)
+ raise NotImplementedError.new
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/verify/lfs_objects.rb b/lib/gitlab/verify/lfs_objects.rb
new file mode 100644
index 00000000000..fe51edbdeeb
--- /dev/null
+++ b/lib/gitlab/verify/lfs_objects.rb
@@ -0,0 +1,27 @@
+module Gitlab
+ module Verify
+ class LfsObjects < BatchVerifier
+ def name
+ 'LFS objects'
+ end
+
+ def describe(object)
+ "LFS object: #{object.oid}"
+ end
+
+ private
+
+ def relation
+ LfsObject.all
+ end
+
+ def expected_checksum(lfs_object)
+ lfs_object.oid
+ end
+
+ def actual_checksum(lfs_object)
+ LfsObject.calculate_oid(lfs_object.file.path)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/verify/rake_task.rb b/lib/gitlab/verify/rake_task.rb
new file mode 100644
index 00000000000..dd138e6b92b
--- /dev/null
+++ b/lib/gitlab/verify/rake_task.rb
@@ -0,0 +1,53 @@
+module Gitlab
+ module Verify
+ class RakeTask
+ def self.run!(verify_kls)
+ verifier = verify_kls.new(
+ batch_size: ENV.fetch('BATCH', 200).to_i,
+ start: ENV['ID_FROM'],
+ finish: ENV['ID_TO']
+ )
+
+ verbose = Gitlab::Utils.to_boolean(ENV['VERBOSE'])
+
+ new(verifier, verbose).run!
+ end
+
+ attr_reader :verifier, :output
+
+ def initialize(verifier, verbose)
+ @verifier = verifier
+ @verbose = verbose
+ end
+
+ def run!
+ say "Checking integrity of #{verifier.name}"
+
+ verifier.run_batches { |*args| run_batch(*args) }
+
+ say 'Done!'
+ end
+
+ def verbose?
+ !!@verbose
+ end
+
+ private
+
+ def say(text)
+ puts(text) # rubocop:disable Rails/Output
+ end
+
+ def run_batch(range, failures)
+ status_color = failures.empty? ? :green : :red
+ say "- #{range}: Failures: #{failures.count}".color(status_color)
+
+ return unless verbose?
+
+ failures.each do |object, error|
+ say " - #{verifier.describe(object)}: #{error.inspect}".color(:red)
+ end
+ end
+ end
+ end
+end
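
[Editor's note, not part of the diff] For illustration, a rake task body that wires a verifier into this runner; the task name is hypothetical and the batching knobs are read from the environment by run!:

namespace :gitlab do
  namespace :lfs do
    desc 'GitLab | Verify LFS object integrity'
    task check: :environment do
      # Tune with e.g. BATCH=100 ID_FROM=1 ID_TO=10000 VERBOSE=1
      Gitlab::Verify::RakeTask.run!(Gitlab::Verify::LfsObjects)
    end
  end
end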
diff --git a/lib/gitlab/verify/uploads.rb b/lib/gitlab/verify/uploads.rb
new file mode 100644
index 00000000000..6972e517ea5
--- /dev/null
+++ b/lib/gitlab/verify/uploads.rb
@@ -0,0 +1,27 @@
+module Gitlab
+ module Verify
+ class Uploads < BatchVerifier
+ def name
+ 'Uploads'
+ end
+
+ def describe(object)
+ "Upload: #{object.id}"
+ end
+
+ private
+
+ def relation
+ Upload.all
+ end
+
+ def expected_checksum(upload)
+ upload.checksum
+ end
+
+ def actual_checksum(upload)
+ Upload.hexdigest(upload.absolute_path)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/view/presenter/factory.rb b/lib/gitlab/view/presenter/factory.rb
index d172d61e2c9..570f0723e39 100644
--- a/lib/gitlab/view/presenter/factory.rb
+++ b/lib/gitlab/view/presenter/factory.rb
@@ -16,7 +16,7 @@ module Gitlab
attr_reader :subject, :attributes
def presenter_class
- "#{subject.class.name}Presenter".constantize
+ attributes.delete(:presenter_class) { "#{subject.class.name}Presenter".constantize }
end
end
end
diff --git a/lib/gitlab/visibility_level.rb b/lib/gitlab/visibility_level.rb
index c60bd91ea6e..2612208a927 100644
--- a/lib/gitlab/visibility_level.rb
+++ b/lib/gitlab/visibility_level.rb
@@ -5,7 +5,6 @@
#
module Gitlab
module VisibilityLevel
- extend CurrentSettings
extend ActiveSupport::Concern
included do
@@ -57,11 +56,17 @@ module Gitlab
}
end
- def highest_allowed_level
- restricted_levels = current_application_settings.restricted_visibility_levels
+ def allowed_levels
+ restricted_levels = Gitlab::CurrentSettings.restricted_visibility_levels
- allowed_levels = self.values - restricted_levels
- allowed_levels.max || PRIVATE
+ self.values - Array(restricted_levels)
+ end
+
+ def closest_allowed_level(target_level)
+ highest_allowed_level = allowed_levels.select { |level| level <= target_level }.max
+
+ # If all levels are restricted, fall back to PRIVATE
+ highest_allowed_level || PRIVATE
end
def allowed_for?(user, level)
@@ -75,7 +80,7 @@ module Gitlab
end
def non_restricted_level?(level)
- restricted_levels = current_application_settings.restricted_visibility_levels
+ restricted_levels = Gitlab::CurrentSettings.restricted_visibility_levels
if restricted_levels.nil?
true
@@ -99,6 +104,7 @@ module Gitlab
def level_value(level)
return level.to_i if level.to_i.to_s == level.to_s && string_options.key(level.to_i)
+
string_options[level] || PRIVATE
end
diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb
index 17550cf9074..823df67ea39 100644
--- a/lib/gitlab/workhorse.rb
+++ b/lib/gitlab/workhorse.rb
@@ -16,15 +16,16 @@ module Gitlab
SECRET_LENGTH = 32
class << self
- def git_http_ok(repository, is_wiki, user, action)
+ def git_http_ok(repository, is_wiki, user, action, show_all_refs: false)
project = repository.project
repo_path = repository.path_to_repo
params = {
GL_ID: Gitlab::GlId.gl_id(user),
GL_REPOSITORY: Gitlab::GlRepository.gl_repository(project, is_wiki),
- RepoPath: repo_path
+ GL_USERNAME: user&.username,
+ RepoPath: repo_path,
+ ShowAllRefs: show_all_refs
}
-
server = {
address: Gitlab::GitalyClient.address(project.repository_storage),
token: Gitlab::GitalyClient.token(project.repository_storage)
@@ -33,7 +34,10 @@ module Gitlab
feature_enabled = case action.to_s
when 'git_receive_pack'
- Gitlab::GitalyClient.feature_enabled?(:post_receive_pack)
+ Gitlab::GitalyClient.feature_enabled?(
+ :post_receive_pack,
+ status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT
+ )
when 'git_upload_pack'
true
when 'info_refs'
@@ -41,6 +45,7 @@ module Gitlab
else
raise "Unsupported action: #{action}"
end
+
if feature_enabled
params[:GitalyServer] = server
end
@@ -50,14 +55,14 @@ module Gitlab
def lfs_upload_ok(oid, size)
{
- StoreLFSPath: "#{Gitlab.config.lfs.storage_path}/tmp/upload",
+ StoreLFSPath: LfsObjectUploader.workhorse_upload_path,
LfsOid: oid,
LfsSize: size
}
end
def artifact_upload_ok
- { TempPath: ArtifactUploader.artifacts_upload_path }
+ { TempPath: JobArtifactUploader.workhorse_upload_path }
end
def send_git_blob(repository, blob)
@@ -89,6 +94,16 @@ module Gitlab
params = repository.archive_metadata(ref, Gitlab.config.gitlab.repository_downloads_path, format)
raise "Repository or ref not found" if params.empty?
+ if Gitlab::GitalyClient.feature_enabled?(:workhorse_archive)
+ params.merge!(
+ 'GitalyServer' => gitaly_server_hash(repository),
+ 'GitalyRepository' => repository.gitaly_repository.to_h
+ )
+ end
+
+ # If present, DisableCache must be a Boolean. Otherwise Workhorse ignores it.
+ params['DisableCache'] = true if git_archive_cache_disabled?
+
[
SEND_DATA_HEADER,
"git-archive:#{encode(params)}"
@@ -96,11 +111,16 @@ module Gitlab
end
def send_git_diff(repository, diff_refs)
- params = {
- 'RepoPath' => repository.path_to_repo,
- 'ShaFrom' => diff_refs.base_sha,
- 'ShaTo' => diff_refs.head_sha
- }
+ params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_send_git_diff)
+ {
+ 'GitalyServer' => gitaly_server_hash(repository),
+ 'RawDiffRequest' => Gitaly::RawDiffRequest.new(
+ gitaly_diff_or_patch_hash(repository, diff_refs)
+ ).to_json
+ }
+ else
+ workhorse_diff_or_patch_hash(repository, diff_refs)
+ end
[
SEND_DATA_HEADER,
@@ -109,11 +129,16 @@ module Gitlab
end
def send_git_patch(repository, diff_refs)
- params = {
- 'RepoPath' => repository.path_to_repo,
- 'ShaFrom' => diff_refs.base_sha,
- 'ShaTo' => diff_refs.head_sha
- }
+ params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_send_git_patch)
+ {
+ 'GitalyServer' => gitaly_server_hash(repository),
+ 'RawPatchRequest' => Gitaly::RawPatchRequest.new(
+ gitaly_diff_or_patch_hash(repository, diff_refs)
+ ).to_json
+ }
+ else
+ workhorse_diff_or_patch_hash(repository, diff_refs)
+ end
[
SEND_DATA_HEADER,
@@ -122,8 +147,11 @@ module Gitlab
end
def send_artifacts_entry(build, entry)
+ file = build.artifacts_file
+ archive = file.file_storage? ? file.path : file.url
+
params = {
- 'Archive' => build.artifacts_file.path,
+ 'Archive' => archive,
'Entry' => Base64.encode64(entry.to_s)
}
@@ -133,6 +161,18 @@ module Gitlab
]
end
+ def send_url(url, allow_redirects: false)
+ params = {
+ 'URL' => url,
+ 'AllowRedirects' => allow_redirects
+ }
+
+ [
+ SEND_DATA_HEADER,
+ "send-url:#{encode(params)}"
+ ]
+ end
+
def terminal_websocket(terminal)
details = {
'Terminal' => {
@@ -156,6 +196,7 @@ module Gitlab
@secret ||= begin
bytes = Base64.strict_decode64(File.read(secret_path).chomp)
raise "#{secret_path} does not contain #{SECRET_LENGTH} bytes" if bytes.length != SECRET_LENGTH
+
bytes
end
end
@@ -209,6 +250,26 @@ module Gitlab
token: Gitlab::GitalyClient.token(repository.project.repository_storage)
}
end
+
+ def workhorse_diff_or_patch_hash(repository, diff_refs)
+ {
+ 'RepoPath' => repository.path_to_repo,
+ 'ShaFrom' => diff_refs.base_sha,
+ 'ShaTo' => diff_refs.head_sha
+ }
+ end
+
+ def gitaly_diff_or_patch_hash(repository, diff_refs)
+ {
+ repository: repository.gitaly_repository,
+ left_commit_id: diff_refs.base_sha,
+ right_commit_id: diff_refs.head_sha
+ }
+ end
+
+ def git_archive_cache_disabled?
+ ENV['WORKHORSE_ARCHIVE_CACHE_DISABLED'].present? || Feature.enabled?(:workhorse_archive_cache_disabled)
+ end
end
end
end
diff --git a/lib/google_api/auth.rb b/lib/google_api/auth.rb
new file mode 100644
index 00000000000..99a82c849e0
--- /dev/null
+++ b/lib/google_api/auth.rb
@@ -0,0 +1,54 @@
+module GoogleApi
+ class Auth
+ attr_reader :access_token, :redirect_uri, :state
+
+ ConfigMissingError = Class.new(StandardError)
+
+ def initialize(access_token, redirect_uri, state: nil)
+ @access_token = access_token
+ @redirect_uri = redirect_uri
+ @state = state
+ end
+
+ def authorize_url
+ client.auth_code.authorize_url(
+ redirect_uri: redirect_uri,
+ scope: scope,
+ state: state # This is used for arbitrary redirection
+ )
+ end
+
+ def get_token(code)
+ ret = client.auth_code.get_token(code, redirect_uri: redirect_uri)
+ return ret.token, ret.expires_at
+ end
+
+ protected
+
+ def scope
+ raise NotImplementedError
+ end
+
+ private
+
+ def config
+ Gitlab.config.omniauth.providers.find { |provider| provider.name == "google_oauth2" }
+ end
+
+ def client
+ return @client if defined?(@client)
+
+ unless config
+ raise ConfigMissingError
+ end
+
+ @client = ::OAuth2::Client.new(
+ config.app_id,
+ config.app_secret,
+ site: 'https://accounts.google.com',
+ token_url: '/o/oauth2/token',
+ authorize_url: '/o/oauth2/auth'
+ )
+ end
+ end
+end
diff --git a/lib/google_api/cloud_platform/client.rb b/lib/google_api/cloud_platform/client.rb
new file mode 100644
index 00000000000..f30dd995695
--- /dev/null
+++ b/lib/google_api/cloud_platform/client.rb
@@ -0,0 +1,115 @@
+require 'google/apis/container_v1'
+require 'google/apis/cloudbilling_v1'
+require 'google/apis/cloudresourcemanager_v1'
+
+module GoogleApi
+ module CloudPlatform
+ class Client < GoogleApi::Auth
+ SCOPE = 'https://www.googleapis.com/auth/cloud-platform'.freeze
+ LEAST_TOKEN_LIFE_TIME = 10.minutes
+
+ class << self
+ def session_key_for_token
+ :cloud_platform_access_token
+ end
+
+ def session_key_for_expires_at
+ :cloud_platform_expires_at
+ end
+
+ def new_session_key_for_redirect_uri
+ SecureRandom.hex.tap do |state|
+ yield session_key_for_redirect_uri(state)
+ end
+ end
+
+ def session_key_for_redirect_uri(state)
+ "cloud_platform_second_redirect_uri_#{state}"
+ end
+ end
+
+ def scope
+ SCOPE
+ end
+
+ def validate_token(expires_at)
+ return false unless access_token
+ return false unless expires_at
+
+ # Make sure the token will still be alive while the cluster is being created.
+ return false if token_life_time(expires_at) < LEAST_TOKEN_LIFE_TIME
+
+ true
+ end
+
+ def projects_list
+ service = Google::Apis::CloudresourcemanagerV1::CloudResourceManagerService.new
+ service.authorization = access_token
+
+ service.fetch_all(items: :projects) do |token|
+ service.list_projects(page_token: token, options: user_agent_header)
+ end
+ end
+
+ def projects_get_billing_info(project_id)
+ service = Google::Apis::CloudbillingV1::CloudbillingService.new
+ service.authorization = access_token
+
+ service.get_project_billing_info("projects/#{project_id}", options: user_agent_header)
+ end
+
+ def projects_zones_clusters_get(project_id, zone, cluster_id)
+ service = Google::Apis::ContainerV1::ContainerService.new
+ service.authorization = access_token
+
+ service.get_zone_cluster(project_id, zone, cluster_id, options: user_agent_header)
+ end
+
+ def projects_zones_clusters_create(project_id, zone, cluster_name, cluster_size, machine_type:)
+ service = Google::Apis::ContainerV1::ContainerService.new
+ service.authorization = access_token
+
+ request_body = Google::Apis::ContainerV1::CreateClusterRequest.new(
+ {
+ "cluster": {
+ "name": cluster_name,
+ "initial_node_count": cluster_size,
+ "node_config": {
+ "machine_type": machine_type
+ },
+ "legacy_abac": {
+ "enabled": true
+ }
+ }
+ }
+ )
+
+ service.create_cluster(project_id, zone, request_body, options: user_agent_header)
+ end
+
+ def projects_zones_operations(project_id, zone, operation_id)
+ service = Google::Apis::ContainerV1::ContainerService.new
+ service.authorization = access_token
+
+ service.get_zone_operation(project_id, zone, operation_id, options: user_agent_header)
+ end
+
+ def parse_operation_id(self_link)
+ m = self_link.match(%r{projects/.*/zones/.*/operations/(.*)})
+ m[1] if m
+ end
+
+ private
+
+ def token_life_time(expires_at)
+ DateTime.strptime(expires_at, '%s').to_time.utc - Time.now.utc
+ end
+
+ def user_agent_header
+ Google::Apis::RequestOptions.new.tap do |options|
+ options.header = { 'User-Agent': "GitLab/#{Gitlab::VERSION.match('(\d+\.\d+)').captures.first} (GPN:GitLab;)" }
+ end
+ end
+ end
+ end
+end
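
[Editor's note, not part of the diff] A sketch of driving the wrapper from application code; access_token and expires_at would come from the session after the OAuth flow in GoogleApi::Auth, and the project/zone values are placeholders:

client = GoogleApi::CloudPlatform::Client.new(access_token, nil)

if client.validate_token(expires_at)
  operation = client.projects_zones_clusters_create(
    'my-gcp-project', 'us-central1-a', 'gitlab-cluster', 3,
    machine_type: 'n1-standard-2'
  )

  operation_id = client.parse_operation_id(operation.self_link)
  client.projects_zones_operations('my-gcp-project', 'us-central1-a', operation_id)
end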
diff --git a/lib/haml_lint/inline_javascript.rb b/lib/haml_lint/inline_javascript.rb
index 05668c69006..4f776330e80 100644
--- a/lib/haml_lint/inline_javascript.rb
+++ b/lib/haml_lint/inline_javascript.rb
@@ -9,6 +9,13 @@ unless Rails.env.production?
def visit_filter(node)
return unless node.filter_type == 'javascript'
+
+ record_lint(node, 'Inline JavaScript is discouraged (https://docs.gitlab.com/ee/development/gotchas.html#do-not-use-inline-javascript-in-views)')
+ end
+
+ def visit_tag(node)
+ return unless node.tag_name == 'script'
+
record_lint(node, 'Inline JavaScript is discouraged (https://docs.gitlab.com/ee/development/gotchas.html#do-not-use-inline-javascript-in-views)')
end
end
diff --git a/lib/milestone_array.rb b/lib/milestone_array.rb
new file mode 100644
index 00000000000..4ed8485b36a
--- /dev/null
+++ b/lib/milestone_array.rb
@@ -0,0 +1,40 @@
+module MilestoneArray
+ class << self
+ def sort(array, sort_method)
+ case sort_method
+ when 'due_date_asc'
+ sort_asc_nulls_last(array, 'due_date')
+ when 'due_date_desc'
+ sort_desc_nulls_last(array, 'due_date')
+ when 'start_date_asc'
+ sort_asc_nulls_last(array, 'start_date')
+ when 'start_date_desc'
+ sort_desc_nulls_last(array, 'start_date')
+ when 'name_asc'
+ sort_asc(array, 'title')
+ when 'name_desc'
+ sort_asc(array, 'title').reverse
+ else
+ array
+ end
+ end
+
+ private
+
+ def sort_asc_nulls_last(array, attribute)
+ attribute = attribute.to_sym
+
+ array.select(&attribute).sort_by(&attribute) + array.reject(&attribute)
+ end
+
+ def sort_desc_nulls_last(array, attribute)
+ attribute = attribute.to_sym
+
+ array.select(&attribute).sort_by(&attribute).reverse + array.reject(&attribute)
+ end
+
+ def sort_asc(array, attribute)
+ array.sort_by(&attribute.to_sym)
+ end
+ end
+end
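
[Editor's note, not part of the diff] An in-memory example of the sorter; OpenStruct stands in for milestone objects, and items without a due date sort last:

require 'date'
require 'ostruct'

milestones = [
  OpenStruct.new(title: 'v2.0', due_date: Date.new(2018, 6, 1)),
  OpenStruct.new(title: 'v1.0', due_date: Date.new(2018, 3, 1)),
  OpenStruct.new(title: 'Backlog', due_date: nil)
]

MilestoneArray.sort(milestones, 'due_date_asc').map(&:title)
# => ["v1.0", "v2.0", "Backlog"]
MilestoneArray.sort(milestones, 'name_desc').map(&:title)
# => ["v2.0", "v1.0", "Backlog"]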
diff --git a/lib/omni_auth/strategies/bitbucket.rb b/lib/omni_auth/strategies/bitbucket.rb
index 5a7d67c2390..ce1bdfe6ee4 100644
--- a/lib/omni_auth/strategies/bitbucket.rb
+++ b/lib/omni_auth/strategies/bitbucket.rb
@@ -36,6 +36,10 @@ module OmniAuth
email_response = access_token.get('api/2.0/user/emails').parsed
@emails ||= email_response && email_response['values'] || nil
end
+
+ def callback_url
+ options[:redirect_uri] || (full_host + script_name + callback_path)
+ end
end
end
end
diff --git a/lib/peek/views/gitaly.rb b/lib/peek/views/gitaly.rb
new file mode 100644
index 00000000000..d519d8e86fa
--- /dev/null
+++ b/lib/peek/views/gitaly.rb
@@ -0,0 +1,34 @@
+module Peek
+ module Views
+ class Gitaly < View
+ def duration
+ ::Gitlab::GitalyClient.query_time
+ end
+
+ def calls
+ ::Gitlab::GitalyClient.get_request_count
+ end
+
+ def results
+ { duration: formatted_duration, calls: calls }
+ end
+
+ private
+
+ def formatted_duration
+ ms = duration * 1000
+ if ms >= 1000
+ "%.2fms" % ms
+ else
+ "%.0fms" % ms
+ end
+ end
+
+ def setup_subscribers
+ subscribe 'start_processing.action_controller' do
+ ::Gitlab::GitalyClient.query_time = 0
+ end
+ end
+ end
+ end
+end
diff --git a/lib/rouge/lexers/math.rb b/lib/rouge/lexers/math.rb
deleted file mode 100644
index 939b23a3421..00000000000
--- a/lib/rouge/lexers/math.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-module Rouge
- module Lexers
- class Math < PlainText
- title "A passthrough lexer used for LaTeX input"
- desc "PLEASE REFACTOR - this should be handled by SyntaxHighlightFilter"
- tag 'math'
- end
- end
-end
diff --git a/lib/rouge/lexers/plantuml.rb b/lib/rouge/lexers/plantuml.rb
deleted file mode 100644
index 63c461764fc..00000000000
--- a/lib/rouge/lexers/plantuml.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-module Rouge
- module Lexers
- class Plantuml < PlainText
- title "A passthrough lexer used for PlantUML input"
- desc "PLEASE REFACTOR - this should be handled by SyntaxHighlightFilter"
- tag 'plantuml'
- end
- end
-end
diff --git a/lib/rspec_flaky/config.rb b/lib/rspec_flaky/config.rb
new file mode 100644
index 00000000000..a17ae55910e
--- /dev/null
+++ b/lib/rspec_flaky/config.rb
@@ -0,0 +1,21 @@
+require 'json'
+
+module RspecFlaky
+ class Config
+ def self.generate_report?
+ ENV['FLAKY_RSPEC_GENERATE_REPORT'] == 'true'
+ end
+
+ def self.suite_flaky_examples_report_path
+ ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join("rspec_flaky/suite-report.json")
+ end
+
+ def self.flaky_examples_report_path
+ ENV['FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join("rspec_flaky/report.json")
+ end
+
+ def self.new_flaky_examples_report_path
+ ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join("rspec_flaky/new-report.json")
+ end
+ end
+end
diff --git a/lib/rspec_flaky/flaky_example.rb b/lib/rspec_flaky/flaky_example.rb
index f81fb90e870..6be24014d89 100644
--- a/lib/rspec_flaky/flaky_example.rb
+++ b/lib/rspec_flaky/flaky_example.rb
@@ -9,24 +9,21 @@ module RspecFlaky
line: example.line,
description: example.description,
last_attempts_count: example.attempts,
- flaky_reports: 1)
+ flaky_reports: 0)
else
super
end
end
- def first_flaky_at
- self[:first_flaky_at] || Time.now
- end
-
- def last_flaky_at
- Time.now
- end
+ def update_flakiness!(last_attempts_count: nil)
+ self.first_flaky_at ||= Time.now
+ self.last_flaky_at = Time.now
+ self.flaky_reports += 1
+ self.last_attempts_count = last_attempts_count if last_attempts_count
- def last_flaky_job
- return unless ENV['CI_PROJECT_URL'] && ENV['CI_JOB_ID']
-
- "#{ENV['CI_PROJECT_URL']}/-/jobs/#{ENV['CI_JOB_ID']}"
+ if ENV['CI_PROJECT_URL'] && ENV['CI_JOB_ID']
+ self.last_flaky_job = "#{ENV['CI_PROJECT_URL']}/-/jobs/#{ENV['CI_JOB_ID']}"
+ end
end
def to_h
diff --git a/lib/rspec_flaky/flaky_examples_collection.rb b/lib/rspec_flaky/flaky_examples_collection.rb
new file mode 100644
index 00000000000..973c95b0212
--- /dev/null
+++ b/lib/rspec_flaky/flaky_examples_collection.rb
@@ -0,0 +1,37 @@
+require 'json'
+
+module RspecFlaky
+ class FlakyExamplesCollection < SimpleDelegator
+ def self.from_json(json)
+ new(JSON.parse(json))
+ end
+
+ def initialize(collection = {})
+ unless collection.is_a?(Hash)
+ raise ArgumentError, "`collection` must be a Hash, #{collection.class} given!"
+ end
+
+ collection_of_flaky_examples =
+ collection.map do |uid, example|
+ [
+ uid,
+ example.is_a?(RspecFlaky::FlakyExample) ? example : RspecFlaky::FlakyExample.new(example)
+ ]
+ end
+
+ super(Hash[collection_of_flaky_examples])
+ end
+
+ def to_report
+ Hash[map { |uid, example| [uid, example.to_h] }].deep_symbolize_keys
+ end
+
+ def -(other)
+ unless other.respond_to?(:key?)
+ raise ArgumentError, "`other` must respond to `#key?`, #{other.class} does not!"
+ end
+
+ self.class.new(reject { |uid, _| other.key?(uid) })
+ end
+ end
+end
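
[Editor's note, not part of the diff] The collection is what the listener below diffs to find newly flaky examples; roughly:

require 'json'

suite_json  = File.read(RspecFlaky::Config.suite_flaky_examples_report_path)
suite_flaky = RspecFlaky::FlakyExamplesCollection.from_json(suite_json)

run_flaky = RspecFlaky::FlakyExamplesCollection.new # filled by Listener#example_passed
new_flaky = run_flaky - suite_flaky                 # keeps only UIDs not already tracked

puts JSON.pretty_generate(new_flaky.to_report)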
diff --git a/lib/rspec_flaky/listener.rb b/lib/rspec_flaky/listener.rb
index ec2fbd9e36c..4a5bfec9967 100644
--- a/lib/rspec_flaky/listener.rb
+++ b/lib/rspec_flaky/listener.rb
@@ -2,11 +2,15 @@ require 'json'
module RspecFlaky
class Listener
- attr_reader :all_flaky_examples, :new_flaky_examples
-
- def initialize
- @new_flaky_examples = {}
- @all_flaky_examples = init_all_flaky_examples
+ # - suite_flaky_examples: contains all the currently tracked flaky examples
+ # for the whole RSpec suite
+ # - flaky_examples: contains the examples detected as flaky during the
+ # current RSpec run
+ attr_reader :suite_flaky_examples, :flaky_examples
+
+ def initialize(suite_flaky_examples_json = nil)
+ @flaky_examples = FlakyExamplesCollection.new
+ @suite_flaky_examples = init_suite_flaky_examples(suite_flaky_examples_json)
end
def example_passed(notification)
@@ -14,29 +18,21 @@ module RspecFlaky
return unless current_example.attempts > 1
- flaky_example_hash = all_flaky_examples[current_example.uid]
-
- all_flaky_examples[current_example.uid] =
- if flaky_example_hash
- FlakyExample.new(flaky_example_hash).tap do |ex|
- ex.last_attempts_count = current_example.attempts
- ex.flaky_reports += 1
- end
- else
- FlakyExample.new(current_example).tap do |ex|
- new_flaky_examples[current_example.uid] = ex
- end
- end
+ flaky_example = suite_flaky_examples.fetch(current_example.uid) { FlakyExample.new(current_example) }
+ flaky_example.update_flakiness!(last_attempts_count: current_example.attempts)
+
+ flaky_examples[current_example.uid] = flaky_example
end
def dump_summary(_)
- write_report_file(all_flaky_examples, all_flaky_examples_report_path)
+ write_report_file(flaky_examples, RspecFlaky::Config.flaky_examples_report_path)
+ new_flaky_examples = flaky_examples - suite_flaky_examples
if new_flaky_examples.any?
Rails.logger.warn "\nNew flaky examples detected:\n"
- Rails.logger.warn JSON.pretty_generate(to_report(new_flaky_examples))
+ Rails.logger.warn JSON.pretty_generate(new_flaky_examples.to_report)
- write_report_file(new_flaky_examples, new_flaky_examples_report_path)
+ write_report_file(new_flaky_examples, RspecFlaky::Config.new_flaky_examples_report_path)
end
end
@@ -46,30 +42,23 @@ module RspecFlaky
private
- def init_all_flaky_examples
- return {} unless File.exist?(all_flaky_examples_report_path)
+ def init_suite_flaky_examples(suite_flaky_examples_json = nil)
+ unless suite_flaky_examples_json
+ return {} unless File.exist?(RspecFlaky::Config.suite_flaky_examples_report_path)
- all_flaky_examples = JSON.parse(File.read(all_flaky_examples_report_path))
+ suite_flaky_examples_json = File.read(RspecFlaky::Config.suite_flaky_examples_report_path)
+ end
- Hash[(all_flaky_examples || {}).map { |k, ex| [k, FlakyExample.new(ex)] }]
+ FlakyExamplesCollection.from_json(suite_flaky_examples_json)
end
- def write_report_file(examples, file_path)
- return unless ENV['FLAKY_RSPEC_GENERATE_REPORT'] == 'true'
+ def write_report_file(examples_collection, file_path)
+ return unless RspecFlaky::Config.generate_report?
report_path_dir = File.dirname(file_path)
FileUtils.mkdir_p(report_path_dir) unless Dir.exist?(report_path_dir)
- File.write(file_path, JSON.pretty_generate(to_report(examples)))
- end
-
- def all_flaky_examples_report_path
- @all_flaky_examples_report_path ||= ENV['ALL_FLAKY_RSPEC_REPORT_PATH'] ||
- Rails.root.join("rspec_flaky/all-report.json")
- end
- def new_flaky_examples_report_path
- @new_flaky_examples_report_path ||= ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] ||
- Rails.root.join("rspec_flaky/new-report.json")
+ File.write(file_path, JSON.pretty_generate(examples_collection.to_report))
end
end
end
diff --git a/lib/support/nginx/gitlab b/lib/support/nginx/gitlab
index 54f51d9d633..0e27a28ea6e 100644
--- a/lib/support/nginx/gitlab
+++ b/lib/support/nginx/gitlab
@@ -17,6 +17,8 @@
## See installation.md#using-https for additional HTTPS configuration details.
upstream gitlab-workhorse {
+ # GitLab socket file,
+ # for Omnibus this would be: unix:/var/opt/gitlab/gitlab-workhorse/socket
server unix:/home/git/gitlab/tmp/sockets/gitlab-workhorse.socket fail_timeout=0;
}
@@ -110,6 +112,8 @@ server {
error_page 502 /502.html;
error_page 503 /503.html;
location ~ ^/(404|422|500|502|503)\.html$ {
+ # Location of GitLab's public directory,
+ # for Omnibus this would be: /opt/gitlab/embedded/service/gitlab-rails/public.
root /home/git/gitlab/public;
internal;
}
diff --git a/lib/support/nginx/gitlab-ssl b/lib/support/nginx/gitlab-ssl
index ed8131ef24f..8218d68f9ba 100644
--- a/lib/support/nginx/gitlab-ssl
+++ b/lib/support/nginx/gitlab-ssl
@@ -21,6 +21,8 @@
## See installation.md#using-https for additional HTTPS configuration details.
upstream gitlab-workhorse {
+ # GitLab socket file,
+ # for Omnibus this would be: unix:/var/opt/gitlab/gitlab-workhorse/socket
server unix:/home/git/gitlab/tmp/sockets/gitlab-workhorse.socket fail_timeout=0;
}
@@ -160,6 +162,8 @@ server {
error_page 502 /502.html;
error_page 503 /503.html;
location ~ ^/(404|422|500|502|503)\.html$ {
+ # Location of GitLab's public directory,
+ # for Omnibus this would be: /opt/gitlab/embedded/service/gitlab-rails/public
root /home/git/gitlab/public;
internal;
}
diff --git a/lib/system_check/app/git_user_default_ssh_config_check.rb b/lib/system_check/app/git_user_default_ssh_config_check.rb
index 7b486d78cf0..ad41760dff2 100644
--- a/lib/system_check/app/git_user_default_ssh_config_check.rb
+++ b/lib/system_check/app/git_user_default_ssh_config_check.rb
@@ -5,12 +5,13 @@ module SystemCheck
# whitelisted as it may change the SSH client's behaviour dramatically.
WHITELIST = %w[
authorized_keys
+ authorized_keys.lock
authorized_keys2
known_hosts
].freeze
set_name 'Git user has default SSH configuration?'
- set_skip_reason 'skipped (git user is not present or configured)'
+ set_skip_reason 'skipped (git user is not present / configured)'
def skip?
!home_dir || !File.directory?(home_dir)
diff --git a/lib/system_check/app/git_version_check.rb b/lib/system_check/app/git_version_check.rb
index c388682dfb4..44ec888c197 100644
--- a/lib/system_check/app/git_version_check.rb
+++ b/lib/system_check/app/git_version_check.rb
@@ -5,11 +5,11 @@ module SystemCheck
set_check_pass -> { "yes (#{self.current_version})" }
def self.required_version
- @required_version ||= Gitlab::VersionInfo.new(2, 7, 3)
+ @required_version ||= Gitlab::VersionInfo.new(2, 9, 5)
end
def self.current_version
- @current_version ||= Gitlab::VersionInfo.parse(run_command(%W(#{Gitlab.config.git.bin_path} --version)))
+ @current_version ||= Gitlab::VersionInfo.parse(Gitlab::TaskHelpers.run_command(%W(#{Gitlab.config.git.bin_path} --version)))
end
def check?
diff --git a/lib/system_check/app/ruby_version_check.rb b/lib/system_check/app/ruby_version_check.rb
index fd82f5f8a4a..57bbabece1f 100644
--- a/lib/system_check/app/ruby_version_check.rb
+++ b/lib/system_check/app/ruby_version_check.rb
@@ -5,11 +5,11 @@ module SystemCheck
set_check_pass -> { "yes (#{self.current_version})" }
def self.required_version
- @required_version ||= Gitlab::VersionInfo.new(2, 3, 3)
+ @required_version ||= Gitlab::VersionInfo.new(2, 3, 5)
end
def self.current_version
- @current_version ||= Gitlab::VersionInfo.parse(run_command(%w(ruby --version)))
+ @current_version ||= Gitlab::VersionInfo.parse(Gitlab::TaskHelpers.run_command(%w(ruby --version)))
end
def check?
diff --git a/lib/system_check/helpers.rb b/lib/system_check/helpers.rb
index c42ae4fe4c4..914ed794601 100644
--- a/lib/system_check/helpers.rb
+++ b/lib/system_check/helpers.rb
@@ -1,5 +1,3 @@
-require 'tasks/gitlab/task_helpers'
-
module SystemCheck
module Helpers
include ::Gitlab::TaskHelpers
diff --git a/lib/system_check/incoming_email/imap_authentication_check.rb b/lib/system_check/incoming_email/imap_authentication_check.rb
index dee108d987b..e55bea86d3f 100644
--- a/lib/system_check/incoming_email/imap_authentication_check.rb
+++ b/lib/system_check/incoming_email/imap_authentication_check.rb
@@ -4,22 +4,17 @@ module SystemCheck
set_name 'IMAP server credentials are correct?'
def check?
- if mailbox_config
- begin
- imap = Net::IMAP.new(config[:host], port: config[:port], ssl: config[:ssl])
- imap.starttls if config[:start_tls]
- imap.login(config[:email], config[:password])
- connected = true
- rescue
- connected = false
- end
+ if config
+ try_connect_imap
+ else
+ @error = "#{mail_room_config_path} does not have mailboxes setup"
+ false
end
-
- connected
end
def show_error
try_fixing_it(
+ "An error occurred: #{@error.class}: #{@error.message}",
'Check that the information in config/gitlab.yml is correct'
)
for_more_information(
@@ -30,15 +25,31 @@ module SystemCheck
private
- def mailbox_config
- return @config if @config
+ def try_connect_imap
+ imap = Net::IMAP.new(config[:host], port: config[:port], ssl: config[:ssl])
+ imap.starttls if config[:start_tls]
+ imap.login(config[:email], config[:password])
+ true
+ rescue => error
+ @error = error
+ false
+ end
+
+ def config
+ @config ||= load_config
+ end
+
+ def mail_room_config_path
+ @mail_room_config_path ||=
+ Rails.root.join('config', 'mail_room.yml').to_s
+ end
- config_path = Rails.root.join('config', 'mail_room.yml').to_s
- erb = ERB.new(File.read(config_path))
- erb.filename = config_path
+ def load_config
+ erb = ERB.new(File.read(mail_room_config_path))
+ erb.filename = mail_room_config_path
config_file = YAML.load(erb.result)
- @config = config_file[:mailboxes]&.first
+ config_file.dig(:mailboxes, 0)
end
end
end
diff --git a/lib/system_check/simple_executor.rb b/lib/system_check/simple_executor.rb
index 00221f77cf4..d268f501b4a 100644
--- a/lib/system_check/simple_executor.rb
+++ b/lib/system_check/simple_executor.rb
@@ -24,6 +24,7 @@ module SystemCheck
# @param [BaseCheck] check class
def <<(check)
raise ArgumentError unless check.is_a?(Class) && check < BaseCheck
+
@checks << check
end
@@ -65,6 +66,7 @@ module SystemCheck
if check.can_repair?
$stdout.print 'Trying to fix error automatically. ...'
+
if check.repair!
$stdout.puts 'Success'.color(:green)
return
diff --git a/lib/tasks/brakeman.rake b/lib/tasks/brakeman.rake
index 99b3168d9eb..2301ec9b228 100644
--- a/lib/tasks/brakeman.rake
+++ b/lib/tasks/brakeman.rake
@@ -2,7 +2,7 @@ desc 'Security check via brakeman'
task :brakeman do
# We get 0 warnings at level 'w3' but we would like to reach 'w2'. Merge
# requests are welcome!
- if system(*%w(brakeman --no-progress --skip-files lib/backup/repository.rb,app/controllers/unicorn_test_controller.rb -w3 -z))
+ if system(*%w(brakeman --no-progress --skip-files lib/backup/repository.rb -w3 -z))
puts 'Security check succeed'
else
puts 'Security check failed'
diff --git a/lib/tasks/dev.rake b/lib/tasks/dev.rake
index e65609d7001..4beb94eeb8e 100644
--- a/lib/tasks/dev.rake
+++ b/lib/tasks/dev.rake
@@ -7,4 +7,9 @@ namespace :dev do
Rake::Task["gitlab:setup"].invoke
Rake::Task["gitlab:shell:setup"].invoke
end
+
+ desc "GitLab | Eager load application"
+ task load: :environment do
+ Rails.application.eager_load!
+ end
end
diff --git a/lib/tasks/flay.rake b/lib/tasks/flay.rake
index 7ad2b2e4d39..4b4881cecb8 100644
--- a/lib/tasks/flay.rake
+++ b/lib/tasks/flay.rake
@@ -1,8 +1,8 @@
desc 'Code duplication analyze via flay'
task :flay do
- output = `bundle exec flay --mass 35 app/ lib/gitlab/`
+ output = `bundle exec flay --mass 35 app/ lib/gitlab/ 2> #{File::NULL}`
- if output.include? "Similar code found"
+ if output.include?("Similar code found") || output.include?("IDENTICAL code found")
puts output
exit 1
end
diff --git a/lib/tasks/gemojione.rake b/lib/tasks/gemojione.rake
index 87ca39b079b..c6942d22926 100644
--- a/lib/tasks/gemojione.rake
+++ b/lib/tasks/gemojione.rake
@@ -1,5 +1,28 @@
namespace :gemojione do
desc 'Generates Emoji SHA256 digests'
+
+ task aliases: ['yarn:check', 'environment'] do
+ require 'json'
+
+ aliases = {}
+
+ index_file = File.join(Rails.root, 'fixtures', 'emojis', 'index.json')
+ index = JSON.parse(File.read(index_file))
+
+ index.each_pair do |key, data|
+ data['aliases'].each do |a|
+ a.tr!(':', '')
+
+ aliases[a] = key
+ end
+ end
+
+ out = File.join(Rails.root, 'fixtures', 'emojis', 'aliases.json')
+ File.open(out, 'w') do |handle|
+ handle.write(JSON.pretty_generate(aliases, indent: ' ', space: '', space_before: ''))
+ end
+ end
+
task digests: ['yarn:check', 'environment'] do
require 'digest/sha2'
require 'json'
@@ -16,8 +39,13 @@ namespace :gemojione do
fpath = File.join(dir, "#{emoji_hash['unicode']}.png")
hash_digest = Digest::SHA256.file(fpath).hexdigest
+ category = emoji_hash['category']
+ if name == 'gay_pride_flag'
+ category = 'flags'
+ end
+
entry = {
- category: emoji_hash['category'],
+ category: category,
moji: emoji_hash['moji'],
description: emoji_hash['description'],
unicodeVersion: Gitlab::Emoji.emoji_unicode_version(name),
@@ -29,7 +57,6 @@ namespace :gemojione do
end
out = File.join(Rails.root, 'fixtures', 'emojis', 'digests.json')
-
File.open(out, 'w') do |handle|
handle.write(JSON.pretty_generate(resultant_emoji_map))
end
@@ -88,7 +115,7 @@ namespace :gemojione do
end
end
- style_path = Rails.root.join(*%w(app assets stylesheets framework emoji-sprites.scss))
+ style_path = Rails.root.join(*%w(app assets stylesheets framework emoji_sprites.scss))
# Combine the resized assets into a packed sprite and re-generate the SCSS
SpriteFactory.cssurl = "image-url('$IMAGE')"
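# For illustration: the new aliases task inverts fixtures/emojis/index.json (alias => canonical
# name) and writes fixtures/emojis/aliases.json. A usage sketch, assuming a development checkout:
#   bundle exec rake gemojione:aliases
#   bundle exec rake gemojione:digests   # then regenerate digests.json with the category fix above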
diff --git a/lib/tasks/gettext.rake b/lib/tasks/gettext.rake
index 35ba729c156..247d7be7d78 100644
--- a/lib/tasks/gettext.rake
+++ b/lib/tasks/gettext.rake
@@ -23,6 +23,7 @@ namespace :gettext do
desc 'Lint all po files in `locale/'
task lint: :environment do
require 'simple_po_parser'
+ require 'gitlab/utils'
FastGettext.silence_errors
files = Dir.glob(Rails.root.join('locale/*/gitlab.po'))
diff --git a/lib/tasks/gitlab/assets.rake b/lib/tasks/gitlab/assets.rake
index 259a755d724..a42f02a84fd 100644
--- a/lib/tasks/gitlab/assets.rake
+++ b/lib/tasks/gitlab/assets.rake
@@ -3,8 +3,8 @@ namespace :gitlab do
desc 'GitLab | Assets | Compile all frontend assets'
task compile: [
'yarn:check',
- 'rake:assets:precompile',
'gettext:po_to_json',
+ 'rake:assets:precompile',
'webpack:compile',
'fix_urls'
]
diff --git a/lib/tasks/gitlab/backup.rake b/lib/tasks/gitlab/backup.rake
index 1650263b98d..24e37f6c6cc 100644
--- a/lib/tasks/gitlab/backup.rake
+++ b/lib/tasks/gitlab/backup.rake
@@ -4,7 +4,7 @@ namespace :gitlab do
namespace :backup do
# Create backup of GitLab system
desc "GitLab | Create a backup of the GitLab system"
- task create: :environment do
+ task create: :gitlab_environment do
warn_user_is_not_gitlab
configure_cron_mode
@@ -25,7 +25,7 @@ namespace :gitlab do
# Restore backup of GitLab system
desc 'GitLab | Restore a previously created backup'
- task restore: :environment do
+ task restore: :gitlab_environment do
warn_user_is_not_gitlab
configure_cron_mode
@@ -33,24 +33,30 @@ namespace :gitlab do
backup.unpack
unless backup.skipped?('db')
- unless ENV['force'] == 'yes'
- warning = <<-MSG.strip_heredoc
- Before restoring the database we recommend removing all existing
- tables to avoid future upgrade problems. Be aware that if you have
- custom tables in the GitLab database these tables and all data will be
- removed.
- MSG
- puts warning.color(:red)
- ask_to_continue
- puts 'Removing all tables. Press `Ctrl-C` within 5 seconds to abort'.color(:yellow)
- sleep(5)
+ begin
+ unless ENV['force'] == 'yes'
+ warning = <<-MSG.strip_heredoc
+ Before restoring the database, we will remove all existing
+ tables to avoid future upgrade problems. Be aware that if you have
+ custom tables in the GitLab database these tables and all data will be
+ removed.
+ MSG
+ puts warning.color(:red)
+ ask_to_continue
+ puts 'Removing all tables. Press `Ctrl-C` within 5 seconds to abort'.color(:yellow)
+ sleep(5)
+ end
+
+          # Drop all tables, then load the schema to ensure we don't have any newer tables
+ # hanging out from a failed upgrade
+ $progress.puts 'Cleaning the database ... '.color(:blue)
+ Rake::Task['gitlab:db:drop_tables'].invoke
+ $progress.puts 'done'.color(:green)
+ Rake::Task['gitlab:backup:db:restore'].invoke
+ rescue Gitlab::TaskAbortedByUserError
+ puts "Quitting...".color(:red)
+ exit 1
end
- # Drop all tables Load the schema to ensure we don't have any newer tables
- # hanging out from a failed upgrade
- $progress.puts 'Cleaning the database ... '.color(:blue)
- Rake::Task['gitlab:db:drop_tables'].invoke
- $progress.puts 'done'.color(:green)
- Rake::Task['gitlab:backup:db:restore'].invoke
end
Rake::Task['gitlab:backup:repo:restore'].invoke unless backup.skipped?('repositories')
@@ -67,7 +73,7 @@ namespace :gitlab do
end
namespace :repo do
- task create: :environment do
+ task create: :gitlab_environment do
$progress.puts "Dumping repositories ...".color(:blue)
if ENV["SKIP"] && ENV["SKIP"].include?("repositories")
@@ -78,7 +84,7 @@ namespace :gitlab do
end
end
- task restore: :environment do
+ task restore: :gitlab_environment do
$progress.puts "Restoring repositories ...".color(:blue)
Backup::Repository.new.restore
$progress.puts "done".color(:green)
@@ -86,7 +92,7 @@ namespace :gitlab do
end
namespace :db do
- task create: :environment do
+ task create: :gitlab_environment do
$progress.puts "Dumping database ... ".color(:blue)
if ENV["SKIP"] && ENV["SKIP"].include?("db")
@@ -97,7 +103,7 @@ namespace :gitlab do
end
end
- task restore: :environment do
+ task restore: :gitlab_environment do
$progress.puts "Restoring database ... ".color(:blue)
Backup::Database.new.restore
$progress.puts "done".color(:green)
@@ -105,7 +111,7 @@ namespace :gitlab do
end
namespace :builds do
- task create: :environment do
+ task create: :gitlab_environment do
$progress.puts "Dumping builds ... ".color(:blue)
if ENV["SKIP"] && ENV["SKIP"].include?("builds")
@@ -116,7 +122,7 @@ namespace :gitlab do
end
end
- task restore: :environment do
+ task restore: :gitlab_environment do
$progress.puts "Restoring builds ... ".color(:blue)
Backup::Builds.new.restore
$progress.puts "done".color(:green)
@@ -124,7 +130,7 @@ namespace :gitlab do
end
namespace :uploads do
- task create: :environment do
+ task create: :gitlab_environment do
$progress.puts "Dumping uploads ... ".color(:blue)
if ENV["SKIP"] && ENV["SKIP"].include?("uploads")
@@ -135,7 +141,7 @@ namespace :gitlab do
end
end
- task restore: :environment do
+ task restore: :gitlab_environment do
$progress.puts "Restoring uploads ... ".color(:blue)
Backup::Uploads.new.restore
$progress.puts "done".color(:green)
@@ -143,7 +149,7 @@ namespace :gitlab do
end
namespace :artifacts do
- task create: :environment do
+ task create: :gitlab_environment do
$progress.puts "Dumping artifacts ... ".color(:blue)
if ENV["SKIP"] && ENV["SKIP"].include?("artifacts")
@@ -154,7 +160,7 @@ namespace :gitlab do
end
end
- task restore: :environment do
+ task restore: :gitlab_environment do
$progress.puts "Restoring artifacts ... ".color(:blue)
Backup::Artifacts.new.restore
$progress.puts "done".color(:green)
@@ -162,7 +168,7 @@ namespace :gitlab do
end
namespace :pages do
- task create: :environment do
+ task create: :gitlab_environment do
$progress.puts "Dumping pages ... ".color(:blue)
if ENV["SKIP"] && ENV["SKIP"].include?("pages")
@@ -173,7 +179,7 @@ namespace :gitlab do
end
end
- task restore: :environment do
+ task restore: :gitlab_environment do
$progress.puts "Restoring pages ... ".color(:blue)
Backup::Pages.new.restore
$progress.puts "done".color(:green)
@@ -181,7 +187,7 @@ namespace :gitlab do
end
namespace :lfs do
- task create: :environment do
+ task create: :gitlab_environment do
$progress.puts "Dumping lfs objects ... ".color(:blue)
if ENV["SKIP"] && ENV["SKIP"].include?("lfs")
@@ -192,7 +198,7 @@ namespace :gitlab do
end
end
- task restore: :environment do
+ task restore: :gitlab_environment do
$progress.puts "Restoring lfs objects ... ".color(:blue)
Backup::Lfs.new.restore
$progress.puts "done".color(:green)
@@ -200,7 +206,7 @@ namespace :gitlab do
end
namespace :registry do
- task create: :environment do
+ task create: :gitlab_environment do
$progress.puts "Dumping container registry images ... ".color(:blue)
if Gitlab.config.registry.enabled
@@ -215,8 +221,9 @@ namespace :gitlab do
end
end
- task restore: :environment do
+ task restore: :gitlab_environment do
$progress.puts "Restoring container registry images ... ".color(:blue)
+
if Gitlab.config.registry.enabled
Backup::Registry.new.restore
$progress.puts "done".color(:green)
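# For illustration: restore now wraps the confirmation prompt and the destructive steps in a
# single begin/rescue, so a user abort exits cleanly instead of continuing. A minimal sketch of
# that flow, assuming ask_to_continue raises Gitlab::TaskAbortedByUserError as in the rescue above:
begin
  ask_to_continue unless ENV['force'] == 'yes'
  Rake::Task['gitlab:db:drop_tables'].invoke
  Rake::Task['gitlab:backup:db:restore'].invoke
rescue Gitlab::TaskAbortedByUserError
  puts 'Quitting...'
  exit 1
end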
diff --git a/lib/tasks/gitlab/check.rake b/lib/tasks/gitlab/check.rake
index dfade1f3885..2403f57f05a 100644
--- a/lib/tasks/gitlab/check.rake
+++ b/lib/tasks/gitlab/check.rake
@@ -1,7 +1,3 @@
-# Temporary hack, until we migrate all checks to SystemCheck format
-require 'system_check'
-require 'system_check/helpers'
-
namespace :gitlab do
desc 'GitLab | Check the configuration of GitLab and its environment'
task check: %w{gitlab:gitlab_shell:check
@@ -12,7 +8,7 @@ namespace :gitlab do
namespace :app do
desc 'GitLab | Check the configuration of the GitLab Rails app'
- task check: :environment do
+ task check: :gitlab_environment do
warn_user_is_not_gitlab
checks = [
@@ -43,7 +39,7 @@ namespace :gitlab do
namespace :gitlab_shell do
desc "GitLab | Check the configuration of GitLab Shell"
- task check: :environment do
+ task check: :gitlab_environment do
warn_user_is_not_gitlab
start_checking "GitLab Shell"
@@ -180,6 +176,7 @@ namespace :gitlab do
puts "can't check, you have no projects".color(:magenta)
return
end
+
puts ""
Project.find_each(batch_size: 100) do |project|
@@ -210,6 +207,7 @@ namespace :gitlab do
gitlab_shell_repo_base = gitlab_shell_path
check_cmd = File.expand_path('bin/check', gitlab_shell_repo_base)
puts "Running #{check_cmd}"
+
if system(check_cmd, chdir: gitlab_shell_repo_base)
puts 'gitlab-shell self-check successful'.color(:green)
else
@@ -249,7 +247,7 @@ namespace :gitlab do
namespace :sidekiq do
desc "GitLab | Check the configuration of Sidekiq"
- task check: :environment do
+ task check: :gitlab_environment do
warn_user_is_not_gitlab
start_checking "Sidekiq"
@@ -285,6 +283,7 @@ namespace :gitlab do
return if process_count.zero?
print 'Number of Sidekiq processes ... '
+
if process_count == 1
puts '1'.color(:green)
else
@@ -307,7 +306,7 @@ namespace :gitlab do
namespace :incoming_email do
desc "GitLab | Check the configuration of Reply by email"
- task check: :environment do
+ task check: :gitlab_environment do
warn_user_is_not_gitlab
if Gitlab.config.incoming_email.enabled
@@ -330,14 +329,14 @@ namespace :gitlab do
end
namespace :ldap do
- task :check, [:limit] => :environment do |_, args|
+ task :check, [:limit] => :gitlab_environment do |_, args|
# Only show up to 100 results because LDAP directories can be very big.
# This setting only affects the `rake gitlab:check` script.
args.with_defaults(limit: 100)
warn_user_is_not_gitlab
start_checking "LDAP"
- if Gitlab::LDAP::Config.enabled?
+ if Gitlab::Auth::LDAP::Config.enabled?
check_ldap(args.limit)
else
puts 'LDAP is disabled in config/gitlab.yml'
@@ -347,13 +346,13 @@ namespace :gitlab do
end
def check_ldap(limit)
- servers = Gitlab::LDAP::Config.providers
+ servers = Gitlab::Auth::LDAP::Config.providers
servers.each do |server|
puts "Server: #{server}"
begin
- Gitlab::LDAP::Adapter.open(server) do |adapter|
+ Gitlab::Auth::LDAP::Adapter.open(server) do |adapter|
check_ldap_auth(adapter)
puts "LDAP users with access to your GitLab server (only showing the first #{limit} results)"
@@ -386,21 +385,15 @@ namespace :gitlab do
namespace :repo do
desc "GitLab | Check the integrity of the repositories managed by GitLab"
- task check: :environment do
- Gitlab.config.repositories.storages.each do |name, repository_storage|
- namespace_dirs = Dir.glob(File.join(repository_storage['path'], '*'))
-
- namespace_dirs.each do |namespace_dir|
- repo_dirs = Dir.glob(File.join(namespace_dir, '*'))
- repo_dirs.each { |repo_dir| check_repo_integrity(repo_dir) }
- end
- end
+ task check: :gitlab_environment do
+ puts "This task is deprecated. Please use gitlab:git:fsck instead".color(:red)
+ Rake::Task["gitlab:git:fsck"].execute
end
end
namespace :orphans do
desc 'Gitlab | Check for orphaned namespaces and repositories'
- task check: :environment do
+ task check: :gitlab_environment do
warn_user_is_not_gitlab
checks = [
SystemCheck::Orphans::NamespaceCheck,
@@ -411,7 +404,7 @@ namespace :gitlab do
end
desc 'GitLab | Check for orphaned namespaces in the repositories path'
- task check_namespaces: :environment do
+ task check_namespaces: :gitlab_environment do
warn_user_is_not_gitlab
checks = [SystemCheck::Orphans::NamespaceCheck]
@@ -419,7 +412,7 @@ namespace :gitlab do
end
desc 'GitLab | Check for orphaned repositories in the repositories path'
- task check_repositories: :environment do
+ task check_repositories: :gitlab_environment do
warn_user_is_not_gitlab
checks = [SystemCheck::Orphans::RepositoryCheck]
@@ -429,8 +422,8 @@ namespace :gitlab do
namespace :user do
desc "GitLab | Check the integrity of a specific user's repositories"
- task :check_repos, [:username] => :environment do |t, args|
- username = args[:username] || prompt("Check repository integrity for fsername? ".color(:blue))
+ task :check_repos, [:username] => :gitlab_environment do |t, args|
+ username = args[:username] || prompt("Check repository integrity for username? ".color(:blue))
user = User.find_by(username: username)
if user
repo_dirs = user.authorized_projects.map do |p|
@@ -461,35 +454,4 @@ namespace :gitlab do
puts "FAIL. Please update gitlab-shell to #{required_version} from #{current_version}".color(:red)
end
end
-
- def check_repo_integrity(repo_dir)
- puts "\nChecking repo at #{repo_dir.color(:yellow)}"
-
- git_fsck(repo_dir)
- check_config_lock(repo_dir)
- check_ref_locks(repo_dir)
- end
-
- def git_fsck(repo_dir)
- puts "Running `git fsck`".color(:yellow)
- system(*%W(#{Gitlab.config.git.bin_path} fsck), chdir: repo_dir)
- end
-
- def check_config_lock(repo_dir)
- config_exists = File.exist?(File.join(repo_dir, 'config.lock'))
- config_output = config_exists ? 'yes'.color(:red) : 'no'.color(:green)
- puts "'config.lock' file exists?".color(:yellow) + " ... #{config_output}"
- end
-
- def check_ref_locks(repo_dir)
- lock_files = Dir.glob(File.join(repo_dir, 'refs/heads/*.lock'))
- if lock_files.present?
- puts "Ref lock files exist:".color(:red)
- lock_files.each do |lock_file|
- puts " #{lock_file}"
- end
- else
- puts "No ref lock files exist".color(:green)
- end
- end
end
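# For illustration: gitlab:repo:check is now only a deprecation shim; the per-repository
# fsck and lock-file checks moved to gitlab:git:fsck (see git.rake below). A typical
# invocation, assuming a source install:
#   sudo -u git -H bundle exec rake gitlab:git:fsck RAILS_ENV=production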
diff --git a/lib/tasks/gitlab/cleanup.rake b/lib/tasks/gitlab/cleanup.rake
index 8ae1b6a626a..2453079911d 100644
--- a/lib/tasks/gitlab/cleanup.rake
+++ b/lib/tasks/gitlab/cleanup.rake
@@ -1,11 +1,16 @@
+# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/954
+#
namespace :gitlab do
namespace :cleanup do
+ HASHED_REPOSITORY_NAME = '@hashed'.freeze
+
desc "GitLab | Cleanup | Clean namespaces"
- task dirs: :environment do
+ task dirs: :gitlab_environment do
warn_user_is_not_gitlab
remove_flag = ENV['REMOVE']
- namespaces = Namespace.pluck(:path)
+ namespaces = Namespace.pluck(:path)
+ namespaces << HASHED_REPOSITORY_NAME # add so that it will be ignored
Gitlab.config.repositories.storages.each do |name, repository_storage|
git_base_path = repository_storage['path']
all_dirs = Dir.glob(git_base_path + '/*')
@@ -44,7 +49,7 @@ namespace :gitlab do
end
desc "GitLab | Cleanup | Clean repositories"
- task repos: :environment do
+ task repos: :gitlab_environment do
warn_user_is_not_gitlab
move_suffix = "+orphaned+#{Time.now.to_i}"
@@ -59,7 +64,11 @@ namespace :gitlab do
.sub(%r{^/*}, '')
.chomp('.git')
.chomp('.wiki')
- next if Project.find_by_full_path(repo_with_namespace)
+
+ # TODO ignoring hashed repositories for now. But revisit to fully support
+ # possible orphaned hashed repos
+ next if repo_with_namespace.start_with?("#{HASHED_REPOSITORY_NAME}/") || Project.find_by_full_path(repo_with_namespace)
+
new_path = path + move_suffix
puts path.inspect + ' -> ' + new_path.inspect
File.rename(path, new_path)
@@ -69,14 +78,16 @@ namespace :gitlab do
end
desc "GitLab | Cleanup | Block users that have been removed in LDAP"
- task block_removed_ldap_users: :environment do
+ task block_removed_ldap_users: :gitlab_environment do
warn_user_is_not_gitlab
block_flag = ENV['BLOCK']
User.find_each do |user|
next unless user.ldap_user?
+
print "#{user.name} (#{user.ldap_identity.extern_uid}) ..."
- if Gitlab::LDAP::Access.allowed?(user)
+
+ if Gitlab::Auth::LDAP::Access.allowed?(user)
puts " [OK]".color(:green)
else
if block_flag
@@ -98,7 +109,7 @@ namespace :gitlab do
# released. So likely this should only be run once on gitlab.com
# Faulty refs are moved so they are kept around, else some features break.
desc 'GitLab | Cleanup | Remove faulty deployment refs'
- task move_faulty_deployment_refs: :environment do
+ task move_faulty_deployment_refs: :gitlab_environment do
projects = Project.where(id: Deployment.select(:project_id).distinct)
projects.find_each do |project|
diff --git a/lib/tasks/gitlab/dev.rake b/lib/tasks/gitlab/dev.rake
index 3eade7bf553..77c28615856 100644
--- a/lib/tasks/gitlab/dev.rake
+++ b/lib/tasks/gitlab/dev.rake
@@ -4,12 +4,17 @@ namespace :gitlab do
task :ee_compat_check, [:branch] => :environment do |_, args|
opts =
if ENV['CI']
- { branch: ENV['CI_COMMIT_REF_NAME'] }
+ {
+ ce_project_url: ENV['CI_PROJECT_URL'],
+ branch: ENV['CI_COMMIT_REF_NAME'],
+ job_id: ENV['CI_JOB_ID']
+ }
else
unless args[:branch]
puts "Must specify a branch as an argument".color(:red)
exit 1
end
+
args
end
diff --git a/lib/tasks/gitlab/git.rake b/lib/tasks/gitlab/git.rake
index cf82134d97e..cb4f7e5c8a8 100644
--- a/lib/tasks/gitlab/git.rake
+++ b/lib/tasks/gitlab/git.rake
@@ -1,7 +1,7 @@
namespace :gitlab do
namespace :git do
desc "GitLab | Git | Repack"
- task repack: :environment do
+ task repack: :gitlab_environment do
failures = perform_git_cmd(%W(#{Gitlab.config.git.bin_path} repack -a --quiet), "Repacking repo")
if failures.empty?
puts "Done".color(:green)
@@ -11,7 +11,7 @@ namespace :gitlab do
end
desc "GitLab | Git | Run garbage collection on all repos"
- task gc: :environment do
+ task gc: :gitlab_environment do
failures = perform_git_cmd(%W(#{Gitlab.config.git.bin_path} gc --auto --quiet), "Garbage Collecting")
if failures.empty?
puts "Done".color(:green)
@@ -21,7 +21,7 @@ namespace :gitlab do
end
desc "GitLab | Git | Prune all repos"
- task prune: :environment do
+ task prune: :gitlab_environment do
failures = perform_git_cmd(%W(#{Gitlab.config.git.bin_path} prune), "Git Prune")
if failures.empty?
puts "Done".color(:green)
@@ -30,6 +30,20 @@ namespace :gitlab do
end
end
+ desc 'GitLab | Git | Check all repos integrity'
+ task fsck: :gitlab_environment do
+ failures = perform_git_cmd(%W(#{Gitlab.config.git.bin_path} fsck --name-objects --no-progress), "Checking integrity") do |repo|
+ check_config_lock(repo)
+ check_ref_locks(repo)
+ end
+
+ if failures.empty?
+ puts "Done".color(:green)
+ else
+ output_failures(failures)
+ end
+ end
+
def perform_git_cmd(cmd, message)
puts "Starting #{message} on all repositories"
@@ -40,6 +54,8 @@ namespace :gitlab do
else
failures << repo
end
+
+ yield(repo) if block_given?
end
failures
@@ -49,5 +65,24 @@ namespace :gitlab do
puts "The following repositories reported errors:".color(:red)
failures.each { |f| puts "- #{f}" }
end
+
+ def check_config_lock(repo_dir)
+ config_exists = File.exist?(File.join(repo_dir, 'config.lock'))
+ config_output = config_exists ? 'yes'.color(:red) : 'no'.color(:green)
+
+ puts "'config.lock' file exists?".color(:yellow) + " ... #{config_output}"
+ end
+
+ def check_ref_locks(repo_dir)
+ lock_files = Dir.glob(File.join(repo_dir, 'refs/heads/*.lock'))
+
+ if lock_files.present?
+ puts "Ref lock files exist:".color(:red)
+
+ lock_files.each { |lock_file| puts " #{lock_file}" }
+ else
+ puts "No ref lock files exist".color(:green)
+ end
+ end
end
end
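# For illustration: perform_git_cmd now yields each repository to an optional block, which is
# how the new fsck task chains the config.lock and ref-lock checks after the git command. A
# minimal sketch of a caller, assuming repository paths are yielded as strings:
failures = perform_git_cmd(%W(#{Gitlab.config.git.bin_path} fsck --no-progress), 'Checking') do |repo|
  puts "finished #{repo}"
end
puts failures.empty? ? 'Done' : "#{failures.size} repositories reported errors"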
diff --git a/lib/tasks/gitlab/gitaly.rake b/lib/tasks/gitlab/gitaly.rake
index 08677a98fc1..e9ca6404fe8 100644
--- a/lib/tasks/gitlab/gitaly.rake
+++ b/lib/tasks/gitlab/gitaly.rake
@@ -1,86 +1,51 @@
namespace :gitlab do
namespace :gitaly do
desc "GitLab | Install or upgrade gitaly"
- task :install, [:dir, :repo] => :environment do |t, args|
- require 'toml'
+ task :install, [:dir, :repo] => :gitlab_environment do |t, args|
+ require 'toml-rb'
warn_user_is_not_gitlab
+
unless args.dir.present?
abort %(Please specify the directory where you want to install gitaly:\n rake "gitlab:gitaly:install[/home/git/gitaly]")
end
+
args.with_defaults(repo: 'https://gitlab.com/gitlab-org/gitaly.git')
version = Gitlab::GitalyClient.expected_server_version
checkout_or_clone_version(version: version, repo: args.repo, target_dir: args.dir)
+ command = %w[/usr/bin/env -u RUBYOPT -u BUNDLE_GEMFILE]
+
_, status = Gitlab::Popen.popen(%w[which gmake])
- command = status.zero? ? ['gmake'] : ['make']
+ command << (status.zero? ? 'gmake' : 'make')
if Rails.env.test?
- command += %W[BUNDLE_PATH=#{Bundler.bundle_path}]
+ command.push(
+ 'BUNDLE_FLAGS=--no-deployment',
+ "BUNDLE_PATH=#{Bundler.bundle_path}")
end
+ Gitlab::SetupHelper.create_gitaly_configuration(args.dir)
Dir.chdir(args.dir) do
- create_gitaly_configuration
# In CI we run scripts/gitaly-test-build instead of this command
unless ENV['CI'].present?
- Bundler.with_original_env { run_command!(%w[/usr/bin/env -u RUBYOPT -u BUNDLE_GEMFILE] + command) }
+ Bundler.with_original_env { run_command!(command) }
end
end
end
desc "GitLab | Print storage configuration in TOML format"
task storage_config: :environment do
- require 'toml'
+ require 'toml-rb'
puts "# Gitaly storage configuration generated from #{Gitlab.config.source} on #{Time.current.to_s(:long)}"
puts "# This is in TOML format suitable for use in Gitaly's config.toml file."
# Exclude gitaly-ruby configuration because that depends on the gitaly
# installation directory.
- puts gitaly_configuration_toml(gitaly_ruby: false)
- end
-
- private
-
- # We cannot create config.toml files for all possible Gitaly configuations.
- # For instance, if Gitaly is running on another machine then it makes no
- # sense to write a config.toml file on the current machine. This method will
- # only generate a configuration for the most common and simplest case: when
- # we have exactly one Gitaly process and we are sure it is running locally
- # because it uses a Unix socket.
- def gitaly_configuration_toml(gitaly_ruby: true)
- storages = []
- address = nil
-
- Gitlab.config.repositories.storages.each do |key, val|
- if address
- if address != val['gitaly_address']
- raise ArgumentError, "Your gitlab.yml contains more than one gitaly_address."
- end
- elsif URI(val['gitaly_address']).scheme != 'unix'
- raise ArgumentError, "Automatic config.toml generation only supports 'unix:' addresses."
- else
- address = val['gitaly_address']
- end
-
- storages << { name: key, path: val['path'] }
- end
- config = { socket_path: address.sub(%r{\Aunix:}, ''), storage: storages }
- config[:auth] = { token: 'secret' } if Rails.env.test?
- config[:'gitaly-ruby'] = { dir: File.join(Dir.pwd, 'ruby') } if gitaly_ruby
- config[:'gitlab-shell'] = { dir: Gitlab.config.gitlab_shell.path }
- TOML.dump(config)
- end
-
- def create_gitaly_configuration
- File.open("config.toml", "w") do |f|
- f.puts gitaly_configuration_toml
- end
- rescue ArgumentError => e
- puts "Skipping config.toml generation:"
- puts e.message
+ puts Gitlab::SetupHelper.gitaly_configuration_toml('', gitaly_ruby: false)
end
end
end
diff --git a/lib/tasks/gitlab/helpers.rake b/lib/tasks/gitlab/helpers.rake
index b0a24790c4a..14d1125a03d 100644
--- a/lib/tasks/gitlab/helpers.rake
+++ b/lib/tasks/gitlab/helpers.rake
@@ -1,8 +1,6 @@
-require 'tasks/gitlab/task_helpers'
-
# Prevent StateMachine warnings from outputting during a cron task
StateMachines::Machine.ignore_method_conflicts = true if ENV['CRON']
-namespace :gitlab do
+task gitlab_environment: :environment do
extend SystemCheck::Helpers
end
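# For illustration: gitlab_environment replaces the old require of the task helpers; it loads
# the Rails environment and extends the task context with SystemCheck::Helpers (which in turn
# includes Gitlab::TaskHelpers). A minimal sketch of a task relying on it (the task name is
# hypothetical):
namespace :gitlab do
  desc 'GitLab | Example task using the shared helpers'
  task example: :gitlab_environment do
    warn_user_is_not_gitlab   # available via SystemCheck::Helpers
    puts 'Rails and the task helpers are loaded here'
  end
end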
diff --git a/lib/tasks/gitlab/import.rake b/lib/tasks/gitlab/import.rake
index d227a0c8bdb..adfcc3cda22 100644
--- a/lib/tasks/gitlab/import.rake
+++ b/lib/tasks/gitlab/import.rake
@@ -2,23 +2,21 @@ namespace :gitlab do
namespace :import do
# How to use:
#
- # 1. copy the bare repos under the repository storage paths (commonly the default path is /home/git/repositories)
- # 2. run: bundle exec rake gitlab:import:repos RAILS_ENV=production
+    # 1. copy the bare repos to a specific path that contains the group or subgroup structure as folders
+ # 2. run: bundle exec rake gitlab:import:repos[/path/to/repos] RAILS_ENV=production
#
# Notes:
# * The project owner will set to the first administator of the system
# * Existing projects will be skipped
- #
- #
desc "GitLab | Import bare repositories from repositories -> storages into GitLab project instance"
- task repos: :environment do
- if Project.current_application_settings.hashed_storage_enabled
- puts 'Cannot import repositories when Hashed Storage is enabled'.color(:red)
+ task :repos, [:import_path] => :environment do |_t, args|
+ unless args.import_path
+ puts 'Please specify an import path that contains the repositories'.color(:red)
exit 1
end
- Gitlab::BareRepositoryImporter.execute
+ Gitlab::BareRepositoryImport::Importer.execute(args.import_path)
end
end
end
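# For illustration: the import task now takes the source path as a rake argument instead of
# scanning the configured repository storages. Usage per the comment above, assuming the bare
# repositories live under /path/to/repos on a source install:
#   sudo -u git -H bundle exec rake "gitlab:import:repos[/path/to/repos]" RAILS_ENV=production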
diff --git a/lib/tasks/gitlab/info.rake b/lib/tasks/gitlab/info.rake
index e9fb6a008b0..45e9a1a1c72 100644
--- a/lib/tasks/gitlab/info.rake
+++ b/lib/tasks/gitlab/info.rake
@@ -1,7 +1,7 @@
namespace :gitlab do
namespace :env do
desc "GitLab | Show information about GitLab and its environment"
- task info: :environment do
+ task info: :gitlab_environment do
# check if there is an RVM environment
rvm_version = run_and_match(%w(rvm --version), /[\d\.]+/).try(:to_s)
# check Ruby version
diff --git a/lib/tasks/gitlab/lfs/check.rake b/lib/tasks/gitlab/lfs/check.rake
new file mode 100644
index 00000000000..869463d4e5d
--- /dev/null
+++ b/lib/tasks/gitlab/lfs/check.rake
@@ -0,0 +1,8 @@
+namespace :gitlab do
+ namespace :lfs do
+ desc 'GitLab | LFS | Check integrity of uploaded LFS objects'
+ task check: :environment do
+ Gitlab::Verify::RakeTask.run!(Gitlab::Verify::LfsObjects)
+ end
+ end
+end
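# For illustration: this task and the uploads check further down both delegate to
# Gitlab::Verify::RakeTask with a different verifier class. Typical invocation, assuming a
# source install:
#   sudo -u git -H bundle exec rake gitlab:lfs:check RAILS_ENV=production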
diff --git a/lib/tasks/gitlab/list_repos.rake b/lib/tasks/gitlab/list_repos.rake
index b732db9db6e..d7f28691098 100644
--- a/lib/tasks/gitlab/list_repos.rake
+++ b/lib/tasks/gitlab/list_repos.rake
@@ -8,6 +8,7 @@ namespace :gitlab do
namespace_ids = Namespace.where(['updated_at > ?', date]).pluck(:id).sort
scope = scope.where('id IN (?) OR namespace_id in (?)', project_ids, namespace_ids)
end
+
scope.find_each do |project|
base = File.join(project.repository_storage_path, project.disk_path)
puts base + '.git'
diff --git a/lib/tasks/gitlab/setup.rake b/lib/tasks/gitlab/setup.rake
index 05fcb8e3da5..1d903c81358 100644
--- a/lib/tasks/gitlab/setup.rake
+++ b/lib/tasks/gitlab/setup.rake
@@ -1,6 +1,6 @@
namespace :gitlab do
desc "GitLab | Setup production application"
- task setup: :environment do
+ task setup: :gitlab_environment do
setup_db
end
diff --git a/lib/tasks/gitlab/shell.rake b/lib/tasks/gitlab/shell.rake
index 42825f29e32..844664b12d4 100644
--- a/lib/tasks/gitlab/shell.rake
+++ b/lib/tasks/gitlab/shell.rake
@@ -1,7 +1,7 @@
namespace :gitlab do
namespace :shell do
desc "GitLab | Install or upgrade gitlab-shell"
- task :install, [:repo] => :environment do |t, args|
+ task :install, [:repo] => :gitlab_environment do |t, args|
warn_user_is_not_gitlab
default_version = Gitlab::Shell.version_required
@@ -54,32 +54,22 @@ namespace :gitlab do
# (Re)create hooks
Rake::Task['gitlab:shell:create_hooks'].invoke
- # Required for debian packaging with PKGR: Setup .ssh/environment with
- # the current PATH, so that the correct ruby version gets loaded
- # Requires to set "PermitUserEnvironment yes" in sshd config (should not
- # be an issue since it is more than likely that there are no "normal"
- # user accounts on a gitlab server). The alternative is for the admin to
- # install a ruby (1.9.3+) in the global path.
- File.open(File.join(user_home, ".ssh", "environment"), "w+") do |f|
- f.puts "PATH=#{ENV['PATH']}"
- end
-
Gitlab::Shell.ensure_secret_token!
end
desc "GitLab | Setup gitlab-shell"
- task setup: :environment do
+ task setup: :gitlab_environment do
setup
end
desc "GitLab | Build missing projects"
- task build_missing_projects: :environment do
+ task build_missing_projects: :gitlab_environment do
Project.find_each(batch_size: 1000) do |project|
path_to_repo = project.repository.path_to_repo
if File.exist?(path_to_repo)
print '-'
else
- if Gitlab::Shell.new.add_repository(project.repository_storage_path,
+ if Gitlab::Shell.new.add_repository(project.repository_storage,
project.disk_path)
print '.'
else
@@ -90,7 +80,7 @@ namespace :gitlab do
end
desc 'Create or repair repository hooks symlink'
- task create_hooks: :environment do
+ task create_hooks: :gitlab_environment do
warn_user_is_not_gitlab
puts 'Creating/Repairing hooks symlinks for all repositories'
diff --git a/lib/tasks/gitlab/sidekiq.rake b/lib/tasks/gitlab/sidekiq.rake
deleted file mode 100644
index 6cbc83b8973..00000000000
--- a/lib/tasks/gitlab/sidekiq.rake
+++ /dev/null
@@ -1,47 +0,0 @@
-namespace :gitlab do
- namespace :sidekiq do
- QUEUE = 'queue:post_receive'.freeze
-
- desc 'Drop all Sidekiq PostReceive jobs for a given project'
- task :drop_post_receive, [:project] => :environment do |t, args|
- unless args.project.present?
- abort "Please specify the project you want to drop PostReceive jobs for:\n rake gitlab:sidekiq:drop_post_receive[group/project]"
- end
- project_path = Project.find_by_full_path(args.project).repository.path_to_repo
-
- Sidekiq.redis do |redis|
- unless redis.exists(QUEUE)
- abort "Queue #{QUEUE} is empty"
- end
-
- temp_queue = "#{QUEUE}_#{Time.now.to_i}"
- redis.rename(QUEUE, temp_queue)
-
- # At this point, then post_receive queue is empty. It may be receiving
- # new jobs already. We will repopulate it with the old jobs, skipping the
- # ones we want to drop.
- dropped = 0
- while (job = redis.lpop(temp_queue))
- if repo_path(job) == project_path
- dropped += 1
- else
- redis.rpush(QUEUE, job)
- end
- end
- # The temp_queue will delete itself after we have popped all elements
- # from it
-
- puts "Dropped #{dropped} jobs containing #{project_path} from #{QUEUE}"
- end
- end
-
- def repo_path(job)
- job_args = JSON.parse(job)['args']
- if job_args
- job_args.first
- else
- nil
- end
- end
- end
-end
diff --git a/lib/tasks/gitlab/storage.rake b/lib/tasks/gitlab/storage.rake
new file mode 100644
index 00000000000..8ac73bc8ff2
--- /dev/null
+++ b/lib/tasks/gitlab/storage.rake
@@ -0,0 +1,140 @@
+namespace :gitlab do
+ namespace :storage do
+ desc 'GitLab | Storage | Migrate existing projects to Hashed Storage'
+ task migrate_to_hashed: :environment do
+ legacy_projects_count = Project.with_unmigrated_storage.count
+
+ if legacy_projects_count == 0
+ puts 'There are no projects requiring storage migration. Nothing to do!'
+
+ next
+ end
+
+ print "Enqueuing migration of #{legacy_projects_count} projects in batches of #{batch_size}"
+
+ project_id_batches do |start, finish|
+ StorageMigratorWorker.perform_async(start, finish)
+
+ print '.'
+ end
+
+ puts ' Done!'
+ end
+
+ desc 'Gitlab | Storage | Summary of existing projects using Legacy Storage'
+ task legacy_projects: :environment do
+ relation_summary('projects', Project.without_storage_feature(:repository))
+ end
+
+ desc 'Gitlab | Storage | List existing projects using Legacy Storage'
+ task list_legacy_projects: :environment do
+ projects_list('projects using Legacy Storage', Project.without_storage_feature(:repository))
+ end
+
+ desc 'Gitlab | Storage | Summary of existing projects using Hashed Storage'
+ task hashed_projects: :environment do
+ relation_summary('projects using Hashed Storage', Project.with_storage_feature(:repository))
+ end
+
+ desc 'Gitlab | Storage | List existing projects using Hashed Storage'
+ task list_hashed_projects: :environment do
+ projects_list('projects using Hashed Storage', Project.with_storage_feature(:repository))
+ end
+
+ desc 'Gitlab | Storage | Summary of project attachments using Legacy Storage'
+ task legacy_attachments: :environment do
+ relation_summary('attachments using Legacy Storage', legacy_attachments_relation)
+ end
+
+ desc 'Gitlab | Storage | List existing project attachments using Legacy Storage'
+ task list_legacy_attachments: :environment do
+ attachments_list('attachments using Legacy Storage', legacy_attachments_relation)
+ end
+
+ desc 'Gitlab | Storage | Summary of project attachments using Hashed Storage'
+ task hashed_attachments: :environment do
+ relation_summary('attachments using Hashed Storage', hashed_attachments_relation)
+ end
+
+ desc 'Gitlab | Storage | List existing project attachments using Hashed Storage'
+ task list_hashed_attachments: :environment do
+ attachments_list('attachments using Hashed Storage', hashed_attachments_relation)
+ end
+
+ def batch_size
+ ENV.fetch('BATCH', 200).to_i
+ end
+
+ def project_id_batches(&block)
+ Project.with_unmigrated_storage.in_batches(of: batch_size, start: ENV['ID_FROM'], finish: ENV['ID_TO']) do |relation| # rubocop: disable Cop/InBatches
+ ids = relation.pluck(:id)
+
+ yield ids.min, ids.max
+ end
+ end
+
+ def legacy_attachments_relation
+ Upload.joins(<<~SQL).where('projects.storage_version < :version OR projects.storage_version IS NULL', version: Project::HASHED_STORAGE_FEATURES[:attachments])
+ JOIN projects
+ ON (uploads.model_type='Project' AND uploads.model_id=projects.id)
+ SQL
+ end
+
+ def hashed_attachments_relation
+ Upload.joins(<<~SQL).where('projects.storage_version >= :version', version: Project::HASHED_STORAGE_FEATURES[:attachments])
+ JOIN projects
+ ON (uploads.model_type='Project' AND uploads.model_id=projects.id)
+ SQL
+ end
+
+ def relation_summary(relation_name, relation)
+ relation_count = relation.count
+ puts "* Found #{relation_count} #{relation_name}".color(:green)
+
+ relation_count
+ end
+
+ def projects_list(relation_name, relation)
+ relation_count = relation_summary(relation_name, relation)
+
+ projects = relation.with_route
+ limit = ENV.fetch('LIMIT', 500).to_i
+
+ return unless relation_count > 0
+
+ puts " ! Displaying first #{limit} #{relation_name}..." if relation_count > limit
+
+ counter = 0
+ projects.find_in_batches(batch_size: batch_size) do |batch|
+ batch.each do |project|
+ counter += 1
+
+ puts " - #{project.full_path} (id: #{project.id})".color(:red)
+
+ return if counter >= limit # rubocop:disable Lint/NonLocalExitFromIterator
+ end
+ end
+ end
+
+ def attachments_list(relation_name, relation)
+ relation_count = relation_summary(relation_name, relation)
+
+ limit = ENV.fetch('LIMIT', 500).to_i
+
+ return unless relation_count > 0
+
+ puts " ! Displaying first #{limit} #{relation_name}..." if relation_count > limit
+
+ counter = 0
+ relation.find_in_batches(batch_size: batch_size) do |batch|
+ batch.each do |upload|
+ counter += 1
+
+ puts " - #{upload.path} (id: #{upload.id})".color(:red)
+
+ return if counter >= limit # rubocop:disable Lint/NonLocalExitFromIterator
+ end
+ end
+ end
+ end
+end
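# For illustration: the storage tasks above read their tuning knobs from environment variables
# defined in the file itself (BATCH, ID_FROM and ID_TO for the migration batches, LIMIT for the
# listing tasks). Example invocations, assuming a source install:
#   sudo -u git -H bundle exec rake gitlab:storage:migrate_to_hashed BATCH=500 ID_FROM=50 ID_TO=100 RAILS_ENV=production
#   sudo -u git -H bundle exec rake gitlab:storage:list_legacy_projects LIMIT=100 RAILS_ENV=production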
diff --git a/lib/tasks/gitlab/tcp_check.rake b/lib/tasks/gitlab/tcp_check.rake
new file mode 100644
index 00000000000..1400f57d6b9
--- /dev/null
+++ b/lib/tasks/gitlab/tcp_check.rake
@@ -0,0 +1,20 @@
+namespace :gitlab do
+ desc "GitLab | Check TCP connectivity to a specific host and port"
+ task :tcp_check, [:host, :port] => :environment do |_t, args|
+ unless args.host && args.port
+ puts "Please specify a host and port: `rake gitlab:tcp_check[example.com,80]`".color(:red)
+ exit 1
+ end
+
+ checker = Gitlab::TcpChecker.new(args.host, args.port)
+
+ if checker.check
+ puts "TCP connection from #{checker.local} to #{checker.remote} succeeded".color(:green)
+ else
+ puts "TCP connection to #{checker.remote} failed: #{checker.error}".color(:red)
+ puts
+ puts 'Check that host and port are correct, and that the traffic is permitted through any firewalls.'
+ exit 1
+ end
+ end
+end
diff --git a/lib/tasks/gitlab/traces.rake b/lib/tasks/gitlab/traces.rake
new file mode 100644
index 00000000000..fd2a4f2d11a
--- /dev/null
+++ b/lib/tasks/gitlab/traces.rake
@@ -0,0 +1,24 @@
+require 'logger'
+require 'resolv-replace'
+
+desc "GitLab | Archive legacy traces to trace artifacts"
+namespace :gitlab do
+ namespace :traces do
+ task archive: :environment do
+ logger = Logger.new(STDOUT)
+ logger.info('Archiving legacy traces')
+
+ Ci::Build.finished
+ .where('NOT EXISTS (?)',
+ Ci::JobArtifact.select(1).trace.where('ci_builds.id = ci_job_artifacts.job_id'))
+ .order(id: :asc)
+ .find_in_batches(batch_size: 1000) do |jobs|
+ job_ids = jobs.map { |job| [job.id] }
+
+ ArchiveTraceWorker.bulk_perform_async(job_ids)
+
+ logger.info("Scheduled #{job_ids.count} jobs. From #{job_ids.min} to #{job_ids.max}")
+ end
+ end
+ end
+end
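# For illustration: bulk_perform_async takes an array of argument arrays, one per job, which is
# why each build id is wrapped in its own array above. A minimal sketch of the equivalent call
# with made-up ids:
ArchiveTraceWorker.bulk_perform_async([[1], [2], [3]]) # schedules three ArchiveTraceWorker jobs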
diff --git a/lib/tasks/gitlab/update_templates.rake b/lib/tasks/gitlab/update_templates.rake
index f44abc2b81b..a25f7ce59c7 100644
--- a/lib/tasks/gitlab/update_templates.rake
+++ b/lib/tasks/gitlab/update_templates.rake
@@ -10,6 +10,7 @@ namespace :gitlab do
puts "This rake task is not meant fo production instances".red
exit(1)
end
+
admin = User.find_by(admin: true)
unless admin
diff --git a/lib/tasks/gitlab/uploads/check.rake b/lib/tasks/gitlab/uploads/check.rake
new file mode 100644
index 00000000000..2be2ec7f9c9
--- /dev/null
+++ b/lib/tasks/gitlab/uploads/check.rake
@@ -0,0 +1,8 @@
+namespace :gitlab do
+ namespace :uploads do
+ desc 'GitLab | Uploads | Check integrity of uploaded files'
+ task check: :environment do
+ Gitlab::Verify::RakeTask.run!(Gitlab::Verify::Uploads)
+ end
+ end
+end
diff --git a/lib/tasks/gitlab/users.rake b/lib/tasks/gitlab/users.rake
deleted file mode 100644
index 3a16ace60bd..00000000000
--- a/lib/tasks/gitlab/users.rake
+++ /dev/null
@@ -1,11 +0,0 @@
-namespace :gitlab do
- namespace :users do
- desc "GitLab | Clear the authentication token for all users"
- task clear_all_authentication_tokens: :environment do |t, args|
- # Do small batched updates because these updates will be slow and locking
- User.select(:id).find_in_batches(batch_size: 100) do |batch|
- User.where(id: batch.map(&:id)).update_all(authentication_token: nil)
- end
- end
- end
-end
diff --git a/lib/tasks/gitlab/workhorse.rake b/lib/tasks/gitlab/workhorse.rake
index e7ac0b5859f..b917a293095 100644
--- a/lib/tasks/gitlab/workhorse.rake
+++ b/lib/tasks/gitlab/workhorse.rake
@@ -1,11 +1,13 @@
namespace :gitlab do
namespace :workhorse do
desc "GitLab | Install or upgrade gitlab-workhorse"
- task :install, [:dir, :repo] => :environment do |t, args|
+ task :install, [:dir, :repo] => :gitlab_environment do |t, args|
warn_user_is_not_gitlab
+
unless args.dir.present?
abort %(Please specify the directory where you want to install gitlab-workhorse:\n rake "gitlab:workhorse:install[/home/git/gitlab-workhorse]")
end
+
args.with_defaults(repo: 'https://gitlab.com/gitlab-org/gitlab-workhorse.git')
version = Gitlab::Workhorse.version
diff --git a/lib/tasks/haml-lint.rake b/lib/tasks/haml-lint.rake
index ad2d034b0b4..5c0cc4990fc 100644
--- a/lib/tasks/haml-lint.rake
+++ b/lib/tasks/haml-lint.rake
@@ -2,5 +2,14 @@ unless Rails.env.production?
require 'haml_lint/rake_task'
require 'haml_lint/inline_javascript'
+ # Workaround for warnings from parser/current
+ # TODO: Remove this after we update parser gem
+ task :haml_lint do
+ require 'parser'
+ def Parser.warn(*args)
+ puts(*args) # static-analysis ignores stdout if status is 0
+ end
+ end
+
HamlLint::RakeTask.new
end
diff --git a/lib/tasks/import.rake b/lib/tasks/import.rake
index 4d485108cf6..aafbe52e5f8 100644
--- a/lib/tasks/import.rake
+++ b/lib/tasks/import.rake
@@ -7,14 +7,16 @@ class GithubImport
end
def initialize(token, gitlab_username, project_path, extras)
- @options = { token: token, verbose: true }
+ @options = { token: token }
@project_path = project_path
@current_user = User.find_by_username(gitlab_username)
@github_repo = extras.empty? ? nil : extras.first
end
def run!
- @repo = GithubRepos.new(@options, @current_user, @github_repo).choose_one!
+ @repo = GithubRepos
+ .new(@options[:token], @current_user, @github_repo)
+ .choose_one!
raise 'No repo found!' unless @repo
@@ -28,7 +30,7 @@ class GithubImport
private
def show_warning!
- puts "This will import GitHub #{@repo['full_name'].bright} into GitLab #{@project_path.bright} as #{@current_user.name}"
+ puts "This will import GitHub #{@repo.full_name.bright} into GitLab #{@project_path.bright} as #{@current_user.name}"
puts "Permission checks are ignored. Press any key to continue.".color(:red)
STDIN.getch
@@ -39,13 +41,21 @@ class GithubImport
def import!
@project.force_import_start
+ import_success = false
+
timings = Benchmark.measure do
- Github::Import.new(@project, @options).execute
+ import_success = Gitlab::GithubImport::SequentialImporter
+ .new(@project, token: @options[:token])
+ .execute
end
- puts "Import finished. Timings: #{timings}".color(:green)
-
- @project.import_finish
+ if import_success
+ @project.import_finish
+ puts "Import finished. Timings: #{timings}".color(:green)
+ else
+ puts "Import was not successful. Errors were as follows:"
+ puts @project.import_error
+ end
end
def new_project
@@ -53,18 +63,23 @@ class GithubImport
namespace_path, _sep, name = @project_path.rpartition('/')
namespace = find_or_create_namespace(namespace_path)
- Projects::CreateService.new(
+ project = Projects::CreateService.new(
@current_user,
name: name,
path: name,
- description: @repo['description'],
+ description: @repo.description,
namespace_id: namespace.id,
visibility_level: visibility_level,
- import_type: 'github',
- import_source: @repo['full_name'],
- import_url: @repo['clone_url'].sub('://', "://#{@options[:token]}@"),
- skip_wiki: @repo['has_wiki']
+ skip_wiki: @repo.has_wiki
).execute
+
+ project.update!(
+ import_type: 'github',
+ import_source: @repo.full_name,
+ import_url: @repo.clone_url.sub('://', "://#{@options[:token]}@")
+ )
+
+ project
end
end
@@ -80,13 +95,15 @@ class GithubImport
end
def visibility_level
- @repo['private'] ? Gitlab::VisibilityLevel::PRIVATE : Gitlab::CurrentSettings.current_application_settings.default_project_visibility
+ @repo.private ? Gitlab::VisibilityLevel::PRIVATE : Gitlab::CurrentSettings.current_application_settings.default_project_visibility
end
end
class GithubRepos
- def initialize(options, current_user, github_repo)
- @options = options
+ def initialize(token, current_user, github_repo)
+ @client = Gitlab::GithubImport::Client.new(token)
+ @client.octokit.auto_paginate = true
+
@current_user = current_user
@github_repo = github_repo
end
@@ -95,17 +112,17 @@ class GithubRepos
return found_github_repo if @github_repo
repos.each do |repo|
- print "ID: #{repo['id'].to_s.bright}".color(:green)
- print "\tName: #{repo['full_name']}\n".color(:green)
+ print "ID: #{repo.id.to_s.bright}".color(:green)
+ print "\tName: #{repo.full_name}\n".color(:green)
end
print 'ID? '.bright
- repos.find { |repo| repo['id'] == repo_id }
+ repos.find { |repo| repo.id == repo_id }
end
def found_github_repo
- repos.find { |repo| repo['full_name'] == @github_repo }
+ repos.find { |repo| repo.full_name == @github_repo }
end
def repo_id
@@ -113,7 +130,7 @@ class GithubRepos
end
def repos
- Github::Repositories.new(@options).fetch
+ @client.octokit.list_repositories
end
end
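# For illustration: the importer now goes through Gitlab::GithubImport::Client, whose underlying
# Octokit client returns objects rather than hashes, hence repo.full_name instead of
# repo['full_name']. A minimal sketch using only the calls visible above (the token env var name
# is hypothetical):
client = Gitlab::GithubImport::Client.new(ENV['GITHUB_TOKEN'])
client.octokit.auto_paginate = true
client.octokit.list_repositories.each do |repo|
  puts "#{repo.id}\t#{repo.full_name}"
end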
diff --git a/lib/tasks/lint.rake b/lib/tasks/lint.rake
index 7b63e93db0e..fe5032cae18 100644
--- a/lib/tasks/lint.rake
+++ b/lib/tasks/lint.rake
@@ -1,8 +1,69 @@
unless Rails.env.production?
namespace :lint do
+ task :static_verification_env do
+ ENV['STATIC_VERIFICATION'] = 'true'
+ end
+
+ desc "GitLab | lint | Static verification"
+ task static_verification: %w[
+ lint:static_verification_env
+ dev:load
+ ] do
+ Gitlab::Utils::Override.verify!
+ end
+
desc "GitLab | lint | Lint JavaScript files using ESLint"
task :javascript do
Rake::Task['eslint'].invoke
end
+
+ desc "GitLab | lint | Run several lint checks"
+ task :all do
+ status = 0
+
+ %w[
+ config_lint
+ haml_lint
+ scss_lint
+ flay
+ gettext:lint
+ lint:static_verification
+ ].each do |task|
+ pid = Process.fork do
+ rd, wr = IO.pipe
+ stdout = $stdout.dup
+ stderr = $stderr.dup
+ $stdout.reopen(wr)
+ $stderr.reopen(wr)
+
+ begin
+ begin
+ Rake::Task[task].invoke
+ rescue RuntimeError # The haml_lint tasks raise a RuntimeError
+ exit(1)
+ end
+ rescue SystemExit => ex
+ msg = "*** Rake task #{task} failed with the following error(s):"
+ raise ex
+ ensure
+ $stdout.reopen(stdout)
+ $stderr.reopen(stderr)
+ wr.close
+
+ if msg
+ warn "\n#{msg}\n\n"
+ IO.copy_stream(rd, $stderr)
+ else
+ IO.copy_stream(rd, $stdout)
+ end
+ end
+ end
+
+ Process.waitpid(pid)
+ status += $?.exitstatus
+ end
+
+ exit(status)
+ end
end
end
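# For illustration: lint:all forks each sub-task so that one failing linter cannot abort the
# whole run; the child's output is funnelled through a pipe and the parent sums the exit
# statuses. A minimal sketch of just the status accumulation, assuming the same fork/waitpid
# structure as above:
status = 0
%w[config_lint haml_lint].each do |task|
  pid = Process.fork { Rake::Task[task].invoke }
  Process.waitpid(pid)
  status += $?.exitstatus
end
exit(status)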
diff --git a/lib/tasks/migrate/migrate_iids.rake b/lib/tasks/migrate/migrate_iids.rake
index fc2cea8c016..aa2d01730d7 100644
--- a/lib/tasks/migrate/migrate_iids.rake
+++ b/lib/tasks/migrate/migrate_iids.rake
@@ -4,6 +4,7 @@ task migrate_iids: :environment do
Issue.where(iid: nil).find_each(batch_size: 100) do |issue|
begin
issue.set_iid
+
if issue.update_attribute(:iid, issue.iid)
print '.'
else
@@ -19,6 +20,7 @@ task migrate_iids: :environment do
MergeRequest.where(iid: nil).find_each(batch_size: 100) do |mr|
begin
mr.set_iid
+
if mr.update_attribute(:iid, mr.iid)
print '.'
else
@@ -34,6 +36,7 @@ task migrate_iids: :environment do
Milestone.where(iid: nil).find_each(batch_size: 100) do |m|
begin
m.set_iid
+
if m.update_attribute(:iid, m.iid)
print '.'
else
diff --git a/lib/tasks/migrate/setup_postgresql.rake b/lib/tasks/migrate/setup_postgresql.rake
index 9cc986535e1..1c7a8a90f5c 100644
--- a/lib/tasks/migrate/setup_postgresql.rake
+++ b/lib/tasks/migrate/setup_postgresql.rake
@@ -1,18 +1,22 @@
-require Rails.root.join('lib/gitlab/database')
-require Rails.root.join('lib/gitlab/database/migration_helpers')
-require Rails.root.join('db/migrate/20151007120511_namespaces_projects_path_lower_indexes')
-require Rails.root.join('db/migrate/20151008110232_add_users_lower_username_email_indexes')
-require Rails.root.join('db/migrate/20161212142807_add_lower_path_index_to_routes')
-require Rails.root.join('db/migrate/20170317203554_index_routes_path_for_like')
-require Rails.root.join('db/migrate/20170724214302_add_lower_path_index_to_redirect_routes')
-require Rails.root.join('db/migrate/20170503185032_index_redirect_routes_path_for_like')
-
desc 'GitLab | Sets up PostgreSQL'
task setup_postgresql: :environment do
+ require Rails.root.join('db/migrate/20151007120511_namespaces_projects_path_lower_indexes')
+ require Rails.root.join('db/migrate/20151008110232_add_users_lower_username_email_indexes')
+ require Rails.root.join('db/migrate/20161212142807_add_lower_path_index_to_routes')
+ require Rails.root.join('db/migrate/20170317203554_index_routes_path_for_like')
+ require Rails.root.join('db/migrate/20170724214302_add_lower_path_index_to_redirect_routes')
+ require Rails.root.join('db/migrate/20170503185032_index_redirect_routes_path_for_like')
+ require Rails.root.join('db/migrate/20171220191323_add_index_on_namespaces_lower_name.rb')
+ require Rails.root.join('db/migrate/20180113220114_rework_redirect_routes_indexes.rb')
+ require Rails.root.join('db/migrate/20180215181245_users_name_lower_index.rb')
+
NamespacesProjectsPathLowerIndexes.new.up
AddUsersLowerUsernameEmailIndexes.new.up
AddLowerPathIndexToRoutes.new.up
IndexRoutesPathForLike.new.up
AddLowerPathIndexToRedirectRoutes.new.up
IndexRedirectRoutesPathForLike.new.up
+ AddIndexOnNamespacesLowerName.new.up
+ ReworkRedirectRoutesIndexes.new.up
+ UsersNameLowerIndex.new.up
end
diff --git a/lib/tasks/plugins.rake b/lib/tasks/plugins.rake
new file mode 100644
index 00000000000..e73dd7e68df
--- /dev/null
+++ b/lib/tasks/plugins.rake
@@ -0,0 +1,16 @@
+namespace :plugins do
+ desc 'Validate existing plugins'
+ task validate: :environment do
+ puts 'Validating plugins from /plugins directory'
+
+ Gitlab::Plugin.files.each do |file|
+ success, message = Gitlab::Plugin.execute(file, Gitlab::DataBuilder::Push::SAMPLE_DATA)
+
+ if success
+      puts "* #{file} succeeded (zero exit code)."
+ else
+ puts "* #{file} failure (non-zero exit code). #{message}"
+ end
+ end
+ end
+end
diff --git a/lib/tasks/tokens.rake b/lib/tasks/tokens.rake
index ad1818ff1fa..693597afdf8 100644
--- a/lib/tasks/tokens.rake
+++ b/lib/tasks/tokens.rake
@@ -1,12 +1,7 @@
require_relative '../../app/models/concerns/token_authenticatable.rb'
namespace :tokens do
- desc "Reset all GitLab user auth tokens"
- task reset_all_auth: :environment do
- reset_all_users_token(:reset_authentication_token!)
- end
-
- desc "Reset all GitLab email tokens"
+ desc "Reset all GitLab incoming email tokens"
task reset_all_email: :environment do
reset_all_users_token(:reset_incoming_email_token!)
end
@@ -31,11 +26,6 @@ class TmpUser < ActiveRecord::Base
self.table_name = 'users'
- def reset_authentication_token!
- write_new_token(:authentication_token)
- save!(validate: false)
- end
-
def reset_incoming_email_token!
write_new_token(:incoming_email_token)
save!(validate: false)