Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/admin/appearances_controller_spec.rb | 48
-rw-r--r--  spec/controllers/concerns/send_file_upload_spec.rb | 2
-rw-r--r--  spec/controllers/dashboard/milestones_controller_spec.rb | 2
-rw-r--r--  spec/controllers/google_api/authorizations_controller_spec.rb | 60
-rw-r--r--  spec/controllers/graphql_controller_spec.rb | 112
-rw-r--r--  spec/controllers/groups/clusters/applications_controller_spec.rb | 100
-rw-r--r--  spec/controllers/groups/shared_projects_controller_spec.rb | 2
-rw-r--r--  spec/controllers/groups_controller_spec.rb | 4
-rw-r--r--  spec/controllers/omniauth_callbacks_controller_spec.rb | 2
-rw-r--r--  spec/controllers/projects/autocomplete_sources_controller_spec.rb | 31
-rw-r--r--  spec/controllers/projects/blob_controller_spec.rb | 58
-rw-r--r--  spec/controllers/projects/clusters/applications_controller_spec.rb | 97
-rw-r--r--  spec/controllers/projects/group_links_controller_spec.rb | 37
-rw-r--r--  spec/controllers/projects/settings/operations_controller_spec.rb | 63
-rw-r--r--  spec/controllers/snippets_controller_spec.rb | 4
-rw-r--r--  spec/controllers/users_controller_spec.rb | 34
-rw-r--r--  spec/factories/ci/group_variables.rb | 1
-rw-r--r--  spec/factories/ci/variables.rb | 1
-rw-r--r--  spec/factories/merge_requests.rb | 9
-rw-r--r--  spec/features/admin/admin_runners_spec.rb | 50
-rw-r--r--  spec/features/dashboard/activity_spec.rb | 17
-rw-r--r--  spec/features/group_variables_spec.rb | 2
-rw-r--r--  spec/features/issues/gfm_autocomplete_spec.rb | 84
-rw-r--r--  spec/features/issues/user_creates_issue_spec.rb | 18
-rw-r--r--  spec/features/issues/user_uses_quick_actions_spec.rb | 4
-rw-r--r--  spec/features/issues_spec.rb | 4
-rw-r--r--  spec/features/markdown/copy_as_gfm_spec.rb | 11
-rw-r--r--  spec/features/merge_request/maintainer_edits_fork_spec.rb | 4
-rw-r--r--  spec/features/merge_request/user_posts_diff_notes_spec.rb | 2
-rw-r--r--  spec/features/merge_request/user_posts_notes_spec.rb | 5
-rw-r--r--  spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb | 8
-rw-r--r--  spec/features/merge_request/user_sees_versions_spec.rb | 6
-rw-r--r--  spec/features/merge_requests/user_lists_merge_requests_spec.rb | 4
-rw-r--r--  spec/features/profiles/active_sessions_spec.rb | 48
-rw-r--r--  spec/features/project_variables_spec.rb | 2
-rw-r--r--  spec/features/projects/blobs/blob_show_spec.rb | 5
-rw-r--r--  spec/features/projects/blobs/edit_spec.rb | 9
-rw-r--r--  spec/features/projects/clusters/applications_spec.rb | 80
-rw-r--r--  spec/features/projects/environments/environment_spec.rb | 6
-rw-r--r--  spec/features/projects/environments/environments_spec.rb | 8
-rw-r--r--  spec/features/projects/members/invite_group_spec.rb | 2
-rw-r--r--  spec/features/projects/pipelines/pipeline_spec.rb | 2
-rw-r--r--  spec/features/projects/settings/operations_settings_spec.rb | 77
-rw-r--r--  spec/features/projects/settings/project_settings_spec.rb | 43
-rw-r--r--  spec/features/projects/settings/user_manages_group_links_spec.rb | 1
-rw-r--r--  spec/features/projects/user_sees_sidebar_spec.rb | 102
-rw-r--r--  spec/features/security/group/private_access_spec.rb | 32
-rw-r--r--  spec/finders/admin/runners_finder_spec.rb | 8
-rw-r--r--  spec/finders/autocomplete/acts_as_taggable_on/tags_finder_spec.rb | 66
-rw-r--r--  spec/finders/issues_finder_spec.rb | 55
-rw-r--r--  spec/finders/merge_requests_finder_spec.rb | 516
-rw-r--r--  spec/fixtures/security-reports/remediations/gl-dependency-scanning-report.json | 104
-rw-r--r--  spec/fixtures/security-reports/remediations/remediation.patch | 180
-rw-r--r--  spec/fixtures/security-reports/remediations/yarn.lock | 104
-rw-r--r--  spec/fixtures/trace/sample_trace | 18
-rw-r--r--  spec/frontend/gfm_auto_complete_spec.js | 94
-rw-r--r--  spec/graphql/resolvers/metadata_resolver_spec.rb | 11
-rw-r--r--  spec/graphql/types/metadata_type_spec.rb | 5
-rw-r--r--  spec/graphql/types/query_type_spec.rb | 15
-rw-r--r--  spec/helpers/emails_helper_spec.rb | 54
-rw-r--r--  spec/helpers/preferences_helper_spec.rb | 16
-rw-r--r--  spec/helpers/projects_helper_spec.rb | 50
-rw-r--r--  spec/javascripts/api_spec.js | 34
-rw-r--r--  spec/javascripts/awards_handler_spec.js | 11
-rw-r--r--  spec/javascripts/badges/store/actions_spec.js | 4
-rw-r--r--  spec/javascripts/behaviors/copy_as_gfm_spec.js | 4
-rw-r--r--  spec/javascripts/boards/components/issue_due_date_spec.js | 4
-rw-r--r--  spec/javascripts/clusters/components/application_row_spec.js | 10
-rw-r--r--  spec/javascripts/clusters/components/applications_spec.js | 169
-rw-r--r--  spec/javascripts/clusters/services/mock_data.js | 12
-rw-r--r--  spec/javascripts/clusters/stores/clusters_store_spec.js | 1
-rw-r--r--  spec/javascripts/diffs/components/app_spec.js | 100
-rw-r--r--  spec/javascripts/diffs/components/changed_files_dropdown_spec.js | 1
-rw-r--r--  spec/javascripts/diffs/components/compare_versions_dropdown_spec.js | 153
-rw-r--r--  spec/javascripts/diffs/components/diff_content_spec.js | 2
-rw-r--r--  spec/javascripts/diffs/components/diff_file_header_spec.js | 151
-rw-r--r--  spec/javascripts/diffs/components/diff_file_spec.js | 2
-rw-r--r--  spec/javascripts/diffs/components/edit_button_spec.js | 62
-rw-r--r--  spec/javascripts/diffs/components/hidden_files_warning_spec.js | 49
-rw-r--r--  spec/javascripts/diffs/components/inline_diff_view_spec.js | 2
-rw-r--r--  spec/javascripts/diffs/components/parallel_diff_view_spec.js | 6
-rw-r--r--  spec/javascripts/diffs/mock_data/diff_discussions.js | 1
-rw-r--r--  spec/javascripts/diffs/store/actions_spec.js | 130
-rw-r--r--  spec/javascripts/diffs/store/mutations_spec.js | 62
-rw-r--r--  spec/javascripts/diffs/store/utils_spec.js | 45
-rw-r--r--  spec/javascripts/dirty_submit/dirty_submit_form_spec.js | 2
-rw-r--r--  spec/javascripts/emoji_spec.js | 189
-rw-r--r--  spec/javascripts/environments/confirm_rollback_modal_spec.js | 70
-rw-r--r--  spec/javascripts/environments/environment_rollback_spec.js | 32
-rw-r--r--  spec/javascripts/error_tracking/components/error_tracking_list_spec.js | 22
-rw-r--r--  spec/javascripts/error_tracking_settings/components/app_spec.js | 63
-rw-r--r--  spec/javascripts/error_tracking_settings/components/error_tracking_form_spec.js | 91
-rw-r--r--  spec/javascripts/error_tracking_settings/components/project_dropdown_spec.js | 109
-rw-r--r--  spec/javascripts/error_tracking_settings/mock.js | 92
-rw-r--r--  spec/javascripts/error_tracking_settings/store/actions_spec.js | 191
-rw-r--r--  spec/javascripts/error_tracking_settings/store/getters_spec.js | 93
-rw-r--r--  spec/javascripts/error_tracking_settings/store/mutation_spec.js | 82
-rw-r--r--  spec/javascripts/error_tracking_settings/utils_spec.js | 29
-rw-r--r--  spec/javascripts/fixtures/autocomplete_sources.rb | 40
-rw-r--r--  spec/javascripts/fixtures/emojis.rb | 16
-rw-r--r--  spec/javascripts/fixtures/static_fixtures.rb | 22
-rw-r--r--  spec/javascripts/ide/components/new_dropdown/modal_spec.js | 55
-rw-r--r--  spec/javascripts/ide/lib/files_spec.js | 77
-rw-r--r--  spec/javascripts/ide/stores/actions/merge_request_spec.js | 93
-rw-r--r--  spec/javascripts/ide/stores/actions/project_spec.js | 1
-rw-r--r--  spec/javascripts/ide/stores/actions/tree_spec.js | 7
-rw-r--r--  spec/javascripts/ide/stores/actions_spec.js | 28
-rw-r--r--  spec/javascripts/ide/stores/mutations_spec.js | 14
-rw-r--r--  spec/javascripts/import_projects/components/import_projects_table_spec.js | 8
-rw-r--r--  spec/javascripts/import_projects/components/imported_project_table_row_spec.js | 3
-rw-r--r--  spec/javascripts/import_projects/components/provider_repo_table_row_spec.js | 7
-rw-r--r--  spec/javascripts/jobs/store/getters_spec.js | 55
-rw-r--r--  spec/javascripts/lib/utils/number_utility_spec.js | 11
-rw-r--r--  spec/javascripts/monitoring/charts/area_spec.js | 64
-rw-r--r--  spec/javascripts/notes/components/note_form_spec.js | 157
-rw-r--r--  spec/javascripts/notes/stores/getters_spec.js | 8
-rw-r--r--  spec/javascripts/notes/stores/mutation_spec.js | 17
-rw-r--r--  spec/javascripts/persistent_user_callout_spec.js | 88
-rw-r--r--  spec/javascripts/test_bundle.js | 3
-rw-r--r--  spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js | 20
-rw-r--r--  spec/javascripts/vue_shared/components/issue/related_issuable_item_spec.js | 194
-rw-r--r--  spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js | 111
-rw-r--r--  spec/javascripts/vue_shared/components/table_pagination_spec.js | 182
-rw-r--r--  spec/lib/constraints/project_url_constrainer_spec.rb | 4
-rw-r--r--  spec/lib/event_filter_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/auth/ldap/config_spec.rb | 153
-rw-r--r--  spec/lib/gitlab/checks/branch_check_spec.rb | 124
-rw-r--r--  spec/lib/gitlab/ci/build/policy/changes_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/build/policy/refs_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/ci/build/policy/variables_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/ci/config/external/file/base_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/ci/config/external/file/local_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/ci/config/external/file/project_spec.rb | 47
-rw-r--r--  spec/lib/gitlab/ci/config/external/file/remote_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/ci/config/external/file/template_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/ci/config/external/mapper_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/ci/config/external/processor_spec.rb | 82
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/build_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/templates_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/ci/variables/collection/item_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/ci/variables/collection_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/current_settings_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/danger/helper_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/dependency_linker/composer_json_linker_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/dependency_linker/gemfile_linker_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/dependency_linker/gemspec_linker_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/dependency_linker/parser/gemfile_spec.rb | 44
-rw-r--r--  spec/lib/gitlab/dependency_linker/podfile_linker_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/dependency_linker/podspec_linker_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/diff/file_spec.rb | 201
-rw-r--r--  spec/lib/gitlab/git/blob_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/git/commit_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/git/tree_spec.rb | 74
-rw-r--r--  spec/lib/gitlab/gitaly_client/storage_settings_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/hashed_storage/migrator_spec.rb | 114
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 2
-rw-r--r--  spec/lib/gitlab/import_export/merge_request_parser_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/json_cache_spec.rb | 28
-rw-r--r--  spec/lib/gitlab/kubernetes/kube_client_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/path_regex_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/quick_actions/command_definition_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb | 74
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/shutdown_spec.rb | 88
-rw-r--r--  spec/lib/gitlab/sidekiq_signals_spec.rb | 69
-rw-r--r--  spec/lib/gitlab_spec.rb | 14
-rw-r--r--  spec/lib/object_storage/direct_upload_spec.rb | 2
-rw-r--r--  spec/mailers/abuse_report_mailer_spec.rb | 25
-rw-r--r--  spec/mailers/email_rejection_mailer_spec.rb | 16
-rw-r--r--  spec/mailers/emails/auto_devops_spec.rb | 3
-rw-r--r--  spec/mailers/emails/issues_spec.rb | 9
-rw-r--r--  spec/mailers/notify_spec.rb | 162
-rw-r--r--  spec/mailers/repository_check_mailer_spec.rb | 7
-rw-r--r--  spec/models/active_session_spec.rb | 5
-rw-r--r--  spec/models/appearance_spec.rb | 18
-rw-r--r--  spec/models/board_group_recent_visit_spec.rb | 22
-rw-r--r--  spec/models/board_project_recent_visit_spec.rb | 22
-rw-r--r--  spec/models/ci/bridge_spec.rb | 15
-rw-r--r--  spec/models/ci/build_spec.rb | 294
-rw-r--r--  spec/models/ci/build_trace_chunk_spec.rb | 4
-rw-r--r--  spec/models/ci/group_variable_spec.rb | 1
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 87
-rw-r--r--  spec/models/ci/variable_spec.rb | 1
-rw-r--r--  spec/models/clusters/applications/jupyter_spec.rb | 5
-rw-r--r--  spec/models/clusters/applications/knative_spec.rb | 26
-rw-r--r--  spec/models/clusters/platforms/kubernetes_spec.rb | 16
-rw-r--r--  spec/models/concerns/has_ref_spec.rb | 20
-rw-r--r--  spec/models/concerns/has_variable_spec.rb | 2
-rw-r--r--  spec/models/concerns/issuable_spec.rb | 97
-rw-r--r--  spec/models/concerns/maskable_spec.rb | 76
-rw-r--r--  spec/models/concerns/milestoneish_spec.rb | 142
-rw-r--r--  spec/models/concerns/sortable_spec.rb | 2
-rw-r--r--  spec/models/concerns/token_authenticatable_strategies/base_spec.rb | 32
-rw-r--r--  spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb | 28
-rw-r--r--  spec/models/error_tracking/project_error_tracking_setting_spec.rb | 35
-rw-r--r--  spec/models/issue/metrics_spec.rb | 6
-rw-r--r--  spec/models/merge_request_diff_spec.rb | 14
-rw-r--r--  spec/models/merge_request_spec.rb | 6
-rw-r--r--  spec/models/milestone_spec.rb | 8
-rw-r--r--  spec/models/project_services/jira_service_spec.rb | 3
-rw-r--r--  spec/models/project_services/prometheus_service_spec.rb | 61
-rw-r--r--  spec/models/project_spec.rb | 52
-rw-r--r--  spec/models/protected_branch_spec.rb | 28
-rw-r--r--  spec/models/repository_spec.rb | 16
-rw-r--r--  spec/policies/commit_policy_spec.rb | 55
-rw-r--r--  spec/policies/global_policy_spec.rb | 12
-rw-r--r--  spec/policies/group_policy_spec.rb | 40
-rw-r--r--  spec/policies/note_policy_spec.rb | 94
-rw-r--r--  spec/policies/project_policy_spec.rb | 23
-rw-r--r--  spec/presenters/blobs/unfold_presenter_spec.rb | 159
-rw-r--r--  spec/presenters/group_clusterable_presenter_spec.rb | 8
-rw-r--r--  spec/presenters/merge_request_presenter_spec.rb | 23
-rw-r--r--  spec/presenters/project_clusterable_presenter_spec.rb | 8
-rw-r--r--  spec/requests/api/commits_spec.rb | 15
-rw-r--r--  spec/requests/api/graphql/metadata_query_spec.rb | 32
-rw-r--r--  spec/requests/api/graphql_spec.rb | 86
-rw-r--r--  spec/requests/api/group_variables_spec.rb | 4
-rw-r--r--  spec/requests/api/issues_spec.rb | 220
-rw-r--r--  spec/requests/api/jobs_spec.rb | 43
-rw-r--r--  spec/requests/api/merge_requests_spec.rb | 256
-rw-r--r--  spec/requests/api/projects_spec.rb | 71
-rw-r--r--  spec/requests/api/release/links_spec.rb | 16
-rw-r--r--  spec/requests/api/runner_spec.rb | 20
-rw-r--r--  spec/requests/api/runners_spec.rb | 33
-rw-r--r--  spec/requests/api/snippets_spec.rb | 76
-rw-r--r--  spec/requests/api/todos_spec.rb | 52
-rw-r--r--  spec/requests/api/variables_spec.rb | 4
-rw-r--r--  spec/requests/api/version_spec.rb | 18
-rw-r--r--  spec/requests/git_http_spec.rb | 134
-rw-r--r--  spec/serializers/merge_request_for_pipeline_entity_spec.rb | 29
-rw-r--r--  spec/serializers/pipeline_entity_spec.rb | 45
-rw-r--r--  spec/serializers/pipeline_serializer_spec.rb | 38
-rw-r--r--  spec/serializers/provider_repo_entity_spec.rb | 2
-rw-r--r--  spec/services/boards/visits/latest_service_spec.rb | 12
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb | 4
-rw-r--r--  spec/services/clusters/applications/patch_service_spec.rb | 128
-rw-r--r--  spec/services/clusters/applications/update_service_spec.rb | 72
-rw-r--r--  spec/services/error_tracking/list_projects_service_spec.rb | 4
-rw-r--r--  spec/services/files/multi_service_spec.rb | 16
-rw-r--r--  spec/services/groups/transfer_service_spec.rb | 29
-rw-r--r--  spec/services/issuable/common_system_notes_service_spec.rb | 4
-rw-r--r--  spec/services/issues/build_service_spec.rb | 76
-rw-r--r--  spec/services/issues/update_service_spec.rb | 6
-rw-r--r--  spec/services/merge_requests/build_service_spec.rb | 9
-rw-r--r--  spec/services/merge_requests/create_service_spec.rb | 4
-rw-r--r--  spec/services/merge_requests/merge_service_spec.rb | 2
-rw-r--r--  spec/services/merge_requests/merge_to_ref_service_spec.rb | 96
-rw-r--r--  spec/services/merge_requests/refresh_service_spec.rb | 4
-rw-r--r--  spec/services/merge_requests/update_service_spec.rb | 6
-rw-r--r--  spec/services/notes/create_service_spec.rb | 13
-rw-r--r--  spec/services/notes/quick_actions_service_spec.rb | 24
-rw-r--r--  spec/services/notification_service_spec.rb | 2
-rw-r--r--  spec/services/projects/group_links/create_service_spec.rb | 8
-rw-r--r--  spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb | 6
-rw-r--r--  spec/services/projects/hashed_storage/migrate_repository_service_spec.rb | 12
-rw-r--r--  spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb | 100
-rw-r--r--  spec/services/projects/hashed_storage/rollback_repository_service_spec.rb | 111
-rw-r--r--  spec/services/projects/hashed_storage/rollback_service_spec.rb | 57
-rw-r--r--  spec/services/projects/operations/update_service_spec.rb | 50
-rw-r--r--  spec/services/quick_actions/interpret_service_spec.rb | 10
-rw-r--r--  spec/services/suggestions/apply_service_spec.rb | 11
-rw-r--r--  spec/services/suggestions/create_service_spec.rb | 47
-rw-r--r--  spec/services/system_note_service_spec.rb | 9
-rw-r--r--  spec/spec_helper.rb | 9
-rw-r--r--  spec/support/api/schema_matcher.rb | 2
-rw-r--r--  spec/support/database_cleaner.rb | 56
-rw-r--r--  spec/support/db_cleaner.rb | 50
-rw-r--r--  spec/support/features/reportable_note_shared_examples.rb | 2
-rw-r--r--  spec/support/features/variable_list_shared_examples.rb | 8
-rw-r--r--  spec/support/helpers/file_mover_helpers.rb | 12
-rw-r--r--  spec/support/helpers/graphql_helpers.rb | 20
-rw-r--r--  spec/support/helpers/javascript_fixtures_helpers.rb | 27
-rw-r--r--  spec/support/helpers/kubernetes_helpers.rb | 15
-rw-r--r--  spec/support/helpers/login_helpers.rb | 13
-rw-r--r--  spec/support/import_export/export_file_helper.rb | 2
-rw-r--r--  spec/support/matchers/access_matchers.rb | 35
-rw-r--r--  spec/support/pg_stat_activity.rb | 19
-rw-r--r--  spec/support/shared_contexts/services_shared_context.rb | 8
-rw-r--r--  spec/support/shared_examples/issuable_shared_examples.rb | 2
-rw-r--r--  spec/support/shared_examples/notify_shared_examples.rb | 28
-rw-r--r--  spec/support/shared_examples/requests/api/discussions.rb | 31
-rw-r--r--  spec/support/shared_examples/requests/api/merge_requests_list.rb | 31
-rw-r--r--  spec/support/shared_examples/views/nav_sidebar.rb | 11
-rw-r--r--  spec/support/webmock.rb | 10
-rw-r--r--  spec/uploaders/file_mover_spec.rb | 33
-rw-r--r--  spec/validators/sha_validator_spec.rb | 42
-rw-r--r--  spec/views/ci/status/_icon.html.haml_spec.rb | 89
-rw-r--r--  spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb | 2
-rw-r--r--  spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb | 13
-rw-r--r--  spec/views/layouts/nav/sidebar/_instance_statistics.html.haml_spec.rb | 7
-rw-r--r--  spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb | 13
-rw-r--r--  spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb | 2
-rw-r--r--  spec/views/projects/deployments/_confirm_rollback_modal_spec.html.rb | 63
-rw-r--r--  spec/views/projects/issues/_merge_requests_status.html.haml_spec.rb | 6
-rw-r--r--  spec/views/projects/settings/operations/show.html.haml_spec.rb | 1
-rw-r--r--  spec/workers/hashed_storage/project_migrate_worker_spec.rb (renamed from spec/workers/project_migrate_hashed_storage_worker_spec.rb) | 2
-rw-r--r--  spec/workers/hashed_storage/project_rollback_worker_spec.rb | 50
-rw-r--r--  spec/workers/hashed_storage/rollbacker_worker_spec.rb | 27
-rw-r--r--  spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb | 4
304 files changed, 10171 insertions, 2008 deletions
diff --git a/spec/controllers/admin/appearances_controller_spec.rb b/spec/controllers/admin/appearances_controller_spec.rb
index 4ddd0953267..621aa148301 100644
--- a/spec/controllers/admin/appearances_controller_spec.rb
+++ b/spec/controllers/admin/appearances_controller_spec.rb
@@ -1,15 +1,17 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Admin::AppearancesController do
let(:admin) { create(:admin) }
- let(:header_message) { "Header message" }
- let(:footer_message) { "Footer" }
+ let(:header_message) { 'Header message' }
+ let(:footer_message) { 'Footer' }
describe 'POST #create' do
let(:create_params) do
{
- title: "Foo",
- description: "Bar",
+ title: 'Foo',
+ description: 'Bar',
header_message: header_message,
footer_message: footer_message
}
@@ -24,9 +26,26 @@ describe Admin::AppearancesController do
expect(Appearance.current).to have_attributes(
header_message: header_message,
- footer_message: footer_message
+ footer_message: footer_message,
+ email_header_and_footer_enabled: false,
+ message_background_color: '#E75E40',
+ message_font_color: '#FFFFFF'
)
end
+
+ context 'when enabling header and footer in email' do
+ it 'creates appearance with enabled flag' do
+ create_params[:email_header_and_footer_enabled] = true
+
+ post :create, params: { appearance: create_params }
+
+ expect(Appearance.current).to have_attributes(
+ header_message: header_message,
+ footer_message: footer_message,
+ email_header_and_footer_enabled: true
+ )
+ end
+ end
end
describe 'PUT #update' do
@@ -48,8 +67,25 @@ describe Admin::AppearancesController do
expect(Appearance.current).to have_attributes(
header_message: header_message,
- footer_message: footer_message
+ footer_message: footer_message,
+ email_header_and_footer_enabled: false,
+ message_background_color: '#E75E40',
+ message_font_color: '#FFFFFF'
)
end
+
+ context 'when enabling header and footer in email' do
+ it 'updates appearance with enabled flag' do
+ update_params[:email_header_and_footer_enabled] = true
+
+ post :update, params: { appearance: update_params }
+
+ expect(Appearance.current).to have_attributes(
+ header_message: header_message,
+ footer_message: footer_message,
+ email_header_and_footer_enabled: true
+ )
+ end
+ end
end
end
diff --git a/spec/controllers/concerns/send_file_upload_spec.rb b/spec/controllers/concerns/send_file_upload_spec.rb
index cf3b24f50a3..aa71a247956 100644
--- a/spec/controllers/concerns/send_file_upload_spec.rb
+++ b/spec/controllers/concerns/send_file_upload_spec.rb
@@ -112,7 +112,7 @@ describe SendFileUpload do
it 'sends a file with a custom type' do
headers = double
- expected_headers = %r(response-content-disposition=attachment%3B%20filename%3D%22test.js%22%3B%20filename%2A%3DUTF-8%27%27test.js&response-content-type=application/ecmascript)
+ expected_headers = /response-content-disposition=attachment%3B%20filename%3D%22test.js%22%3B%20filename%2A%3DUTF-8%27%27test.js&response-content-type=application%2Fecmascript/
expect(Gitlab::Workhorse).to receive(:send_url).with(expected_headers).and_call_original
expect(headers).to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-url:/)
diff --git a/spec/controllers/dashboard/milestones_controller_spec.rb b/spec/controllers/dashboard/milestones_controller_spec.rb
index 4b164d0aa6b..ab40b4eb178 100644
--- a/spec/controllers/dashboard/milestones_controller_spec.rb
+++ b/spec/controllers/dashboard/milestones_controller_spec.rb
@@ -13,7 +13,7 @@ describe Dashboard::MilestonesController do
)
end
let(:issue) { create(:issue, project: project, milestone: project_milestone) }
- let(:group_issue) { create(:issue, milestone: group_milestone) }
+ let(:group_issue) { create(:issue, milestone: group_milestone, project: create(:project, group: group)) }
let!(:label) { create(:label, project: project, title: 'Issue Label', issues: [issue]) }
let!(:group_label) { create(:group_label, group: group, title: 'Group Issue Label', issues: [group_issue]) }
diff --git a/spec/controllers/google_api/authorizations_controller_spec.rb b/spec/controllers/google_api/authorizations_controller_spec.rb
index 1e8e82da4f3..d9ba85cf56a 100644
--- a/spec/controllers/google_api/authorizations_controller_spec.rb
+++ b/spec/controllers/google_api/authorizations_controller_spec.rb
@@ -6,7 +6,7 @@ describe GoogleApi::AuthorizationsController do
let(:token) { 'token' }
let(:expires_at) { 1.hour.since.strftime('%s') }
- subject { get :callback, params: { code: 'xxx', state: @state } }
+ subject { get :callback, params: { code: 'xxx', state: state } }
before do
sign_in(user)
@@ -15,35 +15,57 @@ describe GoogleApi::AuthorizationsController do
.to receive(:get_token).and_return([token, expires_at])
end
- it 'sets token and expires_at in session' do
- subject
+ shared_examples_for 'access denied' do
+ it 'returns a 404' do
+ subject
- expect(session[GoogleApi::CloudPlatform::Client.session_key_for_token])
- .to eq(token)
- expect(session[GoogleApi::CloudPlatform::Client.session_key_for_expires_at])
- .to eq(expires_at)
+ expect(session[GoogleApi::CloudPlatform::Client.session_key_for_token]).to be_nil
+ expect(response).to have_http_status(:not_found)
+ end
end
- context 'when redirect uri key is stored in state' do
- set(:project) { create(:project) }
- let(:redirect_uri) { project_clusters_url(project).to_s }
+ context 'session key is present' do
+ let(:session_key) { 'session-key' }
+ let(:redirect_uri) { 'example.com' }
before do
- @state = GoogleApi::CloudPlatform::Client
- .new_session_key_for_redirect_uri do |key|
- session[key] = redirect_uri
+ session[GoogleApi::CloudPlatform::Client.session_key_for_redirect_uri(session_key)] = redirect_uri
+ end
+
+ context 'session key matches state param' do
+ let(:state) { session_key }
+
+ it 'sets token and expires_at in session' do
+ subject
+
+ expect(session[GoogleApi::CloudPlatform::Client.session_key_for_token])
+ .to eq(token)
+ expect(session[GoogleApi::CloudPlatform::Client.session_key_for_expires_at])
+ .to eq(expires_at)
+ end
+
+ it 'redirects to the URL stored in state param' do
+ expect(subject).to redirect_to(redirect_uri)
end
end
- it 'redirects to the URL stored in state param' do
- expect(subject).to redirect_to(redirect_uri)
+ context 'session key does not match state param' do
+ let(:state) { 'bad-key' }
+
+ it_behaves_like 'access denied'
end
- end
- context 'when redirection url is not stored in state' do
- it 'redirects to root_path' do
- expect(subject).to redirect_to(root_path)
+ context 'state param is blank' do
+ let(:state) { '' }
+
+ it_behaves_like 'access denied'
end
end
+
+ context 'state param is present, but session key is blank' do
+ let(:state) { 'session-key' }
+
+ it_behaves_like 'access denied'
+ end
end
end
diff --git a/spec/controllers/graphql_controller_spec.rb b/spec/controllers/graphql_controller_spec.rb
deleted file mode 100644
index a0f40874db1..00000000000
--- a/spec/controllers/graphql_controller_spec.rb
+++ /dev/null
@@ -1,112 +0,0 @@
-require 'spec_helper'
-
-describe GraphqlController do
- describe 'execute' do
- let(:user) { nil }
-
- before do
- sign_in(user) if user
-
- run_test_query!
- end
-
- subject { query_response }
-
- context 'graphql is disabled by feature flag' do
- let(:user) { nil }
-
- before do
- stub_feature_flags(graphql: false)
- end
-
- it 'returns 404' do
- run_test_query!
-
- expect(response).to have_gitlab_http_status(404)
- end
- end
-
- context 'signed out' do
- let(:user) { nil }
-
- it 'runs the query with current_user: nil' do
- is_expected.to eq('echo' => 'nil says: test success')
- end
- end
-
- context 'signed in' do
- let(:user) { create(:user, username: 'Simon') }
-
- it 'runs the query with current_user set' do
- is_expected.to eq('echo' => '"Simon" says: test success')
- end
- end
-
- context 'invalid variables' do
- it 'returns an error' do
- run_test_query!(variables: "This is not JSON")
-
- expect(response).to have_gitlab_http_status(422)
- expect(json_response['errors'].first['message']).not_to be_nil
- end
- end
- end
-
- context 'token authentication' do
- before do
- stub_authentication_activity_metrics(debug: false)
- end
-
- let(:user) { create(:user, username: 'Simon') }
- let(:personal_access_token) { create(:personal_access_token, user: user) }
-
- context "when the 'personal_access_token' param is populated with the personal access token" do
- it 'logs the user in' do
- expect(authentication_metrics)
- .to increment(:user_authenticated_counter)
- .and increment(:user_session_override_counter)
- .and increment(:user_sessionless_authentication_counter)
-
- run_test_query!(private_token: personal_access_token.token)
-
- expect(response).to have_gitlab_http_status(200)
- expect(query_response).to eq('echo' => '"Simon" says: test success')
- end
- end
-
- context 'when the personal access token has no api scope' do
- it 'does not log the user in' do
- personal_access_token.update(scopes: [:read_user])
-
- run_test_query!(private_token: personal_access_token.token)
-
- expect(response).to have_gitlab_http_status(200)
-
- expect(query_response).to eq('echo' => 'nil says: test success')
- end
- end
-
- context 'without token' do
- it 'shows public data' do
- run_test_query!
-
- expect(query_response).to eq('echo' => 'nil says: test success')
- end
- end
- end
-
- # Chosen to exercise all the moving parts in GraphqlController#execute
- def run_test_query!(variables: { 'text' => 'test success' }, private_token: nil)
- query = <<~QUERY
- query Echo($text: String) {
- echo(text: $text)
- }
- QUERY
-
- post :execute, params: { query: query, operationName: 'Echo', variables: variables, private_token: private_token }
- end
-
- def query_response
- json_response['data']
- end
-end
diff --git a/spec/controllers/groups/clusters/applications_controller_spec.rb b/spec/controllers/groups/clusters/applications_controller_spec.rb
index dd5263b077c..16a63536ea6 100644
--- a/spec/controllers/groups/clusters/applications_controller_spec.rb
+++ b/spec/controllers/groups/clusters/applications_controller_spec.rb
@@ -9,9 +9,25 @@ describe Groups::Clusters::ApplicationsController do
Clusters::Cluster::APPLICATIONS[application]
end
+ shared_examples 'a secure endpoint' do
+ it { expect { subject }.to be_allowed_for(:admin) }
+ it { expect { subject }.to be_allowed_for(:owner).of(group) }
+ it { expect { subject }.to be_allowed_for(:maintainer).of(group) }
+ it { expect { subject }.to be_denied_for(:developer).of(group) }
+ it { expect { subject }.to be_denied_for(:reporter).of(group) }
+ it { expect { subject }.to be_denied_for(:guest).of(group) }
+ it { expect { subject }.to be_denied_for(:user) }
+ it { expect { subject }.to be_denied_for(:external) }
+ end
+
+ let(:cluster) { create(:cluster, :group, :provided_by_gcp) }
+ let(:group) { cluster.group }
+
describe 'POST create' do
- let(:cluster) { create(:cluster, :group, :provided_by_gcp) }
- let(:group) { cluster.group }
+ subject do
+ post :create, params: params.merge(group_id: group)
+ end
+
let(:application) { 'helm' }
let(:params) { { application: application, id: cluster.id } }
@@ -26,7 +42,7 @@ describe Groups::Clusters::ApplicationsController do
it 'schedule an application installation' do
expect(ClusterInstallAppWorker).to receive(:perform_async).with(application, anything).once
- expect { go }.to change { current_application.count }
+ expect { subject }.to change { current_application.count }
expect(response).to have_http_status(:no_content)
expect(cluster.application_helm).to be_scheduled
end
@@ -37,7 +53,7 @@ describe Groups::Clusters::ApplicationsController do
end
it 'return 404' do
- expect { go }.not_to change { current_application.count }
+ expect { subject }.not_to change { current_application.count }
expect(response).to have_http_status(:not_found)
end
end
@@ -46,9 +62,7 @@ describe Groups::Clusters::ApplicationsController do
let(:application) { 'unkwnown-app' }
it 'return 404' do
- go
-
- expect(response).to have_http_status(:not_found)
+ is_expected.to have_http_status(:not_found)
end
end
@@ -58,9 +72,7 @@ describe Groups::Clusters::ApplicationsController do
end
it 'returns 400' do
- go
-
- expect(response).to have_http_status(:bad_request)
+ is_expected.to have_http_status(:bad_request)
end
end
end
@@ -70,18 +82,66 @@ describe Groups::Clusters::ApplicationsController do
allow(ClusterInstallAppWorker).to receive(:perform_async)
end
- it { expect { go }.to be_allowed_for(:admin) }
- it { expect { go }.to be_allowed_for(:owner).of(group) }
- it { expect { go }.to be_allowed_for(:maintainer).of(group) }
- it { expect { go }.to be_denied_for(:developer).of(group) }
- it { expect { go }.to be_denied_for(:reporter).of(group) }
- it { expect { go }.to be_denied_for(:guest).of(group) }
- it { expect { go }.to be_denied_for(:user) }
- it { expect { go }.to be_denied_for(:external) }
+ it_behaves_like 'a secure endpoint'
end
+ end
- def go
- post :create, params: params.merge(group_id: group)
+ describe 'PATCH update' do
+ subject do
+ patch :update, params: params.merge(group_id: group)
+ end
+
+ let!(:application) { create(:clusters_applications_cert_managers, :installed, cluster: cluster) }
+ let(:application_name) { application.name }
+ let(:params) { { application: application_name, id: cluster.id, email: "new-email@example.com" } }
+
+ describe 'functionality' do
+ let(:user) { create(:user) }
+
+ before do
+ group.add_maintainer(user)
+ sign_in(user)
+ end
+
+ context "when cluster and app exists" do
+ it "schedules an application update" do
+ expect(ClusterPatchAppWorker).to receive(:perform_async).with(application.name, anything).once
+
+ is_expected.to have_http_status(:no_content)
+
+ expect(cluster.application_cert_manager).to be_scheduled
+ end
+ end
+
+ context 'when cluster do not exists' do
+ before do
+ cluster.destroy!
+ end
+
+ it { is_expected.to have_http_status(:not_found) }
+ end
+
+ context 'when application is unknown' do
+ let(:application_name) { 'unkwnown-app' }
+
+ it { is_expected.to have_http_status(:not_found) }
+ end
+
+ context 'when application is already scheduled' do
+ before do
+ application.make_scheduled!
+ end
+
+ it { is_expected.to have_http_status(:bad_request) }
+ end
+ end
+
+ describe 'security' do
+ before do
+ allow(ClusterPatchAppWorker).to receive(:perform_async)
+ end
+
+ it_behaves_like 'a secure endpoint'
end
end
end
diff --git a/spec/controllers/groups/shared_projects_controller_spec.rb b/spec/controllers/groups/shared_projects_controller_spec.rb
index dab7700cf64..b0c20fb5a90 100644
--- a/spec/controllers/groups/shared_projects_controller_spec.rb
+++ b/spec/controllers/groups/shared_projects_controller_spec.rb
@@ -6,6 +6,8 @@ describe Groups::SharedProjectsController do
end
def share_project(project)
+ group.add_developer(user)
+
Projects::GroupLinks::CreateService.new(
project,
user,
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 7d87b33e503..21e5122c06b 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -227,9 +227,7 @@ describe GroupsController do
context 'searching' do
before do
- # Remove in https://gitlab.com/gitlab-org/gitlab-ce/issues/54643
- stub_feature_flags(use_cte_for_group_issues_search: false)
- stub_feature_flags(use_subquery_for_group_issues_search: true)
+ stub_feature_flags(attempt_group_search_optimizations: true)
end
it 'works with popularity sort' do
diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb
index 232a5e2793b..e0da23ca0b8 100644
--- a/spec/controllers/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/omniauth_callbacks_controller_spec.rb
@@ -193,7 +193,7 @@ describe OmniauthCallbacksController, type: :controller do
before do
stub_omniauth_saml_config({ enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'],
providers: [saml_config] })
- mock_auth_hash('saml', 'my-uid', user.email, mock_saml_response)
+ mock_auth_hash_with_saml_xml('saml', 'my-uid', user.email, mock_saml_response)
request.env["devise.mapping"] = Devise.mappings[:user]
request.env['omniauth.auth'] = Rails.application.env_config['omniauth.auth']
post :saml, params: { SAMLResponse: mock_saml_response }
diff --git a/spec/controllers/projects/autocomplete_sources_controller_spec.rb b/spec/controllers/projects/autocomplete_sources_controller_spec.rb
index 4bc72042710..a9a058e7e17 100644
--- a/spec/controllers/projects/autocomplete_sources_controller_spec.rb
+++ b/spec/controllers/projects/autocomplete_sources_controller_spec.rb
@@ -35,4 +35,35 @@ describe Projects::AutocompleteSourcesController do
avatar_url: user.avatar_url)
end
end
+
+ describe 'GET milestones' do
+ let(:group) { create(:group, :public) }
+ let(:project) { create(:project, :public, namespace: group) }
+ let!(:project_milestone) { create(:milestone, project: project) }
+ let!(:group_milestone) { create(:milestone, group: group) }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'lists milestones' do
+ group.add_owner(user)
+
+ get :milestones, format: :json, params: { namespace_id: group.path, project_id: project.path }
+
+ milestone_titles = json_response.map { |milestone| milestone["title"] }
+ expect(milestone_titles).to match_array([project_milestone.title, group_milestone.title])
+ end
+
+ context 'when user cannot read project issues and merge requests' do
+ it 'renders 404' do
+ project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE)
+ project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE)
+
+ get :milestones, format: :json, params: { namespace_id: group.path, project_id: project.path }
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
end
diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb
index 38957e96798..3801fca09dc 100644
--- a/spec/controllers/projects/blob_controller_spec.rb
+++ b/spec/controllers/projects/blob_controller_spec.rb
@@ -144,54 +144,34 @@ describe Projects::BlobController do
end
context 'when rendering for merge request' do
- it 'renders diff context lines Gitlab::Diff::Line array' do
- do_get(since: 1, to: 5, offset: 10, from_merge_request: true)
-
- lines = JSON.parse(response.body)
-
- expect(lines.first).to have_key('type')
- expect(lines.first).to have_key('rich_text')
- expect(lines.first).to have_key('rich_text')
+ let(:presenter) { double(:presenter, diff_lines: diff_lines) }
+ let(:diff_lines) do
+ Array.new(3, Gitlab::Diff::Line.new('plain', nil, nil, nil, nil, rich_text: 'rich'))
end
- context 'when rendering match lines' do
- it 'adds top match line when "since" is less than 1' do
- do_get(since: 5, to: 10, offset: 10, from_merge_request: true)
-
- match_line = JSON.parse(response.body).first
-
- expect(match_line['type']).to eq('match')
- expect(match_line['meta_data']).to have_key('old_pos')
- expect(match_line['meta_data']).to have_key('new_pos')
- end
-
- it 'does not add top match line when "since" is equal 1' do
- do_get(since: 1, to: 10, offset: 10, from_merge_request: true)
-
- match_line = JSON.parse(response.body).first
-
- expect(match_line['type']).to be_nil
- end
+ before do
+ allow(Blobs::UnfoldPresenter).to receive(:new).and_return(presenter)
+ end
- it 'adds bottom match line when "t"o is less than blob size' do
- do_get(since: 1, to: 5, offset: 10, from_merge_request: true, bottom: true)
+ it 'renders diff context lines Gitlab::Diff::Line array' do
+ do_get(since: 1, to: 2, offset: 0, from_merge_request: true)
- match_line = JSON.parse(response.body).last
+ lines = JSON.parse(response.body)
- expect(match_line['type']).to eq('match')
- expect(match_line['meta_data']).to have_key('old_pos')
- expect(match_line['meta_data']).to have_key('new_pos')
+ expect(lines.size).to eq(diff_lines.size)
+ lines.each do |line|
+ expect(line).to have_key('type')
+ expect(line['text']).to eq('plain')
+ expect(line['rich_text']).to eq('rich')
end
+ end
- it 'does not add bottom match line when "to" is less than blob size' do
- commit_id = project.repository.commit('master').id
- blob = project.repository.blob_at(commit_id, 'CHANGELOG')
- do_get(since: 1, to: blob.lines.count, offset: 10, from_merge_request: true, bottom: true)
+ it 'handles full being true' do
+ do_get(full: true, from_merge_request: true)
- match_line = JSON.parse(response.body).last
+ lines = JSON.parse(response.body)
- expect(match_line['type']).to be_nil
- end
+ expect(lines.size).to eq(diff_lines.size)
end
end
end
diff --git a/spec/controllers/projects/clusters/applications_controller_spec.rb b/spec/controllers/projects/clusters/applications_controller_spec.rb
index cb558259225..cd1a01f8acc 100644
--- a/spec/controllers/projects/clusters/applications_controller_spec.rb
+++ b/spec/controllers/projects/clusters/applications_controller_spec.rb
@@ -9,7 +9,22 @@ describe Projects::Clusters::ApplicationsController do
Clusters::Cluster::APPLICATIONS[application]
end
+ shared_examples 'a secure endpoint' do
+ it { expect { subject }.to be_allowed_for(:admin) }
+ it { expect { subject }.to be_allowed_for(:owner).of(project) }
+ it { expect { subject }.to be_allowed_for(:maintainer).of(project) }
+ it { expect { subject }.to be_denied_for(:developer).of(project) }
+ it { expect { subject }.to be_denied_for(:reporter).of(project) }
+ it { expect { subject }.to be_denied_for(:guest).of(project) }
+ it { expect { subject }.to be_denied_for(:user) }
+ it { expect { subject }.to be_denied_for(:external) }
+ end
+
describe 'POST create' do
+ subject do
+ post :create, params: params.merge(namespace_id: project.namespace, project_id: project)
+ end
+
let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
let(:project) { cluster.project }
let(:application) { 'helm' }
@@ -26,7 +41,7 @@ describe Projects::Clusters::ApplicationsController do
it 'schedule an application installation' do
expect(ClusterInstallAppWorker).to receive(:perform_async).with(application, anything).once
- expect { go }.to change { current_application.count }
+ expect { subject }.to change { current_application.count }
expect(response).to have_http_status(:no_content)
expect(cluster.application_helm).to be_scheduled
end
@@ -37,7 +52,7 @@ describe Projects::Clusters::ApplicationsController do
end
it 'return 404' do
- expect { go }.not_to change { current_application.count }
+ expect { subject }.not_to change { current_application.count }
expect(response).to have_http_status(:not_found)
end
end
@@ -46,9 +61,7 @@ describe Projects::Clusters::ApplicationsController do
let(:application) { 'unkwnown-app' }
it 'return 404' do
- go
-
- expect(response).to have_http_status(:not_found)
+ is_expected.to have_http_status(:not_found)
end
end
@@ -58,9 +71,7 @@ describe Projects::Clusters::ApplicationsController do
end
it 'returns 400' do
- go
-
- expect(response).to have_http_status(:bad_request)
+ is_expected.to have_http_status(:bad_request)
end
end
end
@@ -70,18 +81,68 @@ describe Projects::Clusters::ApplicationsController do
allow(ClusterInstallAppWorker).to receive(:perform_async)
end
- it { expect { go }.to be_allowed_for(:admin) }
- it { expect { go }.to be_allowed_for(:owner).of(project) }
- it { expect { go }.to be_allowed_for(:maintainer).of(project) }
- it { expect { go }.to be_denied_for(:developer).of(project) }
- it { expect { go }.to be_denied_for(:reporter).of(project) }
- it { expect { go }.to be_denied_for(:guest).of(project) }
- it { expect { go }.to be_denied_for(:user) }
- it { expect { go }.to be_denied_for(:external) }
+ it_behaves_like 'a secure endpoint'
end
+ end
- def go
- post :create, params: params.merge(namespace_id: project.namespace, project_id: project)
+ describe 'PATCH update' do
+ subject do
+ patch :update, params: params.merge(namespace_id: project.namespace, project_id: project)
+ end
+
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
+ let!(:application) { create(:clusters_applications_knative, :installed, cluster: cluster) }
+ let(:application_name) { application.name }
+ let(:params) { { application: application_name, id: cluster.id, hostname: "new.example.com" } }
+
+ describe 'functionality' do
+ let(:user) { create(:user) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ context "when cluster and app exists" do
+ it "schedules an application update" do
+ expect(ClusterPatchAppWorker).to receive(:perform_async).with(application.name, anything).once
+
+ is_expected.to have_http_status(:no_content)
+
+ expect(cluster.application_knative).to be_scheduled
+ end
+ end
+
+ context 'when cluster do not exists' do
+ before do
+ cluster.destroy!
+ end
+
+ it { is_expected.to have_http_status(:not_found) }
+ end
+
+ context 'when application is unknown' do
+ let(:application_name) { 'unkwnown-app' }
+
+ it { is_expected.to have_http_status(:not_found) }
+ end
+
+ context 'when application is already scheduled' do
+ before do
+ application.make_scheduled!
+ end
+
+ it { is_expected.to have_http_status(:bad_request) }
+ end
+ end
+
+ describe 'security' do
+ before do
+ allow(ClusterPatchAppWorker).to receive(:perform_async)
+ end
+
+ it_behaves_like 'a secure endpoint'
end
end
end
diff --git a/spec/controllers/projects/group_links_controller_spec.rb b/spec/controllers/projects/group_links_controller_spec.rb
index 675eeff8d12..ce021b2f085 100644
--- a/spec/controllers/projects/group_links_controller_spec.rb
+++ b/spec/controllers/projects/group_links_controller_spec.rb
@@ -65,8 +65,24 @@ describe Projects::GroupLinksController do
end
end
+ context 'when user does not have access to the public group' do
+ let(:group) { create(:group, :public) }
+
+ include_context 'link project to group'
+
+ it 'renders 404' do
+ expect(response.status).to eq 404
+ end
+
+ it 'does not share project with that group' do
+ expect(group.shared_projects).not_to include project
+ end
+ end
+
context 'when project group id equal link group id' do
before do
+ group2.add_developer(user)
+
post(:create, params: {
namespace_id: project.namespace,
project_id: project,
@@ -102,5 +118,26 @@ describe Projects::GroupLinksController do
expect(flash[:alert]).to eq('Please select a group.')
end
end
+
+ context 'when link is not persisted in the database' do
+ before do
+ allow(::Projects::GroupLinks::CreateService).to receive_message_chain(:new, :execute)
+ .and_return({ status: :error, http_status: 409, message: 'error' })
+
+ post(:create, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ link_group_id: group.id,
+ link_group_access: ProjectGroupLink.default_access
+ })
+ end
+
+ it 'redirects to project group links page' do
+ expect(response).to redirect_to(
+ project_project_members_path(project)
+ )
+ expect(flash[:alert]).to eq('error')
+ end
+ end
end
end
diff --git a/spec/controllers/projects/settings/operations_controller_spec.rb b/spec/controllers/projects/settings/operations_controller_spec.rb
index d989ec22481..02a392f23c2 100644
--- a/spec/controllers/projects/settings/operations_controller_spec.rb
+++ b/spec/controllers/projects/settings/operations_controller_spec.rb
@@ -74,38 +74,55 @@ describe Projects::Settings::OperationsController do
{
error_tracking_setting_attributes: {
enabled: '1',
- api_url: 'http://url',
- token: 'token'
+ api_host: 'http://url',
+ token: 'token',
+ project: {
+ slug: 'sentry-project',
+ name: 'Sentry Project',
+ organization_slug: 'sentry-org',
+ organization_name: 'Sentry Org'
+ }
}
}
end
+
let(:error_tracking_permitted) do
ActionController::Parameters.new(error_tracking_params).permit!
end
- context 'when update succeeds' do
- before do
- stub_operations_update_service_returning(status: :success)
- end
-
- it 'shows a notice' do
- patch :update, params: project_params(project, error_tracking_params)
-
- expect(response).to redirect_to(operations_url)
- expect(flash[:notice]).to eq _('Your changes have been saved')
- end
- end
-
- context 'when update fails' do
- before do
- stub_operations_update_service_returning(status: :error)
+ context 'format json' do
+ context 'when update succeeds' do
+ before do
+ stub_operations_update_service_returning(status: :success)
+ end
+
+ it 'returns success status' do
+ patch :update,
+ params: project_params(project, error_tracking_params),
+ format: :json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq('status' => 'success')
+ expect(flash[:notice]).to eq('Your changes have been saved')
+ end
end
- it 'renders show page' do
- patch :update, params: project_params(project, error_tracking_params)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template(:show)
+ context 'when update fails' do
+ before do
+ stub_operations_update_service_returning(
+ status: :error,
+ message: 'error message'
+ )
+ end
+
+ it 'returns error' do
+ patch :update,
+ params: project_params(project, error_tracking_params),
+ format: :json
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).not_to be_nil
+ end
end
end
diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb
index 5c6858dc7b2..77a94f26d8c 100644
--- a/spec/controllers/snippets_controller_spec.rb
+++ b/spec/controllers/snippets_controller_spec.rb
@@ -205,6 +205,8 @@ describe SnippetsController do
end
context 'when the snippet description contains a file' do
+ include FileMoverHelpers
+
let(:picture_file) { '/-/system/temp/secret56/picture.jpg' }
let(:text_file) { '/-/system/temp/secret78/text.txt' }
let(:description) do
@@ -215,6 +217,8 @@ describe SnippetsController do
before do
allow(FileUtils).to receive(:mkdir_p)
allow(FileUtils).to receive(:move)
+ stub_file_mover(text_file)
+ stub_file_mover(picture_file)
end
subject { create_snippet({ description: description }, { files: [picture_file, text_file] }) }
diff --git a/spec/controllers/users_controller_spec.rb b/spec/controllers/users_controller_spec.rb
index af61026098b..4f6a6881193 100644
--- a/spec/controllers/users_controller_spec.rb
+++ b/spec/controllers/users_controller_spec.rb
@@ -185,13 +185,32 @@ describe UsersController do
context 'for user' do
context 'with public profile' do
- it 'renders calendar_activities' do
- push_data = Gitlab::DataBuilder::Push.build_sample(project, public_user)
- EventCreateService.new.push(project, public_user, push_data)
+ let(:issue) { create(:issue, project: project, author: user) }
+ let(:note) { create(:note, noteable: issue, author: user, project: project) }
+
+ render_views
+
+ before do
+ create_push_event
+ create_note_event
+ end
+ it 'renders calendar_activities' do
get :calendar_activities, params: { username: public_user.username }
+
expect(assigns[:events]).not_to be_empty
end
+
+ it 'avoids N+1 queries', :request_store do
+ get :calendar_activities, params: { username: public_user.username }
+
+ control = ActiveRecord::QueryRecorder.new { get :calendar_activities, params: { username: public_user.username } }
+
+ create_push_event
+ create_note_event
+
+ expect { get :calendar_activities, params: { username: public_user.username } }.not_to exceed_query_limit(control)
+ end
end
context 'with private profile' do
@@ -203,6 +222,15 @@ describe UsersController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ def create_push_event
+ push_data = Gitlab::DataBuilder::Push.build_sample(project, public_user)
+ EventCreateService.new.push(project, public_user, push_data)
+ end
+
+ def create_note_event
+ EventCreateService.new.leave_note(note, public_user)
+ end
end
end
diff --git a/spec/factories/ci/group_variables.rb b/spec/factories/ci/group_variables.rb
index 64716842b12..9bf520a2c0a 100644
--- a/spec/factories/ci/group_variables.rb
+++ b/spec/factories/ci/group_variables.rb
@@ -2,6 +2,7 @@ FactoryBot.define do
factory :ci_group_variable, class: Ci::GroupVariable do
sequence(:key) { |n| "VARIABLE_#{n}" }
value 'VARIABLE_VALUE'
+ masked false
trait(:protected) do
protected true
diff --git a/spec/factories/ci/variables.rb b/spec/factories/ci/variables.rb
index 3d014b9b54f..97a7c9ba252 100644
--- a/spec/factories/ci/variables.rb
+++ b/spec/factories/ci/variables.rb
@@ -2,6 +2,7 @@ FactoryBot.define do
factory :ci_variable, class: Ci::Variable do
sequence(:key) { |n| "VARIABLE_#{n}" }
value 'VARIABLE_VALUE'
+ masked false
trait(:protected) do
protected true
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index 2392bfc4a53..18f724770b5 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -101,6 +101,15 @@ FactoryBot.define do
end
end
+ trait :with_merge_request_pipeline do
+ after(:build) do |merge_request|
+ merge_request.merge_request_pipelines << build(:ci_pipeline,
+ source: :merge_request_event,
+ merge_request: merge_request,
+ project: merge_request.source_project)
+ end
+ end
+
trait :deployed_review_app do
target_branch 'pages-deploy-target'
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index ed9c0ea9ac0..97b432a6751 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -141,6 +141,56 @@ describe "Admin Runners" do
end
end
+ describe 'filter by tag', :js do
+ it 'shows correct runner when tag matches' do
+ create :ci_runner, description: 'runner-blue', tag_list: ['blue']
+ create :ci_runner, description: 'runner-red', tag_list: ['red']
+
+ visit admin_runners_path
+
+ expect(page).to have_content 'runner-blue'
+ expect(page).to have_content 'runner-red'
+
+ input_filtered_search_keys('tag:blue')
+
+ expect(page).to have_content 'runner-blue'
+ expect(page).not_to have_content 'runner-red'
+ end
+
+ it 'shows no runner when tag does not match' do
+ create :ci_runner, description: 'runner-blue', tag_list: ['blue']
+ create :ci_runner, description: 'runner-red', tag_list: ['blue']
+
+ visit admin_runners_path
+
+ input_filtered_search_keys('tag:red')
+
+ expect(page).not_to have_content 'runner-blue'
+ expect(page).not_to have_content 'runner-blue'
+ expect(page).to have_text 'No runners found'
+ end
+
+ it 'shows correct runner when tag is selected and search term is entered' do
+ create :ci_runner, description: 'runner-a-1', tag_list: ['blue']
+ create :ci_runner, description: 'runner-a-2', tag_list: ['red']
+ create :ci_runner, description: 'runner-b-1', tag_list: ['blue']
+
+ visit admin_runners_path
+
+ input_filtered_search_keys('tag:blue')
+
+ expect(page).to have_content 'runner-a-1'
+ expect(page).to have_content 'runner-b-1'
+ expect(page).not_to have_content 'runner-a-2'
+
+ input_filtered_search_keys('tag:blue runner-a')
+
+ expect(page).to have_content 'runner-a-1'
+ expect(page).not_to have_content 'runner-b-1'
+ expect(page).not_to have_content 'runner-a-2'
+ end
+ end
+
it 'sorts by last contact date', :js do
create(:ci_runner, description: 'runner-1', created_at: '2018-07-12 15:37', contacted_at: '2018-07-12 15:37')
create(:ci_runner, description: 'runner-2', created_at: '2018-07-12 16:37', contacted_at: '2018-07-12 16:37')
diff --git a/spec/features/dashboard/activity_spec.rb b/spec/features/dashboard/activity_spec.rb
index bf91dc121d8..c55dc4523f7 100644
--- a/spec/features/dashboard/activity_spec.rb
+++ b/spec/features/dashboard/activity_spec.rb
@@ -39,6 +39,8 @@ describe 'Dashboard > Activity' do
event
end
+ let(:issue) { create(:issue, project: project) }
+
let!(:merged_event) do
create(:event, :merged, project: project, target: merge_request, author: user)
end
@@ -59,6 +61,10 @@ describe 'Dashboard > Activity' do
create(:event, :closed, project: project, target: milestone, author: user)
end
+ let!(:issue_event) do
+ create(:event, :created, project: project, target: issue, author: user)
+ end
+
before do
project.add_maintainer(user)
@@ -74,6 +80,7 @@ describe 'Dashboard > Activity' do
expect(page).to have_content('closed')
expect(page).to have_content('commented on')
expect(page).to have_content('closed milestone')
+ expect(page).to have_content('opened issue')
end
end
@@ -87,6 +94,7 @@ describe 'Dashboard > Activity' do
expect(page).not_to have_content('accepted')
expect(page).not_to have_content('closed')
expect(page).not_to have_content('commented on')
+ expect(page).not_to have_content('opened issue')
end
end
@@ -100,6 +108,7 @@ describe 'Dashboard > Activity' do
expect(page).to have_content('accepted')
expect(page).not_to have_content('closed')
expect(page).not_to have_content('commented on')
+ expect(page).not_to have_content('opened issue')
end
end
@@ -111,9 +120,10 @@ describe 'Dashboard > Activity' do
expect(page).not_to have_content('pushed new branch')
expect(page).not_to have_content('joined')
expect(page).not_to have_content('accepted')
- expect(page).to have_content('closed')
+ expect(page).not_to have_content('closed')
expect(page).not_to have_content('commented on')
- expect(page).to have_content('closed milestone')
+ expect(page).not_to have_content('closed milestone')
+ expect(page).to have_content('opened issue')
end
end
@@ -127,6 +137,7 @@ describe 'Dashboard > Activity' do
expect(page).not_to have_content('accepted')
expect(page).not_to have_content('closed')
expect(page).to have_content('commented on')
+ expect(page).not_to have_content('opened issue')
end
end
@@ -140,6 +151,7 @@ describe 'Dashboard > Activity' do
expect(page).not_to have_content('accepted')
expect(page).not_to have_content('closed')
expect(page).not_to have_content('commented on')
+ expect(page).not_to have_content('opened issue')
end
end
@@ -155,6 +167,7 @@ describe 'Dashboard > Activity' do
expect(page).not_to have_content('accepted')
expect(page).not_to have_content('closed')
expect(page).not_to have_content('commented on')
+ expect(page).not_to have_content('opened issue')
end
end
end
diff --git a/spec/features/group_variables_spec.rb b/spec/features/group_variables_spec.rb
index 57e3ddfb39c..1a53e7c9512 100644
--- a/spec/features/group_variables_spec.rb
+++ b/spec/features/group_variables_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe 'Group variables', :js do
let(:user) { create(:user) }
let(:group) { create(:group) }
- let!(:variable) { create(:ci_group_variable, key: 'test_key', value: 'test value', group: group) }
+ let!(:variable) { create(:ci_group_variable, key: 'test_key', value: 'test_value', group: group) }
let(:page_path) { group_settings_ci_cd_path(group) }
before do
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index 8eb413bdd8d..986f3823275 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -278,7 +278,12 @@ describe 'GFM autocomplete', :js do
end
end
- context 'labels' do
+  # This context has just one example in each context in order to improve spec performance.
+ context 'labels', :quarantine do
+ let!(:backend) { create(:label, project: project, title: 'backend') }
+ let!(:bug) { create(:label, project: project, title: 'bug') }
+ let!(:feature_proposal) { create(:label, project: project, title: 'feature proposal') }
+
it 'opens autocomplete menu for Labels when field starts with text with item escaping HTML characters' do
create(:label, project: project, title: label_xss_title)
@@ -293,6 +298,83 @@ describe 'GFM autocomplete', :js do
expect(find('.atwho-view-ul').text).to have_content('alert label')
end
end
+
+ context 'when no labels are assigned' do
+ it 'shows labels' do
+ note = find('#note-body')
+
+ # It should show all the labels on "~".
+ type(note, '~')
+ wait_for_requests
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show all the labels on "/label ~".
+ type(note, '/label ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show all the labels on "/relabel ~".
+ type(note, '/relabel ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show no labels on "/unlabel ~".
+ type(note, '/unlabel ~')
+ expect_labels(not_shown: [backend, bug, feature_proposal])
+ end
+ end
+
+ context 'when some labels are assigned' do
+ before do
+ issue.labels << [backend]
+ end
+
+ it 'shows labels' do
+ note = find('#note-body')
+
+ # It should show all the labels on "~".
+ type(note, '~')
+ wait_for_requests
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show only unset labels on "/label ~".
+ type(note, '/label ~')
+ expect_labels(shown: [bug, feature_proposal], not_shown: [backend])
+
+ # It should show all the labels on "/relabel ~".
+ type(note, '/relabel ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show only set labels on "/unlabel ~".
+ type(note, '/unlabel ~')
+ expect_labels(shown: [backend], not_shown: [bug, feature_proposal])
+ end
+ end
+
+ context 'when all labels are assigned' do
+ before do
+ issue.labels << [backend, bug, feature_proposal]
+ end
+
+ it 'shows labels' do
+ note = find('#note-body')
+
+ # It should show all the labels on "~".
+ type(note, '~')
+ wait_for_requests
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show no labels on "/label ~".
+ type(note, '/label ~')
+ expect_labels(not_shown: [backend, bug, feature_proposal])
+
+ # It should show all the labels on "/relabel ~".
+ type(note, '/relabel ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+
+ # It should show all the labels on "/unlabel ~".
+ type(note, '/unlabel ~')
+ expect_labels(shown: [backend, bug, feature_proposal])
+ end
+ end
end
shared_examples 'autocomplete suggestions' do
diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb
index e60486f6dcb..0f604db870f 100644
--- a/spec/features/issues/user_creates_issue_spec.rb
+++ b/spec/features/issues/user_creates_issue_spec.rb
@@ -93,4 +93,22 @@ describe "User creates issue" do
end
end
end
+
+ context "when signed in as user with special characters in their name" do
+ let(:user_special) { create(:user, name: "Jon O'Shea") }
+
+ before do
+ project.add_developer(user_special)
+ sign_in(user_special)
+
+ visit(new_project_issue_path(project))
+ end
+
+ it "will correctly escape user names with an apostrophe when clicking 'Assign to me'", :js do
+ first('.assign-to-me-link').click
+
+ expect(page).to have_content(user_special.name)
+ expect(page.find('input[name="issue[assignee_ids][]"]', visible: false)['data-meta']).to eq(user_special.name)
+ end
+ end
end
diff --git a/spec/features/issues/user_uses_quick_actions_spec.rb b/spec/features/issues/user_uses_quick_actions_spec.rb
index 27cffdc5f8b..b5e7c3954e2 100644
--- a/spec/features/issues/user_uses_quick_actions_spec.rb
+++ b/spec/features/issues/user_uses_quick_actions_spec.rb
@@ -243,7 +243,9 @@ describe 'Issues > User uses quick actions', :js do
it 'does not move the issue' do
add_note("/move not/valid")
- expect(page).not_to have_content 'Commands applied'
+ wait_for_requests
+
+ expect(page).to have_content 'Commands applied'
expect(issue.reload).to be_open
end
end
diff --git a/spec/features/issues_spec.rb b/spec/features/issues_spec.rb
index 406e80e91aa..9bc340ed4bb 100644
--- a/spec/features/issues_spec.rb
+++ b/spec/features/issues_spec.rb
@@ -233,8 +233,8 @@ describe 'Issues' do
created_at: Time.now - (index * 60))
end
end
- let(:newer_due_milestone) { create(:milestone, due_date: '2013-12-11') }
- let(:later_due_milestone) { create(:milestone, due_date: '2013-12-12') }
+ let(:newer_due_milestone) { create(:milestone, project: project, due_date: '2013-12-11') }
+ let(:later_due_milestone) { create(:milestone, project: project, due_date: '2013-12-12') }
it 'sorts by newest' do
visit project_issues_path(project, sort: sort_value_created_date)
diff --git a/spec/features/markdown/copy_as_gfm_spec.rb b/spec/features/markdown/copy_as_gfm_spec.rb
index 60ddb02da2c..c30ac9c4ae2 100644
--- a/spec/features/markdown/copy_as_gfm_spec.rb
+++ b/spec/features/markdown/copy_as_gfm_spec.rb
@@ -55,15 +55,10 @@ describe 'Copy as GFM', :js do
To see how GitLab looks please see the [features page on our website](https://about.gitlab.com/features/).
* Manage Git repositories with fine grained access controls that keep your code secure
-
* Perform code reviews and enhance collaboration with merge requests
-
* Complete continuous integration (CI) and CD pipelines to builds, test, and deploy your applications
-
* Each project can also have an issue tracker, issue board, and a wiki
-
* Used by more than 100,000 organizations, GitLab is the most popular solution to manage Git repositories on-premises
-
* Completely free and open source (MIT Expat license)
GFM
)
@@ -116,13 +111,11 @@ describe 'Copy as GFM', :js do
<<~GFM,
* [ ] Unchecked task
-
* [x] Checked task
GFM
<<~GFM
1. [ ] Unchecked ordered task
-
1. [x] Checked ordered task
GFM
)
@@ -551,7 +544,6 @@ describe 'Copy as GFM', :js do
<<~GFM,
* List item
-
* List item 2
GFM
@@ -565,7 +557,6 @@ describe 'Copy as GFM', :js do
# nested lists
<<~GFM,
* Nested
-
* Lists
GFM
@@ -578,7 +569,6 @@ describe 'Copy as GFM', :js do
<<~GFM,
1. Ordered list item
-
1. Ordered list item 2
GFM
@@ -592,7 +582,6 @@ describe 'Copy as GFM', :js do
# nested ordered list
<<~GFM,
1. Nested
-
1. Ordered lists
GFM
diff --git a/spec/features/merge_request/maintainer_edits_fork_spec.rb b/spec/features/merge_request/maintainer_edits_fork_spec.rb
index 7839b97122c..b35f985126c 100644
--- a/spec/features/merge_request/maintainer_edits_fork_spec.rb
+++ b/spec/features/merge_request/maintainer_edits_fork_spec.rb
@@ -18,13 +18,15 @@ describe 'a maintainer edits files on a source-branch of an MR from a fork', :js
end
before do
+ stub_feature_flags(web_ide_default: false)
+
target_project.add_maintainer(user)
sign_in(user)
visit project_merge_request_path(target_project, merge_request)
click_link 'Changes'
wait_for_requests
- first('.js-file-title').click_link 'Edit'
+ first('.js-file-title').find('.js-edit-blob').click
wait_for_requests
end
diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb
index 51b78d3e7d1..19edce1b562 100644
--- a/spec/features/merge_request/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb
@@ -178,7 +178,7 @@ describe 'Merge request > User posts diff notes', :js do
end
end
- describe 'with muliple note forms' do
+ describe 'with multiple note forms' do
before do
visit diffs_project_merge_request_path(project, merge_request, view: 'inline')
click_diff_line(find('[id="2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9"]'))
diff --git a/spec/features/merge_request/user_posts_notes_spec.rb b/spec/features/merge_request/user_posts_notes_spec.rb
index 1bbcf455ac7..dc0862be6fc 100644
--- a/spec/features/merge_request/user_posts_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_notes_spec.rb
@@ -67,7 +67,7 @@ describe 'Merge request > User posts notes', :js do
end
end
- describe 'when reply_to_individual_notes feature flag is not set' do
+ describe 'when reply_to_individual_notes feature flag is disabled' do
before do
stub_feature_flags(reply_to_individual_notes: false)
visit project_merge_request_path(project, merge_request)
@@ -78,9 +78,8 @@ describe 'Merge request > User posts notes', :js do
end
end
- describe 'when reply_to_individual_notes feature flag is set' do
+ describe 'when reply_to_individual_notes feature flag is not set' do
before do
- stub_feature_flags(reply_to_individual_notes: true)
visit project_merge_request_path(project, merge_request)
end
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index 7b473faa884..97b2aa82fce 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -49,7 +49,7 @@ describe 'Merge request > User sees merge request pipelines', :js do
let!(:merge_request_pipeline) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
- .execute(:merge_request, merge_request: merge_request)
+ .execute(:merge_request_event, merge_request: merge_request)
end
before do
@@ -81,7 +81,7 @@ describe 'Merge request > User sees merge request pipelines', :js do
let!(:merge_request_pipeline_2) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
- .execute(:merge_request, merge_request: merge_request)
+ .execute(:merge_request_event, merge_request: merge_request)
end
before do
@@ -220,7 +220,7 @@ describe 'Merge request > User sees merge request pipelines', :js do
let!(:merge_request_pipeline) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
- .execute(:merge_request, merge_request: merge_request)
+ .execute(:merge_request_event, merge_request: merge_request)
end
let(:forked_project) { fork_project(project, user2, repository: true) }
@@ -263,7 +263,7 @@ describe 'Merge request > User sees merge request pipelines', :js do
let!(:merge_request_pipeline_2) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
- .execute(:merge_request, merge_request: merge_request)
+ .execute(:merge_request_event, merge_request: merge_request)
end
before do
diff --git a/spec/features/merge_request/user_sees_versions_spec.rb b/spec/features/merge_request/user_sees_versions_spec.rb
index aa91ade46ca..5c45e363997 100644
--- a/spec/features/merge_request/user_sees_versions_spec.rb
+++ b/spec/features/merge_request/user_sees_versions_spec.rb
@@ -1,7 +1,11 @@
require 'rails_helper'
describe 'Merge request > User sees versions', :js do
- let(:merge_request) { create(:merge_request, importing: true) }
+ let(:merge_request) do
+ create(:merge_request).tap do |mr|
+ mr.merge_request_diff.destroy
+ end
+ end
let(:project) { merge_request.source_project }
let(:user) { project.creator }
let!(:merge_request_diff1) { merge_request.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
diff --git a/spec/features/merge_requests/user_lists_merge_requests_spec.rb b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
index ef7ae490b0f..c691011b9ca 100644
--- a/spec/features/merge_requests/user_lists_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
@@ -13,7 +13,7 @@ describe 'Merge requests > User lists merge requests' do
source_project: project,
source_branch: 'fix',
assignee: user,
- milestone: create(:milestone, due_date: '2013-12-11'),
+ milestone: create(:milestone, project: project, due_date: '2013-12-11'),
created_at: 1.minute.ago,
updated_at: 1.minute.ago)
create(:merge_request,
@@ -21,7 +21,7 @@ describe 'Merge requests > User lists merge requests' do
source_project: project,
source_branch: 'markdown',
assignee: user,
- milestone: create(:milestone, due_date: '2013-12-12'),
+ milestone: create(:milestone, project: project, due_date: '2013-12-12'),
created_at: 2.minutes.ago,
updated_at: 2.minutes.ago)
create(:merge_request,
diff --git a/spec/features/profiles/active_sessions_spec.rb b/spec/features/profiles/active_sessions_spec.rb
index d3050760c06..2aa0177af5d 100644
--- a/spec/features/profiles/active_sessions_spec.rb
+++ b/spec/features/profiles/active_sessions_spec.rb
@@ -7,6 +7,8 @@ describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state do
end
end
+ let(:admin) { create(:admin) }
+
around do |example|
Timecop.freeze(Time.zone.parse('2018-03-12 09:06')) do
example.run
@@ -16,6 +18,7 @@ describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state do
it 'User sees their active sessions' do
Capybara::Session.new(:session1)
Capybara::Session.new(:session2)
+ Capybara::Session.new(:session3)
# note: headers can only be set on the non-js (aka. rack-test) driver
using_session :session1 do
@@ -37,9 +40,27 @@ describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state do
gitlab_sign_in(user)
end
+ # set an admin session impersonating the user
+ using_session :session3 do
+ Capybara.page.driver.header(
+ 'User-Agent',
+ 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36'
+ )
+
+ gitlab_sign_in(admin)
+
+ visit admin_user_path(user)
+
+ click_link 'Impersonate'
+ end
+
using_session :session1 do
visit profile_active_sessions_path
+ expect(page).to(
+ have_selector('ul.list-group li.list-group-item', { text: 'Signed in on',
+ count: 2 }))
+
expect(page).to have_content(
'127.0.0.1 ' \
'This is your current session ' \
@@ -57,33 +78,8 @@ describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state do
)
expect(page).to have_selector '[title="Smartphone"]', count: 1
- end
- end
-
- it 'User can revoke a session', :js, :redis_session_store do
- Capybara::Session.new(:session1)
- Capybara::Session.new(:session2)
-
- # set an additional session in another browser
- using_session :session2 do
- gitlab_sign_in(user)
- end
-
- using_session :session1 do
- gitlab_sign_in(user)
- visit profile_active_sessions_path
-
- expect(page).to have_link('Revoke', count: 1)
-
- accept_confirm { click_on 'Revoke' }
-
- expect(page).not_to have_link('Revoke')
- end
-
- using_session :session2 do
- visit profile_active_sessions_path
- expect(page).to have_content('You need to sign in or sign up before continuing.')
+ expect(page).not_to have_content('Chrome on Windows')
end
end
end
diff --git a/spec/features/project_variables_spec.rb b/spec/features/project_variables_spec.rb
index a93df3696d2..6bdf5df1036 100644
--- a/spec/features/project_variables_spec.rb
+++ b/spec/features/project_variables_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe 'Project variables', :js do
let(:user) { create(:user) }
let(:project) { create(:project) }
- let(:variable) { create(:ci_variable, key: 'test_key', value: 'test value') }
+ let(:variable) { create(:ci_variable, key: 'test_key', value: 'test_value') }
let(:page_path) { project_settings_ci_cd_path(project) }
before do
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index 3edcc7ac2cd..a7aa63018fd 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -548,10 +548,7 @@ describe 'File blob', :js do
it 'displays an auxiliary viewer' do
aggregate_failures do
# shows names of dependency manager and package
- expect(page).to have_content('This project manages its dependencies using RubyGems and defines a gem named activerecord.')
-
- # shows a link to the gem
- expect(page).to have_link('activerecord', href: 'https://rubygems.org/gems/activerecord')
+ expect(page).to have_content('This project manages its dependencies using RubyGems.')
# shows a learn more link
expect(page).to have_link('Learn more', href: 'https://rubygems.org/')
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index 828f6f9921e..57d21f3e182 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -49,6 +49,15 @@ describe 'Editing file blob', :js do
end
end
+ it 'updates the content of file with a number as file path' do
+ project.repository.create_file(user, '1', 'test', message: 'testing', branch_name: branch)
+ visit project_blob_path(project, tree_join(branch, '1'))
+
+ edit_and_commit
+
+ expect(page).to have_content 'NextFeature'
+ end
+
context 'from blob file path' do
before do
visit project_blob_path(project, tree_join(branch, file_path))
diff --git a/spec/features/projects/clusters/applications_spec.rb b/spec/features/projects/clusters/applications_spec.rb
index 2c8d014c36d..713e25cdcb2 100644
--- a/spec/features/projects/clusters/applications_spec.rb
+++ b/spec/features/projects/clusters/applications_spec.rb
@@ -17,7 +17,7 @@ describe 'Clusters Applications', :js do
end
context 'when cluster is being created' do
- let(:cluster) { create(:cluster, :providing_by_gcp, projects: [project])}
+ let(:cluster) { create(:cluster, :providing_by_gcp, projects: [project]) }
it 'user is unable to install applications' do
page.within('.js-cluster-application-row-helm') do
@@ -28,9 +28,11 @@ describe 'Clusters Applications', :js do
end
context 'when cluster is created' do
- let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project])}
+ let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
it 'user can install applications' do
+ wait_for_requests
+
page.within('.js-cluster-application-row-helm') do
expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to be_nil
expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Install')
@@ -44,6 +46,8 @@ describe 'Clusters Applications', :js do
page.within('.js-cluster-application-row-helm') do
page.find(:css, '.js-cluster-application-install-button').click
end
+
+ wait_for_requests
end
it 'they see status transition' do
@@ -52,8 +56,6 @@ describe 'Clusters Applications', :js do
expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installing')
- wait_until_helm_created!
-
Clusters::Cluster.last.application_helm.make_installing!
      # FE starts polling and updates the buttons to "Installing"
@@ -76,7 +78,7 @@ describe 'Clusters Applications', :js do
end
context 'on an abac cluster' do
- let(:cluster) { create(:cluster, :provided_by_gcp, :rbac_disabled, projects: [project])}
+ let(:cluster) { create(:cluster, :provided_by_gcp, :rbac_disabled, projects: [project]) }
it 'should show info block and not be installable' do
page.within('.js-cluster-application-row-knative') do
@@ -87,7 +89,7 @@ describe 'Clusters Applications', :js do
end
context 'on an rbac cluster' do
- let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project])}
+ let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
it 'should not show callout block and be installable' do
page.within('.js-cluster-application-row-knative') do
@@ -95,6 +97,60 @@ describe 'Clusters Applications', :js do
expect(page).to have_css('.js-cluster-application-install-button:not([disabled])')
end
end
+
+ describe 'when user clicks install button' do
+ def domainname_form_value
+ page.find('.js-knative-domainname').value
+ end
+
+ before do
+ allow(ClusterInstallAppWorker).to receive(:perform_async)
+ allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
+ allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
+
+ page.within('.js-cluster-application-row-knative') do
+ expect(page).to have_css('.js-cluster-application-install-button:not([disabled])')
+
+ page.find('.js-knative-domainname').set("domain.example.org")
+
+ click_button 'Install'
+
+ wait_for_requests
+
+ expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installing')
+
+ Clusters::Cluster.last.application_knative.make_installing!
+ Clusters::Cluster.last.application_knative.make_installed!
+ Clusters::Cluster.last.application_knative.update_attribute(:external_ip, '127.0.0.1')
+ end
+ end
+
+ it 'shows status transition' do
+ page.within('.js-cluster-application-row-knative') do
+ expect(domainname_form_value).to eq('domain.example.org')
+ expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installed')
+ end
+
+ expect(page).to have_content('Knative was successfully installed on your Kubernetes cluster')
+          expect(page).to have_css('.js-knative-save-domain-button', exact_text: 'Save changes')
+ end
+
+ it 'can then update the domain' do
+ page.within('.js-cluster-application-row-knative') do
+ expect(ClusterPatchAppWorker).to receive(:perform_async)
+
+ expect(domainname_form_value).to eq('domain.example.org')
+
+ page.find('.js-knative-domainname').set("new.domain.example.org")
+
+ click_button 'Save changes'
+
+ wait_for_requests
+
+ expect(domainname_form_value).to eq('new.domain.example.org')
+ end
+ end
+ end
end
end
@@ -148,6 +204,8 @@ describe 'Clusters Applications', :js do
page.within('.js-cluster-application-row-ingress') do
expect(page).to have_css('.js-cluster-application-install-button:not([disabled])')
page.find(:css, '.js-cluster-application-install-button').click
+
+ wait_for_requests
end
end
@@ -184,14 +242,4 @@ describe 'Clusters Applications', :js do
end
end
end
-
- def wait_until_helm_created!
- retries = 0
-
- while Clusters::Cluster.last.application_helm.nil?
- raise "Timed out waiting for helm application to be created in DB" if (retries += 1) > 3
-
- sleep(1)
- end
- end
end
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index a8a3b6910fb..3090f1a2131 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -120,7 +120,7 @@ describe 'Environment' do
end
it 'does show a play button' do
- expect(page).to have_link(action.name.humanize)
+ expect(page).to have_link(action.name)
end
it 'does allow to play manual action', :js do
@@ -128,7 +128,7 @@ describe 'Environment' do
find('button.dropdown').click
- expect { click_link(action.name.humanize) }
+ expect { click_link(action.name) }
.not_to change { Ci::Pipeline.count }
wait_for_all_requests
@@ -140,7 +140,7 @@ describe 'Environment' do
context 'when user has no ability to trigger a deployment' do
it 'does not show a play button' do
- expect(page).not_to have_link(action.name.humanize)
+ expect(page).not_to have_link(action.name)
end
end
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 66c6545204b..b2a435e554d 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -183,14 +183,14 @@ describe 'Environments page', :js do
it 'shows a play button' do
find('.js-environment-actions-dropdown').click
- expect(page).to have_content(action.name.humanize)
+ expect(page).to have_content(action.name)
end
it 'allows to play a manual action', :js do
expect(action).to be_manual
find('.js-environment-actions-dropdown').click
- expect(page).to have_content(action.name.humanize)
+ expect(page).to have_content(action.name)
expect { find('.js-manual-action-link').click }
.not_to change { Ci::Pipeline.count }
@@ -311,7 +311,7 @@ describe 'Environments page', :js do
it "has link to the delayed job's action" do
find('.js-environment-actions-dropdown').click
- expect(page).to have_button('Delayed job')
+ expect(page).to have_button('delayed job')
expect(page).to have_content(/\d{2}:\d{2}:\d{2}/)
end
@@ -333,7 +333,7 @@ describe 'Environments page', :js do
context 'when user played a delayed job immediately' do
before do
find('.js-environment-actions-dropdown').click
- page.accept_confirm { click_button('Delayed job') }
+ page.accept_confirm { click_button('delayed job') }
wait_for_requests
end
diff --git a/spec/features/projects/members/invite_group_spec.rb b/spec/features/projects/members/invite_group_spec.rb
index fceead0b45e..b2d2dba55f1 100644
--- a/spec/features/projects/members/invite_group_spec.rb
+++ b/spec/features/projects/members/invite_group_spec.rb
@@ -27,6 +27,7 @@ describe 'Project > Members > Invite group', :js do
before do
project.add_maintainer(maintainer)
+ group_to_share_with.add_guest(maintainer)
sign_in(maintainer)
end
@@ -112,6 +113,7 @@ describe 'Project > Members > Invite group', :js do
before do
project.add_maintainer(maintainer)
+ group.add_guest(maintainer)
sign_in(maintainer)
visit project_settings_members_path(project)
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 72ef460d315..36b8c15b8b6 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -666,7 +666,7 @@ describe 'Pipeline', :js do
let(:pipeline) do
create(:ci_pipeline,
- source: :merge_request,
+ source: :merge_request_event,
project: merge_request.source_project,
ref: 'feature',
sha: merge_request.diff_head_sha,
diff --git a/spec/features/projects/settings/operations_settings_spec.rb b/spec/features/projects/settings/operations_settings_spec.rb
index 06290c67c70..af56cb0d4ee 100644
--- a/spec/features/projects/settings/operations_settings_spec.rb
+++ b/spec/features/projects/settings/operations_settings_spec.rb
@@ -20,4 +20,81 @@ describe 'Projects > Settings > For a forked project', :js do
expect(page).to have_selector('a[title="Operations"]', visible: false)
end
end
+
+ describe 'Settings > Operations' do
+ context 'error tracking settings form' do
+ let(:sentry_list_projects_url) { 'http://sentry.example.com/api/0/projects/' }
+
+ context 'success path' do
+ let(:projects_sample_response) do
+ Gitlab::Utils.deep_indifferent_access(
+ JSON.parse(fixture_file('sentry/list_projects_sample_response.json'))
+ )
+ end
+
+ before do
+ WebMock.stub_request(:get, sentry_list_projects_url)
+ .to_return(
+ status: 200,
+ headers: { 'Content-Type' => 'application/json' },
+ body: projects_sample_response.to_json
+ )
+ end
+
+ it 'successfully fills and submits the form' do
+ visit project_settings_operations_path(project)
+
+ wait_for_requests
+
+ expect(page).to have_content('Sentry API URL')
+ expect(page.body).to include('Error Tracking')
+ expect(page).to have_button('Connect')
+
+ check('Active')
+ fill_in('error-tracking-api-host', with: 'http://sentry.example.com')
+ fill_in('error-tracking-token', with: 'token')
+
+ click_button('Connect')
+
+ within('div#project-dropdown') do
+ click_button('Select project')
+ click_button('Sentry | Internal')
+ end
+
+ click_button('Save changes')
+
+ wait_for_requests
+
+ assert_text('Your changes have been saved')
+ end
+ end
+
+ context 'project dropdown fails to load' do
+ before do
+ WebMock.stub_request(:get, sentry_list_projects_url)
+ .to_return(
+ status: 400,
+ headers: { 'Content-Type' => 'application/json' },
+ body: {
+ message: 'Sentry response code: 401'
+ }.to_json
+ )
+ end
+
+ it 'displays error message' do
+ visit project_settings_operations_path(project)
+
+ wait_for_requests
+
+ check('Active')
+ fill_in('error-tracking-api-host', with: 'http://sentry.example.com')
+ fill_in('error-tracking-token', with: 'token')
+
+ click_button('Connect')
+
+ assert_text('Connection has failed. Re-check Auth Token and try again.')
+ end
+ end
+ end
+ end
end
diff --git a/spec/features/projects/settings/project_settings_spec.rb b/spec/features/projects/settings/project_settings_spec.rb
new file mode 100644
index 00000000000..7afddc0e712
--- /dev/null
+++ b/spec/features/projects/settings/project_settings_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Projects settings' do
+ set(:project) { create(:project) }
+ let(:user) { project.owner }
+ let(:panel) { find('.general-settings', match: :first) }
+ let(:button) { panel.find('.btn.js-settings-toggle') }
+ let(:title) { panel.find('.settings-title') }
+
+ before do
+ sign_in(user)
+ visit edit_project_path(project)
+ end
+
+ it 'can toggle sections by clicking the title or button', :js do
+ expect_toggle_state(:expanded)
+
+ button.click
+
+ expect_toggle_state(:collapsed)
+
+ button.click
+
+ expect_toggle_state(:expanded)
+
+ title.click
+
+ expect_toggle_state(:collapsed)
+
+ title.click
+
+ expect_toggle_state(:expanded)
+ end
+
+ def expect_toggle_state(state)
+ is_collapsed = state == :collapsed
+
+ expect(button).to have_content(is_collapsed ? 'Expand' : 'Collapse')
+ expect(panel[:class]).send(is_collapsed ? 'not_to' : 'to', include('expanded'))
+ end
+end
diff --git a/spec/features/projects/settings/user_manages_group_links_spec.rb b/spec/features/projects/settings/user_manages_group_links_spec.rb
index 676659b90c3..e5a58c44e41 100644
--- a/spec/features/projects/settings/user_manages_group_links_spec.rb
+++ b/spec/features/projects/settings/user_manages_group_links_spec.rb
@@ -10,6 +10,7 @@ describe 'Projects > Settings > User manages group links' do
before do
project.add_maintainer(user)
+ group_market.add_guest(user)
sign_in(user)
share_link = project.project_group_links.new(group_access: Gitlab::Access::MAINTAINER)
diff --git a/spec/features/projects/user_sees_sidebar_spec.rb b/spec/features/projects/user_sees_sidebar_spec.rb
index ee5734a9bf1..383e8824b7b 100644
--- a/spec/features/projects/user_sees_sidebar_spec.rb
+++ b/spec/features/projects/user_sees_sidebar_spec.rb
@@ -4,6 +4,108 @@ describe 'Projects > User sees sidebar' do
let(:user) { create(:user) }
let(:project) { create(:project, :private, public_builds: false, namespace: user.namespace) }
+ # NOTE: See documented behaviour https://design.gitlab.com/regions/navigation#contextual-navigation
+ context 'on different viewports', :js do
+ include MobileHelpers
+
+ before do
+ sign_in(user)
+ end
+
+    shared_examples 'has an expanded nav sidebar' do
+      it 'has an expanded desktop nav-sidebar on load' do
+ expect(page).to have_content('Collapse sidebar')
+ expect(page).not_to have_selector('.sidebar-collapsed-desktop')
+ expect(page).not_to have_selector('.sidebar-expanded-mobile')
+ end
+
+ it 'can collapse the nav-sidebar' do
+ page.find('.nav-sidebar .js-toggle-sidebar').click
+ expect(page).to have_selector('.sidebar-collapsed-desktop')
+ expect(page).not_to have_content('Collapse sidebar')
+ expect(page).not_to have_selector('.sidebar-expanded-mobile')
+ end
+ end
+
+ shared_examples 'has a collapsed nav sidebar' do
+ it 'has a collapsed desktop nav-sidebar on load' do
+ expect(page).not_to have_content('Collapse sidebar')
+ expect(page).not_to have_selector('.sidebar-expanded-mobile')
+ end
+
+ it 'can expand the nav-sidebar' do
+ page.find('.nav-sidebar .js-toggle-sidebar').click
+ expect(page).to have_selector('.sidebar-expanded-mobile')
+ expect(page).to have_content('Collapse sidebar')
+ end
+ end
+
+ shared_examples 'has a mobile nav-sidebar' do
+ it 'has a hidden nav-sidebar on load' do
+        expect(page).not_to have_selector('.mobile-nav-open')
+ expect(page).not_to have_selector('.sidebar-expanded-mobile')
+ end
+
+ it 'can expand the nav-sidebar' do
+ page.find('.toggle-mobile-nav').click
+ expect(page).to have_selector('.mobile-nav-open')
+ expect(page).to have_selector('.sidebar-expanded-mobile')
+ end
+ end
+
+    context 'with an extra small viewport' do
+ before do
+ resize_screen_xs
+ visit project_path(project)
+ expect(page).to have_selector('.nav-sidebar')
+ expect(page).to have_selector('.toggle-mobile-nav')
+ end
+
+ it_behaves_like 'has a mobile nav-sidebar'
+ end
+
+ context 'with a small size viewport' do
+ before do
+ resize_screen_sm
+ visit project_path(project)
+ expect(page).to have_selector('.nav-sidebar')
+ expect(page).to have_selector('.toggle-mobile-nav')
+ end
+
+ it_behaves_like 'has a mobile nav-sidebar'
+ end
+
+ context 'with medium size viewport' do
+ before do
+ resize_window(768, 800)
+ visit project_path(project)
+ expect(page).to have_selector('.nav-sidebar')
+ end
+
+ it_behaves_like 'has a collapsed nav sidebar'
+ end
+
+ context 'with viewport size 1199px' do
+ before do
+ resize_window(1199, 800)
+ visit project_path(project)
+ expect(page).to have_selector('.nav-sidebar')
+ end
+
+ it_behaves_like 'has a collapsed nav sidebar'
+ end
+
+    context 'with an extra large viewport' do
+ before do
+ resize_window(1200, 800)
+ visit project_path(project)
+ expect(page).to have_selector('.nav-sidebar')
+ end
+
+      it_behaves_like 'has an expanded nav sidebar'
+ end
+ end
+
context 'as owner' do
before do
sign_in(user)
diff --git a/spec/features/security/group/private_access_spec.rb b/spec/features/security/group/private_access_spec.rb
index 4705cd12d23..3238e07fe15 100644
--- a/spec/features/security/group/private_access_spec.rb
+++ b/spec/features/security/group/private_access_spec.rb
@@ -27,7 +27,7 @@ describe 'Private Group access' do
it { is_expected.to be_allowed_for(:developer).of(group) }
it { is_expected.to be_allowed_for(:reporter).of(group) }
it { is_expected.to be_allowed_for(:guest).of(group) }
- it { is_expected.to be_allowed_for(project_guest) }
+ it { is_expected.to be_denied_for(project_guest) }
it { is_expected.to be_denied_for(:user) }
it { is_expected.to be_denied_for(:external) }
it { is_expected.to be_denied_for(:visitor) }
@@ -42,7 +42,7 @@ describe 'Private Group access' do
it { is_expected.to be_allowed_for(:developer).of(group) }
it { is_expected.to be_allowed_for(:reporter).of(group) }
it { is_expected.to be_allowed_for(:guest).of(group) }
- it { is_expected.to be_allowed_for(project_guest) }
+ it { is_expected.to be_denied_for(project_guest) }
it { is_expected.to be_denied_for(:user) }
it { is_expected.to be_denied_for(:external) }
it { is_expected.to be_denied_for(:visitor) }
@@ -58,7 +58,7 @@ describe 'Private Group access' do
it { is_expected.to be_allowed_for(:developer).of(group) }
it { is_expected.to be_allowed_for(:reporter).of(group) }
it { is_expected.to be_allowed_for(:guest).of(group) }
- it { is_expected.to be_allowed_for(project_guest) }
+ it { is_expected.to be_denied_for(project_guest) }
it { is_expected.to be_denied_for(:user) }
it { is_expected.to be_denied_for(:external) }
it { is_expected.to be_denied_for(:visitor) }
@@ -73,7 +73,7 @@ describe 'Private Group access' do
it { is_expected.to be_allowed_for(:developer).of(group) }
it { is_expected.to be_allowed_for(:reporter).of(group) }
it { is_expected.to be_allowed_for(:guest).of(group) }
- it { is_expected.to be_allowed_for(project_guest) }
+ it { is_expected.to be_denied_for(project_guest) }
it { is_expected.to be_denied_for(:user) }
it { is_expected.to be_denied_for(:external) }
it { is_expected.to be_denied_for(:visitor) }
@@ -93,4 +93,28 @@ describe 'Private Group access' do
it { is_expected.to be_denied_for(:visitor) }
it { is_expected.to be_denied_for(:external) }
end
+
+ describe 'GET /groups/:path for shared projects' do
+ let(:project) { create(:project, :public) }
+ before do
+ Projects::GroupLinks::CreateService.new(
+ project,
+ create(:user),
+ link_group_access: ProjectGroupLink::DEVELOPER
+ ).execute(group)
+ end
+
+ subject { group_path(group) }
+
+ it { is_expected.to be_allowed_for(:admin) }
+ it { is_expected.to be_allowed_for(:owner).of(group) }
+ it { is_expected.to be_allowed_for(:maintainer).of(group) }
+ it { is_expected.to be_allowed_for(:developer).of(group) }
+ it { is_expected.to be_allowed_for(:reporter).of(group) }
+ it { is_expected.to be_allowed_for(:guest).of(group) }
+ it { is_expected.to be_denied_for(project_guest) }
+ it { is_expected.to be_denied_for(:user) }
+ it { is_expected.to be_denied_for(:external) }
+ it { is_expected.to be_denied_for(:visitor) }
+ end
end
diff --git a/spec/finders/admin/runners_finder_spec.rb b/spec/finders/admin/runners_finder_spec.rb
index 0b2325cc7ca..94ccb398801 100644
--- a/spec/finders/admin/runners_finder_spec.rb
+++ b/spec/finders/admin/runners_finder_spec.rb
@@ -37,6 +37,14 @@ describe Admin::RunnersFinder do
end
end
+ context 'filter by tag_name' do
+ it 'calls the corresponding scope on Ci::Runner' do
+ expect(Ci::Runner).to receive(:tagged_with).with(%w[tag1 tag2]).and_call_original
+
+ described_class.new(params: { tag_name: %w[tag1 tag2] }).execute
+ end
+ end
+
context 'sort' do
context 'without sort param' do
it 'sorts by created_at' do
diff --git a/spec/finders/autocomplete/acts_as_taggable_on/tags_finder_spec.rb b/spec/finders/autocomplete/acts_as_taggable_on/tags_finder_spec.rb
new file mode 100644
index 00000000000..79d2f9cdb45
--- /dev/null
+++ b/spec/finders/autocomplete/acts_as_taggable_on/tags_finder_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Autocomplete::ActsAsTaggableOn::TagsFinder do
+ describe '#execute' do
+ context 'with empty params' do
+ it 'returns all tags' do
+ tag1 = ActsAsTaggableOn::Tag.create!(name: 'tag1')
+ tag2 = ActsAsTaggableOn::Tag.create!(name: 'tag2')
+
+ tags = described_class.new(params: {}).execute
+
+ expect(tags).to match_array [tag1, tag2]
+ end
+ end
+
+ context 'filter by search' do
+ context 'with an empty search term' do
+ it 'returns an empty collection' do
+ ActsAsTaggableOn::Tag.create!(name: 'tag1')
+ ActsAsTaggableOn::Tag.create!(name: 'tag2')
+
+ tags = described_class.new(params: { search: '' }).execute
+
+ expect(tags).to be_empty
+ end
+ end
+
+ context 'with a search containing 2 characters' do
+ it 'returns the tag that strictly matches the search term' do
+ tag1 = ActsAsTaggableOn::Tag.create!(name: 't1')
+ ActsAsTaggableOn::Tag.create!(name: 't11')
+
+ tags = described_class.new(params: { search: 't1' }).execute
+
+ expect(tags).to match_array [tag1]
+ end
+ end
+
+ context 'with a search containing 3 characters' do
+      it 'returns the tags that partially match the search term' do
+ tag1 = ActsAsTaggableOn::Tag.create!(name: 'tag1')
+ tag2 = ActsAsTaggableOn::Tag.create!(name: 'tag11')
+
+ tags = described_class.new(params: { search: 'ag1' }).execute
+
+ expect(tags).to match_array [tag1, tag2]
+ end
+ end
+ end
+
+ context 'limit' do
+ it 'limits the result set by the limit constant' do
+ stub_const("#{described_class}::LIMIT", 1)
+
+ ActsAsTaggableOn::Tag.create!(name: 'tag1')
+ ActsAsTaggableOn::Tag.create!(name: 'tag2')
+
+ tags = described_class.new(params: { search: 'tag' }).execute
+
+ expect(tags.count).to eq 1
+ end
+ end
+ end
+end
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index 47e2548c3d6..55efab7dec3 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -715,7 +715,7 @@ describe IssuesFinder do
before do
allow(Gitlab::Database).to receive(:postgresql?).and_return(true)
- stub_feature_flags(use_subquery_for_group_issues_search: true)
+ stub_feature_flags(attempt_group_search_optimizations: true)
end
context 'when there is no search param' do
@@ -746,11 +746,11 @@ describe IssuesFinder do
end
end
- context 'when the use_subquery_for_group_issues_search flag is disabled' do
+ context 'when the attempt_group_search_optimizations flag is disabled' do
let(:params) { { search: 'foo', attempt_group_search_optimizations: true } }
before do
- stub_feature_flags(use_subquery_for_group_issues_search: false)
+ stub_feature_flags(attempt_group_search_optimizations: false)
end
it 'returns false' do
@@ -758,6 +758,14 @@ describe IssuesFinder do
end
end
+ context 'when force_cte? is true' do
+ let(:params) { { search: 'foo', attempt_group_search_optimizations: true, force_cte: true } }
+
+ it 'returns false' do
+ expect(finder.use_subquery_for_search?).to be_falsey
+ end
+ end
+
context 'when all conditions are met' do
let(:params) { { search: 'foo', attempt_group_search_optimizations: true } }
@@ -767,72 +775,59 @@ describe IssuesFinder do
end
end
- describe '#use_cte_for_search?' do
+ describe '#use_cte_for_count?' do
let(:finder) { described_class.new(nil, params) }
before do
allow(Gitlab::Database).to receive(:postgresql?).and_return(true)
- stub_feature_flags(use_cte_for_group_issues_search: true)
- stub_feature_flags(use_subquery_for_group_issues_search: false)
+ stub_feature_flags(attempt_group_search_optimizations: true)
end
context 'when there is no search param' do
- let(:params) { { attempt_group_search_optimizations: true } }
+ let(:params) { { attempt_group_search_optimizations: true, force_cte: true } }
it 'returns false' do
- expect(finder.use_cte_for_search?).to be_falsey
+ expect(finder.use_cte_for_count?).to be_falsey
end
end
context 'when the database is not Postgres' do
- let(:params) { { search: 'foo', attempt_group_search_optimizations: true } }
+ let(:params) { { search: 'foo', force_cte: true, attempt_group_search_optimizations: true } }
before do
allow(Gitlab::Database).to receive(:postgresql?).and_return(false)
end
it 'returns false' do
- expect(finder.use_cte_for_search?).to be_falsey
+ expect(finder.use_cte_for_count?).to be_falsey
end
end
- context 'when the attempt_group_search_optimizations param is falsey' do
+ context 'when the force_cte param is falsey' do
let(:params) { { search: 'foo' } }
it 'returns false' do
- expect(finder.use_cte_for_search?).to be_falsey
- end
- end
-
- context 'when the use_cte_for_group_issues_search flag is disabled' do
- let(:params) { { search: 'foo', attempt_group_search_optimizations: true } }
-
- before do
- stub_feature_flags(use_cte_for_group_issues_search: false)
- end
-
- it 'returns false' do
- expect(finder.use_cte_for_search?).to be_falsey
+ expect(finder.use_cte_for_count?).to be_falsey
end
end
- context 'when use_subquery_for_search? is true' do
- let(:params) { { search: 'foo', attempt_group_search_optimizations: true } }
+ context 'when the attempt_group_search_optimizations flag is disabled' do
+ let(:params) { { search: 'foo', force_cte: true, attempt_group_search_optimizations: true } }
before do
- stub_feature_flags(use_subquery_for_group_issues_search: true)
+ stub_feature_flags(attempt_group_search_optimizations: false)
end
it 'returns false' do
- expect(finder.use_cte_for_search?).to be_falsey
+ expect(finder.use_cte_for_count?).to be_falsey
end
end
context 'when all conditions are met' do
- let(:params) { { search: 'foo', attempt_group_search_optimizations: true } }
+ let(:params) { { search: 'foo', force_cte: true, attempt_group_search_optimizations: true } }
it 'returns true' do
- expect(finder.use_cte_for_search?).to be_truthy
+ expect(finder.use_cte_for_count?).to be_truthy
end
end
end
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 107da08a0a9..503b88fcbad 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -13,275 +13,409 @@ describe MergeRequestsFinder do
end
end
- let(:user) { create :user }
- let(:user2) { create :user }
-
- let(:group) { create(:group) }
- let(:subgroup) { create(:group, parent: group) }
- let(:project1) { create_project_without_n_plus_1(group: group) }
- let(:project2) do
- Gitlab::GitalyClient.allow_n_plus_1_calls do
- fork_project(project1, user)
+ context "multiple projects with merge requests" do
+ let(:user) { create :user }
+ let(:user2) { create :user }
+
+ let(:group) { create(:group) }
+ let(:subgroup) { create(:group, parent: group) }
+ let(:project1) { create_project_without_n_plus_1(group: group) }
+ let(:project2) do
+ Gitlab::GitalyClient.allow_n_plus_1_calls do
+ fork_project(project1, user)
+ end
end
- end
- let(:project3) do
- Gitlab::GitalyClient.allow_n_plus_1_calls do
- p = fork_project(project1, user)
- p.update!(archived: true)
- p
+ let(:project3) do
+ Gitlab::GitalyClient.allow_n_plus_1_calls do
+ p = fork_project(project1, user)
+ p.update!(archived: true)
+ p
+ end
end
- end
- let(:project4) { create_project_without_n_plus_1(group: subgroup) }
- let(:project5) { create_project_without_n_plus_1(group: subgroup) }
- let(:project6) { create_project_without_n_plus_1(group: subgroup) }
-
- let!(:merge_request1) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project1) }
- let!(:merge_request2) { create(:merge_request, :conflict, author: user, source_project: project2, target_project: project1, state: 'closed') }
- let!(:merge_request3) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project2, state: 'locked', title: 'thing WIP thing') }
- let!(:merge_request4) { create(:merge_request, :simple, author: user, source_project: project3, target_project: project3, title: 'WIP thing') }
- let!(:merge_request5) { create(:merge_request, :simple, author: user, source_project: project4, target_project: project4, title: '[WIP]') }
- let!(:merge_request6) { create(:merge_request, :simple, author: user, source_project: project5, target_project: project5, title: 'WIP: thing') }
- let!(:merge_request7) { create(:merge_request, :simple, author: user, source_project: project6, target_project: project6, title: 'wip thing') }
- let!(:merge_request8) { create(:merge_request, :simple, author: user, source_project: project1, target_project: project1, title: '[wip] thing') }
- let!(:merge_request9) { create(:merge_request, :simple, author: user, source_project: project1, target_project: project2, title: 'wip: thing') }
-
- before do
- project1.add_maintainer(user)
- project2.add_developer(user)
- project3.add_developer(user)
- project2.add_developer(user2)
- project4.add_developer(user)
- project5.add_developer(user)
- project6.add_developer(user)
- end
-
- describe "#execute" do
- it 'filters by scope' do
- params = { scope: 'authored', state: 'opened' }
- merge_requests = described_class.new(user, params).execute
- expect(merge_requests.size).to eq(7)
+ let(:project4) { create_project_without_n_plus_1(:repository, group: subgroup) }
+ let(:project5) { create_project_without_n_plus_1(group: subgroup) }
+ let(:project6) { create_project_without_n_plus_1(group: subgroup) }
+
+ let!(:merge_request1) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project1) }
+ let!(:merge_request2) { create(:merge_request, :conflict, author: user, source_project: project2, target_project: project1, state: 'closed') }
+ let!(:merge_request3) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project2, state: 'locked', title: 'thing WIP thing') }
+ let!(:merge_request4) { create(:merge_request, :simple, author: user, source_project: project3, target_project: project3, title: 'WIP thing') }
+ let!(:merge_request5) { create(:merge_request, :simple, author: user, source_project: project4, target_project: project4, title: '[WIP]') }
+ let!(:merge_request6) { create(:merge_request, :simple, author: user, source_project: project5, target_project: project5, title: 'WIP: thing') }
+ let!(:merge_request7) { create(:merge_request, :simple, author: user, source_project: project6, target_project: project6, title: 'wip thing') }
+ let!(:merge_request8) { create(:merge_request, :simple, author: user, source_project: project1, target_project: project1, title: '[wip] thing') }
+ let!(:merge_request9) { create(:merge_request, :simple, author: user, source_project: project1, target_project: project2, title: 'wip: thing') }
+
+ before do
+ project1.add_maintainer(user)
+ project2.add_developer(user)
+ project3.add_developer(user)
+ project2.add_developer(user2)
+ project4.add_developer(user)
+ project5.add_developer(user)
+ project6.add_developer(user)
end
- it 'filters by project' do
- params = { project_id: project1.id, scope: 'authored', state: 'opened' }
- merge_requests = described_class.new(user, params).execute
- expect(merge_requests.size).to eq(2)
- end
+ describe '#execute' do
+ it 'filters by scope' do
+ params = { scope: 'authored', state: 'opened' }
+ merge_requests = described_class.new(user, params).execute
+ expect(merge_requests.size).to eq(7)
+ end
+
+ it 'filters by project' do
+ params = { project_id: project1.id, scope: 'authored', state: 'opened' }
+ merge_requests = described_class.new(user, params).execute
+ expect(merge_requests.size).to eq(2)
+ end
+
+ it 'filters by commit sha' do
+ merge_requests = described_class.new(
+ user,
+ commit_sha: merge_request5.merge_request_diff.last_commit_sha
+ ).execute
+
+ expect(merge_requests).to contain_exactly(merge_request5)
+ end
+
+ context 'filtering by group' do
+ it 'includes all merge requests when user has access' do
+ params = { group_id: group.id }
+
+ merge_requests = described_class.new(user, params).execute
+
+ expect(merge_requests.size).to eq(3)
+ end
- context 'filtering by group' do
- it 'includes all merge requests when user has access' do
- params = { group_id: group.id }
+ it 'excludes merge requests from projects the user does not have access to' do
+ private_project = create_project_without_n_plus_1(:private, group: group)
+ private_mr = create(:merge_request, :simple, author: user, source_project: private_project, target_project: private_project)
+ params = { group_id: group.id }
+ private_project.add_guest(user)
+ merge_requests = described_class.new(user, params).execute
+
+ expect(merge_requests.size).to eq(3)
+ expect(merge_requests).not_to include(private_mr)
+ end
+
+ it 'filters by group including subgroups', :nested_groups do
+ params = { group_id: group.id, include_subgroups: true }
+
+ merge_requests = described_class.new(user, params).execute
+
+ expect(merge_requests.size).to eq(6)
+ end
+ end
+
+ it 'filters by non_archived' do
+ params = { non_archived: true }
merge_requests = described_class.new(user, params).execute
+ expect(merge_requests.size).to eq(8)
+ end
+
+ it 'filters by iid' do
+ params = { project_id: project1.id, iids: merge_request1.iid }
- expect(merge_requests.size).to eq(3)
+ merge_requests = described_class.new(user, params).execute
+
+ expect(merge_requests).to contain_exactly(merge_request1)
end
- it 'excludes merge requests from projects the user does not have access to' do
- private_project = create_project_without_n_plus_1(:private, group: group)
- private_mr = create(:merge_request, :simple, author: user, source_project: private_project, target_project: private_project)
- params = { group_id: group.id }
+ it 'filters by source branch' do
+ params = { source_branch: merge_request2.source_branch }
- private_project.add_guest(user)
merge_requests = described_class.new(user, params).execute
- expect(merge_requests.size).to eq(3)
- expect(merge_requests).not_to include(private_mr)
+ expect(merge_requests).to contain_exactly(merge_request2)
end
- it 'filters by group including subgroups', :nested_groups do
- params = { group_id: group.id, include_subgroups: true }
+ it 'filters by target branch' do
+ params = { target_branch: merge_request2.target_branch }
merge_requests = described_class.new(user, params).execute
- expect(merge_requests.size).to eq(6)
+ expect(merge_requests).to contain_exactly(merge_request2)
end
- end
- it 'filters by non_archived' do
- params = { non_archived: true }
- merge_requests = described_class.new(user, params).execute
- expect(merge_requests.size).to eq(8)
- end
+ it 'filters by state' do
+ params = { state: 'locked' }
- it 'filters by iid' do
- params = { project_id: project1.id, iids: merge_request1.iid }
+ merge_requests = described_class.new(user, params).execute
- merge_requests = described_class.new(user, params).execute
+ expect(merge_requests).to contain_exactly(merge_request3)
+ end
- expect(merge_requests).to contain_exactly(merge_request1)
- end
+ it 'filters by wip' do
+ params = { wip: 'yes' }
- it 'filters by source branch' do
- params = { source_branch: merge_request2.source_branch }
+ merge_requests = described_class.new(user, params).execute
- merge_requests = described_class.new(user, params).execute
+ expect(merge_requests).to contain_exactly(merge_request4, merge_request5, merge_request6, merge_request7, merge_request8, merge_request9)
+ end
- expect(merge_requests).to contain_exactly(merge_request2)
- end
+ it 'filters by not wip' do
+ params = { wip: 'no' }
- it 'filters by target branch' do
- params = { target_branch: merge_request2.target_branch }
+ merge_requests = described_class.new(user, params).execute
- merge_requests = described_class.new(user, params).execute
+ expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request3)
+ end
- expect(merge_requests).to contain_exactly(merge_request2)
- end
+ it 'returns all items if no valid wip param exists' do
+ params = { wip: '' }
- it 'filters by state' do
- params = { state: 'locked' }
+ merge_requests = described_class.new(user, params).execute
- merge_requests = described_class.new(user, params).execute
+ expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request3, merge_request4, merge_request5, merge_request6, merge_request7, merge_request8, merge_request9)
+ end
- expect(merge_requests).to contain_exactly(merge_request3)
- end
+ it 'adds wip to scalar params' do
+ scalar_params = described_class.scalar_params
- it 'filters by wip' do
- params = { wip: 'yes' }
+ expect(scalar_params).to include(:wip, :assignee_id)
+ end
- merge_requests = described_class.new(user, params).execute
+ context 'filtering by group milestone' do
+ let!(:group) { create(:group, :public) }
+ let(:group_milestone) { create(:milestone, group: group) }
+ let!(:group_member) { create(:group_member, group: group, user: user) }
+ let(:params) { { milestone_title: group_milestone.title } }
- expect(merge_requests).to contain_exactly(merge_request4, merge_request5, merge_request6, merge_request7, merge_request8, merge_request9)
- end
+ before do
+ project2.update(namespace: group)
+ merge_request2.update(milestone: group_milestone)
+ merge_request3.update(milestone: group_milestone)
+ end
- it 'filters by not wip' do
- params = { wip: 'no' }
+        it 'returns merge requests assigned to that group milestone' do
+ merge_requests = described_class.new(user, params).execute
- merge_requests = described_class.new(user, params).execute
+ expect(merge_requests).to contain_exactly(merge_request2, merge_request3)
+ end
+ end
- expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request3)
- end
+ context 'filtering by created_at/updated_at' do
+ let(:new_project) { create(:project, forked_from_project: project1) }
- it 'returns all items if no valid wip param exists' do
- params = { wip: '' }
+ let!(:new_merge_request) do
+ create(:merge_request,
+ :simple,
+ author: user,
+ created_at: 1.week.from_now,
+ updated_at: 1.week.from_now,
+ source_project: new_project,
+ target_project: new_project)
+ end
- merge_requests = described_class.new(user, params).execute
+ let!(:old_merge_request) do
+ create(:merge_request,
+ :simple,
+ author: user,
+ source_branch: 'feature_1',
+ created_at: 1.week.ago,
+ updated_at: 1.week.ago,
+ source_project: new_project,
+ target_project: new_project)
+ end
- expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request3, merge_request4, merge_request5, merge_request6, merge_request7, merge_request8, merge_request9)
- end
+ before do
+ new_project.add_maintainer(user)
+ end
- it 'adds wip to scalar params' do
- scalar_params = described_class.scalar_params
+ it 'filters by created_after' do
+ params = { project_id: new_project.id, created_after: new_merge_request.created_at }
- expect(scalar_params).to include(:wip, :assignee_id)
- end
+ merge_requests = described_class.new(user, params).execute
- context 'filtering by group milestone' do
- let!(:group) { create(:group, :public) }
- let(:group_milestone) { create(:milestone, group: group) }
- let!(:group_member) { create(:group_member, group: group, user: user) }
- let(:params) { { milestone_title: group_milestone.title } }
+ expect(merge_requests).to contain_exactly(new_merge_request)
+ end
- before do
- project2.update(namespace: group)
- merge_request2.update(milestone: group_milestone)
- merge_request3.update(milestone: group_milestone)
- end
+ it 'filters by created_before' do
+ params = { project_id: new_project.id, created_before: old_merge_request.created_at }
- it 'returns issues assigned to that group milestone' do
- merge_requests = described_class.new(user, params).execute
+ merge_requests = described_class.new(user, params).execute
- expect(merge_requests).to contain_exactly(merge_request2, merge_request3)
- end
- end
+ expect(merge_requests).to contain_exactly(old_merge_request)
+ end
- context 'filtering by created_at/updated_at' do
- let(:new_project) { create(:project, forked_from_project: project1) }
-
- let!(:new_merge_request) do
- create(:merge_request,
- :simple,
- author: user,
- created_at: 1.week.from_now,
- updated_at: 1.week.from_now,
- source_project: new_project,
- target_project: new_project)
- end
+ it 'filters by created_after and created_before' do
+ params = {
+ project_id: new_project.id,
+ created_after: old_merge_request.created_at,
+ created_before: new_merge_request.created_at
+ }
- let!(:old_merge_request) do
- create(:merge_request,
- :simple,
- author: user,
- source_branch: 'feature_1',
- created_at: 1.week.ago,
- updated_at: 1.week.ago,
- source_project: new_project,
- target_project: new_project)
- end
+ merge_requests = described_class.new(user, params).execute
- before do
- new_project.add_maintainer(user)
- end
+ expect(merge_requests).to contain_exactly(old_merge_request, new_merge_request)
+ end
- it 'filters by created_after' do
- params = { project_id: new_project.id, created_after: new_merge_request.created_at }
+ it 'filters by updated_after' do
+ params = { project_id: new_project.id, updated_after: new_merge_request.updated_at }
- merge_requests = described_class.new(user, params).execute
+ merge_requests = described_class.new(user, params).execute
- expect(merge_requests).to contain_exactly(new_merge_request)
- end
+ expect(merge_requests).to contain_exactly(new_merge_request)
+ end
- it 'filters by created_before' do
- params = { project_id: new_project.id, created_before: old_merge_request.created_at }
+ it 'filters by updated_before' do
+ params = { project_id: new_project.id, updated_before: old_merge_request.updated_at }
- merge_requests = described_class.new(user, params).execute
+ merge_requests = described_class.new(user, params).execute
- expect(merge_requests).to contain_exactly(old_merge_request)
- end
+ expect(merge_requests).to contain_exactly(old_merge_request)
+ end
- it 'filters by created_after and created_before' do
- params = {
- project_id: new_project.id,
- created_after: old_merge_request.created_at,
- created_before: new_merge_request.created_at
- }
+ it 'filters by updated_after and updated_before' do
+ params = {
+ project_id: new_project.id,
+ updated_after: old_merge_request.updated_at,
+ updated_before: new_merge_request.updated_at
+ }
- merge_requests = described_class.new(user, params).execute
+ merge_requests = described_class.new(user, params).execute
- expect(merge_requests).to contain_exactly(old_merge_request, new_merge_request)
+ expect(merge_requests).to contain_exactly(old_merge_request, new_merge_request)
+ end
end
+ end
- it 'filters by updated_after' do
- params = { project_id: new_project.id, updated_after: new_merge_request.updated_at }
+ describe '#row_count', :request_store do
+ it 'returns the number of rows for the default state' do
+ finder = described_class.new(user)
- merge_requests = described_class.new(user, params).execute
+ expect(finder.row_count).to eq(7)
+ end
- expect(merge_requests).to contain_exactly(new_merge_request)
+ it 'returns the number of rows for a given state' do
+ finder = described_class.new(user, state: 'closed')
+
+ expect(finder.row_count).to eq(1)
end
+ end
+ end
- it 'filters by updated_before' do
- params = { project_id: new_project.id, updated_before: old_merge_request.updated_at }
+ context 'when projects require different access levels for merge requests' do
+ let(:user) { create(:user) }
- merge_requests = described_class.new(user, params).execute
+ let(:public_project) { create(:project, :public) }
+ let(:internal) { create(:project, :internal) }
+ let(:private_project) { create(:project, :private) }
+ let(:public_with_private_repo) { create(:project, :public, :repository, :repository_private) }
+ let(:internal_with_private_repo) { create(:project, :internal, :repository, :repository_private) }
+
+ let(:merge_requests) { described_class.new(user, {}).execute }
+
+ let!(:mr_public) { create(:merge_request, source_project: public_project) }
+ let!(:mr_private) { create(:merge_request, source_project: private_project) }
+ let!(:mr_internal) { create(:merge_request, source_project: internal) }
+ let!(:mr_private_repo_access) { create(:merge_request, source_project: public_with_private_repo) }
+ let!(:mr_internal_private_repo_access) { create(:merge_request, source_project: internal_with_private_repo) }
+
+ context 'with admin user' do
+ let(:user) { create(:user, :admin) }
- expect(merge_requests).to contain_exactly(old_merge_request)
+ it 'returns all merge requests' do
+ expect(merge_requests).to eq(
+ [mr_internal_private_repo_access, mr_private_repo_access, mr_internal, mr_private, mr_public]
+ )
end
+ end
- it 'filters by updated_after and updated_before' do
- params = {
- project_id: new_project.id,
- updated_after: old_merge_request.updated_at,
- updated_before: new_merge_request.updated_at
- }
+ context 'when project restricts merge requests' do
+ let(:non_member) { create(:user) }
+ let(:project) { create(:project, :repository, :public, :merge_requests_private) }
+ let!(:merge_request) { create(:merge_request, source_project: project) }
- merge_requests = described_class.new(user, params).execute
+ it "returns nothing to non-members" do
+ merge_requests = described_class.new(
+ non_member,
+ project_id: project.id
+ ).execute
- expect(merge_requests).to contain_exactly(old_merge_request, new_merge_request)
+ expect(merge_requests).to be_empty
end
end
- end
- describe '#row_count', :request_store do
- it 'returns the number of rows for the default state' do
- finder = described_class.new(user)
+ context 'with external user' do
+ let(:user) { create(:user, :external) }
- expect(finder.row_count).to eq(7)
+ it 'returns only public merge requests' do
+ expect(merge_requests).to eq([mr_public])
+ end
end
- it 'returns the number of rows for a given state' do
- finder = described_class.new(user, state: 'closed')
+ context 'with authenticated user' do
+ it 'returns public and internal merge requests' do
+ expect(merge_requests).to eq([mr_internal, mr_public])
+ end
+
+ context 'being added to the private project' do
+ context 'as a guest' do
+ before do
+ private_project.add_guest(user)
+ end
+
+ it 'does not return merge requests from the private project' do
+ expect(merge_requests).to eq([mr_internal, mr_public])
+ end
+ end
+
+ context 'as a developer' do
+ before do
+ private_project.add_developer(user)
+ end
+
+ it 'returns merge requests from the private project' do
+ expect(merge_requests).to eq([mr_internal, mr_private, mr_public])
+ end
+ end
+ end
- expect(finder.row_count).to eq(1)
+ context 'being added to the public project with private repo access' do
+ context 'as a guest' do
+ before do
+ public_with_private_repo.add_guest(user)
+ end
+
+ it 'returns merge requests from the project' do
+ expect(merge_requests).to eq([mr_internal, mr_public])
+ end
+ end
+
+ context 'as a reporter' do
+ before do
+ public_with_private_repo.add_reporter(user)
+ end
+
+ it 'returns merge requests from the project' do
+ expect(merge_requests).to eq([mr_private_repo_access, mr_internal, mr_public])
+ end
+ end
+ end
+
+ context 'being added to the internal project with private repo access' do
+ context 'as a guest' do
+ before do
+ internal_with_private_repo.add_guest(user)
+ end
+
+ it 'returns merge requests from the project' do
+ expect(merge_requests).to eq([mr_internal, mr_public])
+ end
+ end
+
+ context 'as a reporter' do
+ before do
+ internal_with_private_repo.add_reporter(user)
+ end
+
+ it 'returns merge requests from the project' do
+ expect(merge_requests).to eq([mr_internal_private_repo_access, mr_internal, mr_public])
+ end
+ end
+ end
end
end
end
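
For orientation, a minimal sketch of how the finder covered above might be invoked outside the suite. The user lookup and parameter values are illustrative assumptions; the params hash and the #execute and #row_count calls mirror the examples in this spec.

    # Illustrative sketch only (e.g. in a Rails console); not part of the change.
    user   = User.find_by(username: 'root')           # hypothetical user with access
    params = { wip: 'no', created_after: 1.week.ago } # any of the scalar/date params tested above

    finder = MergeRequestsFinder.new(user, params)
    finder.execute.each { |merge_request| puts merge_request.title }
    puts finder.row_count # count for the finder's state (default here, since no :state param is given)
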
diff --git a/spec/fixtures/security-reports/remediations/gl-dependency-scanning-report.json b/spec/fixtures/security-reports/remediations/gl-dependency-scanning-report.json
new file mode 100644
index 00000000000..c96e831b027
--- /dev/null
+++ b/spec/fixtures/security-reports/remediations/gl-dependency-scanning-report.json
@@ -0,0 +1,104 @@
+{
+ "version": "2.0",
+ "vulnerabilities": [
+ {
+ "category": "dependency_scanning",
+ "name": "Regular Expression Denial of Service",
+ "message": "Regular Expression Denial of Service in debug",
+ "description": "The debug module is vulnerable to regular expression denial of service when untrusted user input is passed into the `o` formatter. It takes around 50k characters to block for 2 seconds making this a low severity issue.",
+ "cve": "yarn.lock:debug:gemnasium:37283ed4-0380-40d7-ada7-2d994afcc62a",
+ "severity": "Unknown",
+ "solution": "Upgrade to latest versions.",
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium"
+ },
+ "location": {
+ "file": "yarn.lock",
+ "dependency": {
+ "package": {
+ "name": "debug"
+ },
+ "version": "1.0.5"
+ }
+ },
+ "identifiers": [
+ {
+ "type": "gemnasium",
+ "name": "Gemnasium-37283ed4-0380-40d7-ada7-2d994afcc62a",
+ "value": "37283ed4-0380-40d7-ada7-2d994afcc62a",
+ "url": "https://deps.sec.gitlab.com/packages/npm/debug/versions/1.0.5/advisories"
+ }
+ ],
+ "links": [
+ {
+ "url": "https://nodesecurity.io/advisories/534"
+ },
+ {
+ "url": "https://github.com/visionmedia/debug/issues/501"
+ },
+ {
+ "url": "https://github.com/visionmedia/debug/pull/504"
+ }
+ ]
+ },
+ {
+ "category": "dependency_scanning",
+ "name": "Authentication bypass via incorrect DOM traversal and canonicalization",
+ "message": "Authentication bypass via incorrect DOM traversal and canonicalization in saml2-js",
+ "description": "Some XML DOM traversal and canonicalization APIs may be inconsistent in handling of comments within XML nodes. Incorrect use of these APIs by some SAML libraries results in incorrect parsing of the inner text of XML nodes such that any inner text after the comment is lost prior to cryptographically signing the SAML message. Text after the comment therefore has no impact on the signature on the SAML message.\r\n\r\nA remote attacker can modify SAML content for a SAML service provider without invalidating the cryptographic signature, which may allow attackers to bypass primary authentication for the affected SAML service provider.",
+ "cve": "yarn.lock:saml2-js:gemnasium:9952e574-7b5b-46fa-a270-aeb694198a98",
+ "severity": "Unknown",
+ "solution": "Upgrade to fixed version.\r\n",
+ "scanner": {
+ "id": "gemnasium",
+ "name": "Gemnasium"
+ },
+ "location": {
+ "file": "yarn.lock",
+ "dependency": {
+ "package": {
+ "name": "saml2-js"
+ },
+ "version": "1.5.0"
+ }
+ },
+ "identifiers": [
+ {
+ "type": "gemnasium",
+ "name": "Gemnasium-9952e574-7b5b-46fa-a270-aeb694198a98",
+ "value": "9952e574-7b5b-46fa-a270-aeb694198a98",
+ "url": "https://deps.sec.gitlab.com/packages/npm/saml2-js/versions/1.5.0/advisories"
+ },
+ {
+ "type": "cve",
+ "name": "CVE-2017-11429",
+ "value": "CVE-2017-11429",
+ "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-11429"
+ }
+ ],
+ "links": [
+ {
+ "url": "https://github.com/Clever/saml2/commit/3546cb61fd541f219abda364c5b919633609ef3d#diff-af730f9f738de1c9ad87596df3f6de84R279"
+ },
+ {
+ "url": "https://github.com/Clever/saml2/issues/127"
+ },
+ {
+ "url": "https://www.kb.cert.org/vuls/id/475445"
+ }
+ ]
+ }
+ ],
+ "remediations": [
+ {
+ "fixes": [
+ {
+ "cve": "yarn.lock:saml2-js:gemnasium:9952e574-7b5b-46fa-a270-aeb694198a98"
+ }
+ ],
+ "summary": "Upgrade saml2-js",
+ "diff": "diff --git a/yarn.lock b/yarn.lock
index 0ecc92f..7fa4554 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2,103 +2,124 @@
 # yarn lockfile v1
 
 
-async@~0.2.7:
-  version "0.2.10"
-  resolved "http://registry.npmjs.org/async/-/async-0.2.10.tgz#b6bbe0b0674b9d719708ca38de8c237cb526c3d1"
-
-async@~1.5.2:
-  version "1.5.2"
-  resolved "http://registry.npmjs.org/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a"
+async@^2.1.5, async@^2.5.0:
+  version "2.6.1"
+  resolved "https://registry.yarnpkg.com/async/-/async-2.6.1.tgz#b245a23ca71930044ec53fa46aa00a3e87c6a610"
+  integrity sha512-fNEiL2+AZt6AlAw/29Cr0UDe4sRAHCpEHh54WMz+Bb7QfNcFw4h3loofyJpLeQs4Yx7yuqu/2dLgM5hKOs6HlQ==
+  dependencies:
+    lodash "^4.17.10"
 
-debug@^1.0.4:
-  version "1.0.5"
-  resolved "https://registry.yarnpkg.com/debug/-/debug-1.0.5.tgz#f7241217430f99dec4c2b473eab92228e874c2ac"
+debug@^2.6.0:
+  version "2.6.9"
+  resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
+  integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
   dependencies:
     ms "2.0.0"
 
-ejs@~0.8.3:
-  version "0.8.8"
-  resolved "https://registry.yarnpkg.com/ejs/-/ejs-0.8.8.tgz#ffdc56dcc35d02926dd50ad13439bbc54061d598"
+ejs@^2.5.6:
+  version "2.6.1"
+  resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.6.1.tgz#498ec0d495655abc6f23cd61868d926464071aa0"
+  integrity sha512-0xy4A/twfrRCnkhfk8ErDi5DqdAsAqeGxht4xkCUrsvhhbQNs7E+4jV0CN7+NKIY0aHE72+XvqtBIXzD31ZbXQ==
+
+lodash-node@~2.4.1:
+  version "2.4.1"
+  resolved "https://registry.yarnpkg.com/lodash-node/-/lodash-node-2.4.1.tgz#ea82f7b100c733d1a42af76801e506105e2a80ec"
+  integrity sha1-6oL3sQDHM9GkKvdoAeUGEF4qgOw=
+
+lodash@^4.17.10:
+  version "4.17.11"
+  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d"
+  integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==
 
 ms@2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
+  integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
 
-node-forge@0.2.24:
-  version "0.2.24"
-  resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.2.24.tgz#fa6f846f42fa93f63a0a30c9fbff7b4e130e0858"
+node-forge@^0.7.0:
+  version "0.7.6"
+  resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac"
+  integrity sha512-sol30LUpz1jQFBjOKwbjxijiE3b6pjd74YwfD0fJOKPjF+fONKb2Yg8rYgS6+bK6VDl+/wfr4IYpC7jDzLUIfw==
 
 saml2-js@^1.5.0:
-  version "1.5.0"
-  resolved "https://registry.yarnpkg.com/saml2-js/-/saml2-js-1.5.0.tgz#c0d2268a179e7329d29eb25aa82df5503774b0d9"
+  version "1.12.4"
+  resolved "https://registry.yarnpkg.com/saml2-js/-/saml2-js-1.12.4.tgz#c288f20bda6d2b91073b16c94ea72f22349ac3b3"
+  integrity sha1-wojyC9ptK5EHOxbJTqcvIjSaw7M=
   dependencies:
-    async "~1.5.2"
-    debug "^1.0.4"
-    underscore "~1.6.0"
-    xml-crypto "^0.8.1"
-    xml-encryption "~0.7.4"
-    xml2js "~0.4.1"
-    xmlbuilder "~2.1.0"
-    xmldom "~0.1.19"
+    async "^2.5.0"
+    debug "^2.6.0"
+    underscore "^1.8.0"
+    xml-crypto "^0.10.0"
+    xml-encryption "^0.11.0"
+    xml2js "^0.4.0"
+    xmlbuilder "~2.2.0"
+    xmldom "^0.1.0"
 
 sax@>=0.6.0:
   version "1.2.4"
   resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
+  integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==
 
-underscore@>=1.5.x:
+underscore@^1.8.0:
   version "1.9.1"
   resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.9.1.tgz#06dce34a0e68a7babc29b365b8e74b8925203961"
+  integrity sha512-5/4etnCkd9c8gwgowi5/om/mYO5ajCaOgdzj/oW+0eQV9WxKBDZw5+ycmKmeaTXjInS/W0BzpGLo2xR2aBwZdg==
 
-underscore@~1.6.0:
-  version "1.6.0"
-  resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.6.0.tgz#8b38b10cacdef63337b8b24e4ff86d45aea529a8"
-
-xml-crypto@^0.8.1:
-  version "0.8.5"
-  resolved "http://registry.npmjs.org/xml-crypto/-/xml-crypto-0.8.5.tgz#2bbcfb3eb33f3a82a218b822bf672b6b1c20e538"
+xml-crypto@^0.10.0:
+  version "0.10.1"
+  resolved "https://registry.yarnpkg.com/xml-crypto/-/xml-crypto-0.10.1.tgz#f832f74ccf56f24afcae1163a1fcab44d96774a8"
+  integrity sha1-+DL3TM9W8kr8rhFjofyrRNlndKg=
   dependencies:
     xmldom "=0.1.19"
     xpath.js ">=0.0.3"
 
-xml-encryption@~0.7.4:
-  version "0.7.4"
-  resolved "https://registry.yarnpkg.com/xml-encryption/-/xml-encryption-0.7.4.tgz#42791ec64d556d2455dcb9da0a54123665ac65c7"
+xml-encryption@^0.11.0:
+  version "0.11.2"
+  resolved "https://registry.yarnpkg.com/xml-encryption/-/xml-encryption-0.11.2.tgz#c217f5509547e34b500b829f2c0bca85cca73a21"
+  integrity sha512-jVvES7i5ovdO7N+NjgncA326xYKjhqeAnnvIgRnY7ROLCfFqEDLwP0Sxp/30SHG0AXQV1048T5yinOFyvwGFzg==
   dependencies:
-    async "~0.2.7"
-    ejs "~0.8.3"
-    node-forge "0.2.24"
+    async "^2.1.5"
+    ejs "^2.5.6"
+    node-forge "^0.7.0"
     xmldom "~0.1.15"
-    xpath "0.0.5"
+    xpath "0.0.27"
 
-xml2js@~0.4.1:
+xml2js@^0.4.0:
   version "0.4.19"
   resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7"
+  integrity sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==
   dependencies:
     sax ">=0.6.0"
     xmlbuilder "~9.0.1"
 
-xmlbuilder@~2.1.0:
-  version "2.1.0"
-  resolved "http://registry.npmjs.org/xmlbuilder/-/xmlbuilder-2.1.0.tgz#6ddae31683b6df12100b29fc8a0d4f46349abbed"
+xmlbuilder@~2.2.0:
+  version "2.2.1"
+  resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-2.2.1.tgz#9326430f130d87435d4c4086643aa2926e105a32"
+  integrity sha1-kyZDDxMNh0NdTECGZDqikm4QWjI=
   dependencies:
-    underscore ">=1.5.x"
+    lodash-node "~2.4.1"
 
 xmlbuilder@~9.0.1:
   version "9.0.7"
-  resolved "http://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d"
+  resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d"
+  integrity sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=
 
 xmldom@=0.1.19:
   version "0.1.19"
   resolved "https://registry.yarnpkg.com/xmldom/-/xmldom-0.1.19.tgz#631fc07776efd84118bf25171b37ed4d075a0abc"
+  integrity sha1-Yx/Ad3bv2EEYvyUXGzftTQdaCrw=
 
-xmldom@~0.1.15, xmldom@~0.1.19:
+xmldom@^0.1.0, xmldom@~0.1.15:
   version "0.1.27"
   resolved "https://registry.yarnpkg.com/xmldom/-/xmldom-0.1.27.tgz#d501f97b3bdb403af8ef9ecc20573187aadac0e9"
+  integrity sha1-1QH5ezvbQDr4757MIFcxh6rawOk=
 
 xpath.js@>=0.0.3:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/xpath.js/-/xpath.js-1.1.0.tgz#3816a44ed4bb352091083d002a383dd5104a5ff1"
+  integrity sha512-jg+qkfS4K8E7965sqaUl8mRngXiKb3WZGfONgE18pr03FUQiuSV6G+Ej4tS55B+rIQSFEIw3phdVAQ4pPqNWfQ==
 
-xpath@0.0.5:
-  version "0.0.5"
-  resolved "https://registry.yarnpkg.com/xpath/-/xpath-0.0.5.tgz#454036f6ef0f3df5af5d4ba4a119fb75674b3e6c"
+xpath@0.0.27:
+  version "0.0.27"
+  resolved "https://registry.yarnpkg.com/xpath/-/xpath-0.0.27.tgz#dd3421fbdcc5646ac32c48531b4d7e9d0c2cfa92"
+  integrity sha512-fg03WRxtkCV6ohClePNAECYsmpKKTv5L8y/X3Dn1hQrec3POx2jHZ/0P2qQ6HvsrU1BmeqXcof3NGGueG6LxwQ==
"
+ }
+ ]
+}
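
As a rough illustration of how a consumer of this fixture might pair each remediation with the vulnerabilities it fixes, using only field names that appear in the JSON above (the file path is the one added by this diff):

    require 'json'

    # Illustrative sketch only; field names taken from the fixture above.
    path   = 'spec/fixtures/security-reports/remediations/gl-dependency-scanning-report.json'
    report = JSON.parse(File.read(path))

    report['remediations'].each do |remediation|
      fixed_cves = remediation['fixes'].map { |fix| fix['cve'] }
      fixed      = report['vulnerabilities'].select { |vuln| fixed_cves.include?(vuln['cve']) }

      puts "#{remediation['summary']} -> #{fixed.map { |vuln| vuln['name'] }.join(', ')}"
    end
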
diff --git a/spec/fixtures/security-reports/remediations/remediation.patch b/spec/fixtures/security-reports/remediations/remediation.patch
new file mode 100644
index 00000000000..bbfb6874627
--- /dev/null
+++ b/spec/fixtures/security-reports/remediations/remediation.patch
@@ -0,0 +1,180 @@
+diff --git a/yarn.lock b/yarn.lock
+index 0ecc92f..7fa4554 100644
+--- a/yarn.lock
++++ b/yarn.lock
+@@ -2,103 +2,124 @@
+ # yarn lockfile v1
+
+
+-async@~0.2.7:
+- version "0.2.10"
+- resolved "http://registry.npmjs.org/async/-/async-0.2.10.tgz#b6bbe0b0674b9d719708ca38de8c237cb526c3d1"
+-
+-async@~1.5.2:
+- version "1.5.2"
+- resolved "http://registry.npmjs.org/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a"
++async@^2.1.5, async@^2.5.0:
++ version "2.6.1"
++ resolved "https://registry.yarnpkg.com/async/-/async-2.6.1.tgz#b245a23ca71930044ec53fa46aa00a3e87c6a610"
++ integrity sha512-fNEiL2+AZt6AlAw/29Cr0UDe4sRAHCpEHh54WMz+Bb7QfNcFw4h3loofyJpLeQs4Yx7yuqu/2dLgM5hKOs6HlQ==
++ dependencies:
++ lodash "^4.17.10"
+
+-debug@^1.0.4:
+- version "1.0.5"
+- resolved "https://registry.yarnpkg.com/debug/-/debug-1.0.5.tgz#f7241217430f99dec4c2b473eab92228e874c2ac"
++debug@^2.6.0:
++ version "2.6.9"
++ resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
++ integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
+ dependencies:
+ ms "2.0.0"
+
+-ejs@~0.8.3:
+- version "0.8.8"
+- resolved "https://registry.yarnpkg.com/ejs/-/ejs-0.8.8.tgz#ffdc56dcc35d02926dd50ad13439bbc54061d598"
++ejs@^2.5.6:
++ version "2.6.1"
++ resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.6.1.tgz#498ec0d495655abc6f23cd61868d926464071aa0"
++ integrity sha512-0xy4A/twfrRCnkhfk8ErDi5DqdAsAqeGxht4xkCUrsvhhbQNs7E+4jV0CN7+NKIY0aHE72+XvqtBIXzD31ZbXQ==
++
++lodash-node@~2.4.1:
++ version "2.4.1"
++ resolved "https://registry.yarnpkg.com/lodash-node/-/lodash-node-2.4.1.tgz#ea82f7b100c733d1a42af76801e506105e2a80ec"
++ integrity sha1-6oL3sQDHM9GkKvdoAeUGEF4qgOw=
++
++lodash@^4.17.10:
++ version "4.17.11"
++ resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d"
++ integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==
+
+ ms@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
++ integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
+
+-node-forge@0.2.24:
+- version "0.2.24"
+- resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.2.24.tgz#fa6f846f42fa93f63a0a30c9fbff7b4e130e0858"
++node-forge@^0.7.0:
++ version "0.7.6"
++ resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac"
++ integrity sha512-sol30LUpz1jQFBjOKwbjxijiE3b6pjd74YwfD0fJOKPjF+fONKb2Yg8rYgS6+bK6VDl+/wfr4IYpC7jDzLUIfw==
+
+ saml2-js@^1.5.0:
+- version "1.5.0"
+- resolved "https://registry.yarnpkg.com/saml2-js/-/saml2-js-1.5.0.tgz#c0d2268a179e7329d29eb25aa82df5503774b0d9"
++ version "1.12.4"
++ resolved "https://registry.yarnpkg.com/saml2-js/-/saml2-js-1.12.4.tgz#c288f20bda6d2b91073b16c94ea72f22349ac3b3"
++ integrity sha1-wojyC9ptK5EHOxbJTqcvIjSaw7M=
+ dependencies:
+- async "~1.5.2"
+- debug "^1.0.4"
+- underscore "~1.6.0"
+- xml-crypto "^0.8.1"
+- xml-encryption "~0.7.4"
+- xml2js "~0.4.1"
+- xmlbuilder "~2.1.0"
+- xmldom "~0.1.19"
++ async "^2.5.0"
++ debug "^2.6.0"
++ underscore "^1.8.0"
++ xml-crypto "^0.10.0"
++ xml-encryption "^0.11.0"
++ xml2js "^0.4.0"
++ xmlbuilder "~2.2.0"
++ xmldom "^0.1.0"
+
+ sax@>=0.6.0:
+ version "1.2.4"
+ resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
++ integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==
+
+-underscore@>=1.5.x:
++underscore@^1.8.0:
+ version "1.9.1"
+ resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.9.1.tgz#06dce34a0e68a7babc29b365b8e74b8925203961"
++ integrity sha512-5/4etnCkd9c8gwgowi5/om/mYO5ajCaOgdzj/oW+0eQV9WxKBDZw5+ycmKmeaTXjInS/W0BzpGLo2xR2aBwZdg==
+
+-underscore@~1.6.0:
+- version "1.6.0"
+- resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.6.0.tgz#8b38b10cacdef63337b8b24e4ff86d45aea529a8"
+-
+-xml-crypto@^0.8.1:
+- version "0.8.5"
+- resolved "http://registry.npmjs.org/xml-crypto/-/xml-crypto-0.8.5.tgz#2bbcfb3eb33f3a82a218b822bf672b6b1c20e538"
++xml-crypto@^0.10.0:
++ version "0.10.1"
++ resolved "https://registry.yarnpkg.com/xml-crypto/-/xml-crypto-0.10.1.tgz#f832f74ccf56f24afcae1163a1fcab44d96774a8"
++ integrity sha1-+DL3TM9W8kr8rhFjofyrRNlndKg=
+ dependencies:
+ xmldom "=0.1.19"
+ xpath.js ">=0.0.3"
+
+-xml-encryption@~0.7.4:
+- version "0.7.4"
+- resolved "https://registry.yarnpkg.com/xml-encryption/-/xml-encryption-0.7.4.tgz#42791ec64d556d2455dcb9da0a54123665ac65c7"
++xml-encryption@^0.11.0:
++ version "0.11.2"
++ resolved "https://registry.yarnpkg.com/xml-encryption/-/xml-encryption-0.11.2.tgz#c217f5509547e34b500b829f2c0bca85cca73a21"
++ integrity sha512-jVvES7i5ovdO7N+NjgncA326xYKjhqeAnnvIgRnY7ROLCfFqEDLwP0Sxp/30SHG0AXQV1048T5yinOFyvwGFzg==
+ dependencies:
+- async "~0.2.7"
+- ejs "~0.8.3"
+- node-forge "0.2.24"
++ async "^2.1.5"
++ ejs "^2.5.6"
++ node-forge "^0.7.0"
+ xmldom "~0.1.15"
+- xpath "0.0.5"
++ xpath "0.0.27"
+
+-xml2js@~0.4.1:
++xml2js@^0.4.0:
+ version "0.4.19"
+ resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7"
++ integrity sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==
+ dependencies:
+ sax ">=0.6.0"
+ xmlbuilder "~9.0.1"
+
+-xmlbuilder@~2.1.0:
+- version "2.1.0"
+- resolved "http://registry.npmjs.org/xmlbuilder/-/xmlbuilder-2.1.0.tgz#6ddae31683b6df12100b29fc8a0d4f46349abbed"
++xmlbuilder@~2.2.0:
++ version "2.2.1"
++ resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-2.2.1.tgz#9326430f130d87435d4c4086643aa2926e105a32"
++ integrity sha1-kyZDDxMNh0NdTECGZDqikm4QWjI=
+ dependencies:
+- underscore ">=1.5.x"
++ lodash-node "~2.4.1"
+
+ xmlbuilder@~9.0.1:
+ version "9.0.7"
+- resolved "http://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d"
++ resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d"
++ integrity sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=
+
+ xmldom@=0.1.19:
+ version "0.1.19"
+ resolved "https://registry.yarnpkg.com/xmldom/-/xmldom-0.1.19.tgz#631fc07776efd84118bf25171b37ed4d075a0abc"
++ integrity sha1-Yx/Ad3bv2EEYvyUXGzftTQdaCrw=
+
+-xmldom@~0.1.15, xmldom@~0.1.19:
++xmldom@^0.1.0, xmldom@~0.1.15:
+ version "0.1.27"
+ resolved "https://registry.yarnpkg.com/xmldom/-/xmldom-0.1.27.tgz#d501f97b3bdb403af8ef9ecc20573187aadac0e9"
++ integrity sha1-1QH5ezvbQDr4757MIFcxh6rawOk=
+
+ xpath.js@>=0.0.3:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/xpath.js/-/xpath.js-1.1.0.tgz#3816a44ed4bb352091083d002a383dd5104a5ff1"
++ integrity sha512-jg+qkfS4K8E7965sqaUl8mRngXiKb3WZGfONgE18pr03FUQiuSV6G+Ej4tS55B+rIQSFEIw3phdVAQ4pPqNWfQ==
+
+-xpath@0.0.5:
+- version "0.0.5"
+- resolved "https://registry.yarnpkg.com/xpath/-/xpath-0.0.5.tgz#454036f6ef0f3df5af5d4ba4a119fb75674b3e6c"
++xpath@0.0.27:
++ version "0.0.27"
++ resolved "https://registry.yarnpkg.com/xpath/-/xpath-0.0.27.tgz#dd3421fbdcc5646ac32c48531b4d7e9d0c2cfa92"
++ integrity sha512-fg03WRxtkCV6ohClePNAECYsmpKKTv5L8y/X3Dn1hQrec3POx2jHZ/0P2qQ6HvsrU1BmeqXcof3NGGueG6LxwQ==
diff --git a/spec/fixtures/security-reports/remediations/yarn.lock b/spec/fixtures/security-reports/remediations/yarn.lock
new file mode 100644
index 00000000000..0ecc92fb711
--- /dev/null
+++ b/spec/fixtures/security-reports/remediations/yarn.lock
@@ -0,0 +1,104 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+async@~0.2.7:
+ version "0.2.10"
+ resolved "http://registry.npmjs.org/async/-/async-0.2.10.tgz#b6bbe0b0674b9d719708ca38de8c237cb526c3d1"
+
+async@~1.5.2:
+ version "1.5.2"
+ resolved "http://registry.npmjs.org/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a"
+
+debug@^1.0.4:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-1.0.5.tgz#f7241217430f99dec4c2b473eab92228e874c2ac"
+ dependencies:
+ ms "2.0.0"
+
+ejs@~0.8.3:
+ version "0.8.8"
+ resolved "https://registry.yarnpkg.com/ejs/-/ejs-0.8.8.tgz#ffdc56dcc35d02926dd50ad13439bbc54061d598"
+
+ms@2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
+
+node-forge@0.2.24:
+ version "0.2.24"
+ resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.2.24.tgz#fa6f846f42fa93f63a0a30c9fbff7b4e130e0858"
+
+saml2-js@^1.5.0:
+ version "1.5.0"
+ resolved "https://registry.yarnpkg.com/saml2-js/-/saml2-js-1.5.0.tgz#c0d2268a179e7329d29eb25aa82df5503774b0d9"
+ dependencies:
+ async "~1.5.2"
+ debug "^1.0.4"
+ underscore "~1.6.0"
+ xml-crypto "^0.8.1"
+ xml-encryption "~0.7.4"
+ xml2js "~0.4.1"
+ xmlbuilder "~2.1.0"
+ xmldom "~0.1.19"
+
+sax@>=0.6.0:
+ version "1.2.4"
+ resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
+
+underscore@>=1.5.x:
+ version "1.9.1"
+ resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.9.1.tgz#06dce34a0e68a7babc29b365b8e74b8925203961"
+
+underscore@~1.6.0:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.6.0.tgz#8b38b10cacdef63337b8b24e4ff86d45aea529a8"
+
+xml-crypto@^0.8.1:
+ version "0.8.5"
+ resolved "http://registry.npmjs.org/xml-crypto/-/xml-crypto-0.8.5.tgz#2bbcfb3eb33f3a82a218b822bf672b6b1c20e538"
+ dependencies:
+ xmldom "=0.1.19"
+ xpath.js ">=0.0.3"
+
+xml-encryption@~0.7.4:
+ version "0.7.4"
+ resolved "https://registry.yarnpkg.com/xml-encryption/-/xml-encryption-0.7.4.tgz#42791ec64d556d2455dcb9da0a54123665ac65c7"
+ dependencies:
+ async "~0.2.7"
+ ejs "~0.8.3"
+ node-forge "0.2.24"
+ xmldom "~0.1.15"
+ xpath "0.0.5"
+
+xml2js@~0.4.1:
+ version "0.4.19"
+ resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7"
+ dependencies:
+ sax ">=0.6.0"
+ xmlbuilder "~9.0.1"
+
+xmlbuilder@~2.1.0:
+ version "2.1.0"
+ resolved "http://registry.npmjs.org/xmlbuilder/-/xmlbuilder-2.1.0.tgz#6ddae31683b6df12100b29fc8a0d4f46349abbed"
+ dependencies:
+ underscore ">=1.5.x"
+
+xmlbuilder@~9.0.1:
+ version "9.0.7"
+ resolved "http://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d"
+
+xmldom@=0.1.19:
+ version "0.1.19"
+ resolved "https://registry.yarnpkg.com/xmldom/-/xmldom-0.1.19.tgz#631fc07776efd84118bf25171b37ed4d075a0abc"
+
+xmldom@~0.1.15, xmldom@~0.1.19:
+ version "0.1.27"
+ resolved "https://registry.yarnpkg.com/xmldom/-/xmldom-0.1.27.tgz#d501f97b3bdb403af8ef9ecc20573187aadac0e9"
+
+xpath.js@>=0.0.3:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/xpath.js/-/xpath.js-1.1.0.tgz#3816a44ed4bb352091083d002a383dd5104a5ff1"
+
+xpath@0.0.5:
+ version "0.0.5"
+ resolved "https://registry.yarnpkg.com/xpath/-/xpath-0.0.5.tgz#454036f6ef0f3df5af5d4ba4a119fb75674b3e6c"
diff --git a/spec/fixtures/trace/sample_trace b/spec/fixtures/trace/sample_trace
index 3d8beb0dec2..8f9747f8143 100644
--- a/spec/fixtures/trace/sample_trace
+++ b/spec/fixtures/trace/sample_trace
@@ -1795,7 +1795,7 @@ GroupsController
when requesting a redirected path
returns not found
PUT transfer
- when transfering to a subgroup goes right
+ when transferring to a subgroup goes right
should return a notice (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example)
should redirect to the new path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example)
when converting to a root group goes right
@@ -2299,7 +2299,7 @@ Groups::TransferService
should update subgroups path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example)
should update projects path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example)
should create redirect for the subgroups and projects (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example)
- when transfering a group with nested groups and projects
+ when transferring a group with nested groups and projects
should update subgroups path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example)
should update projects path (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example)
should create redirect for the subgroups and projects (PENDING: around hook at ./spec/spec_helper.rb:190 did not execute the example)
@@ -2426,9 +2426,9 @@ Groups::MilestonesController
lists legacy group milestones and group milestones
#show
when there is a title parameter
- searchs for a legacy group milestone
+ searches for a legacy group milestone
when there is not a title parameter
- searchs for a group milestone
+ searches for a group milestone
behaves like milestone tabs
#merge_requests
as html
@@ -3109,11 +3109,11 @@ Pending: (Failures listed here are expected and do not affect your suite's statu
# around hook at ./spec/spec_helper.rb:186 did not execute the example
# ./spec/controllers/groups_controller_spec.rb:129
- 15) GroupsController PUT transfer when transfering to a subgroup goes right should return a notice
+ 15) GroupsController PUT transfer when transferring to a subgroup goes right should return a notice
# around hook at ./spec/spec_helper.rb:190 did not execute the example
# ./spec/controllers/groups_controller_spec.rb:516
- 16) GroupsController PUT transfer when transfering to a subgroup goes right should redirect to the new path
+ 16) GroupsController PUT transfer when transferring to a subgroup goes right should redirect to the new path
# around hook at ./spec/spec_helper.rb:190 did not execute the example
# ./spec/controllers/groups_controller_spec.rb:520
@@ -3301,15 +3301,15 @@ Pending: (Failures listed here are expected and do not affect your suite's statu
# around hook at ./spec/spec_helper.rb:190 did not execute the example
# ./spec/services/groups/transfer_service_spec.rb:341
- 63) Groups::TransferService#execute when transferring a subgroup into another group when transfering a group with nested groups and projects should update subgroups path
+ 63) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with nested groups and projects should update subgroups path
# around hook at ./spec/spec_helper.rb:190 did not execute the example
# ./spec/services/groups/transfer_service_spec.rb:363
- 64) Groups::TransferService#execute when transferring a subgroup into another group when transfering a group with nested groups and projects should update projects path
+ 64) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with nested groups and projects should update projects path
# around hook at ./spec/spec_helper.rb:190 did not execute the example
# ./spec/services/groups/transfer_service_spec.rb:375
- 65) Groups::TransferService#execute when transferring a subgroup into another group when transfering a group with nested groups and projects should create redirect for the subgroups and projects
+ 65) Groups::TransferService#execute when transferring a subgroup into another group when transferring a group with nested groups and projects should create redirect for the subgroups and projects
# around hook at ./spec/spec_helper.rb:190 did not execute the example
# ./spec/services/groups/transfer_service_spec.rb:383
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index b79e6e0fe7b..c7008c780d6 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -6,21 +6,17 @@ import GfmAutoComplete from '~/gfm_auto_complete';
import 'vendor/jquery.caret';
import 'vendor/jquery.atwho';
-import { TEST_HOST } from 'helpers/test_constants';
-import labelsFixture from 'fixtures/autocomplete_sources/labels.json'; // eslint-disable-line import/no-unresolved
-
describe('GfmAutoComplete', () => {
const gfmAutoCompleteCallbacks = GfmAutoComplete.prototype.getDefaultCallbacks.call({
fetchData: () => {},
});
let atwhoInstance;
+ let items;
let sorterValue;
describe('DefaultOptions.sorter', () => {
describe('assets loading', () => {
- let items;
-
beforeEach(() => {
jest.spyOn(GfmAutoComplete, 'isLoading').mockReturnValue(true);
@@ -65,7 +61,7 @@ describe('GfmAutoComplete', () => {
atwhoInstance = { setting: {} };
const query = 'query';
- const items = [];
+ items = [];
const searchKey = 'searchKey';
gfmAutoCompleteCallbacks.sorter.call(atwhoInstance, query, items, searchKey);
@@ -254,90 +250,4 @@ describe('GfmAutoComplete', () => {
).toBe('<li><small>grp/proj#5</small> Some Issue</li>');
});
});
-
- describe('labels', () => {
- const dataSources = {
- labels: `${TEST_HOST}/autocomplete_sources/labels`,
- };
-
- const allLabels = labelsFixture;
- const assignedLabels = allLabels.filter(label => label.set);
- const unassignedLabels = allLabels.filter(label => !label.set);
-
- let autocomplete;
- let $textarea;
-
- beforeEach(() => {
- autocomplete = new GfmAutoComplete(dataSources);
- $textarea = $('<textarea></textarea>');
- autocomplete.setup($textarea, { labels: true });
- });
-
- afterEach(() => {
- autocomplete.destroy();
- });
-
- const triggerDropdown = text => {
- $textarea
- .trigger('focus')
- .val(text)
- .caret('pos', -1);
- $textarea.trigger('keyup');
-
- return new Promise(window.requestAnimationFrame);
- };
-
- const getDropdownItems = () => {
- const dropdown = document.getElementById('at-view-labels');
- const items = dropdown.getElementsByTagName('li');
- return [].map.call(items, item => item.textContent.trim());
- };
-
- const expectLabels = ({ input, output }) =>
- triggerDropdown(input).then(() => {
- expect(getDropdownItems()).toEqual(output.map(label => label.title));
- });
-
- describe('with no labels assigned', () => {
- beforeEach(() => {
- autocomplete.cachedData['~'] = [...unassignedLabels];
- });
-
- it.each`
- input | output
- ${'~'} | ${unassignedLabels}
- ${'/label ~'} | ${unassignedLabels}
- ${'/relabel ~'} | ${unassignedLabels}
- ${'/unlabel ~'} | ${[]}
- `('$input shows $output.length labels', expectLabels);
- });
-
- describe('with some labels assigned', () => {
- beforeEach(() => {
- autocomplete.cachedData['~'] = allLabels;
- });
-
- it.each`
- input | output
- ${'~'} | ${allLabels}
- ${'/label ~'} | ${unassignedLabels}
- ${'/relabel ~'} | ${allLabels}
- ${'/unlabel ~'} | ${assignedLabels}
- `('$input shows $output.length labels', expectLabels);
- });
-
- describe('with all labels assigned', () => {
- beforeEach(() => {
- autocomplete.cachedData['~'] = [...assignedLabels];
- });
-
- it.each`
- input | output
- ${'~'} | ${assignedLabels}
- ${'/label ~'} | ${[]}
- ${'/relabel ~'} | ${assignedLabels}
- ${'/unlabel ~'} | ${assignedLabels}
- `('$input shows $output.length labels', expectLabels);
- });
- });
});
diff --git a/spec/graphql/resolvers/metadata_resolver_spec.rb b/spec/graphql/resolvers/metadata_resolver_spec.rb
new file mode 100644
index 00000000000..e662ed127a5
--- /dev/null
+++ b/spec/graphql/resolvers/metadata_resolver_spec.rb
@@ -0,0 +1,11 @@
+require 'spec_helper'
+
+describe Resolvers::MetadataResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ it 'returns version and revision' do
+ expect(resolve(described_class)).to eq(version: Gitlab::VERSION, revision: Gitlab.revision)
+ end
+ end
+end
diff --git a/spec/graphql/types/metadata_type_spec.rb b/spec/graphql/types/metadata_type_spec.rb
new file mode 100644
index 00000000000..55205bf5b6a
--- /dev/null
+++ b/spec/graphql/types/metadata_type_spec.rb
@@ -0,0 +1,5 @@
+require 'spec_helper'
+
+describe GitlabSchema.types['Metadata'] do
+ it { expect(described_class.graphql_name).to eq('Metadata') }
+end
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index e1df6f9811d..07c61ea7647 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -5,7 +5,7 @@ describe GitlabSchema.types['Query'] do
expect(described_class.graphql_name).to eq('Query')
end
- it { is_expected.to have_graphql_fields(:project, :echo) }
+ it { is_expected.to have_graphql_fields(:project, :echo, :metadata) }
describe 'project field' do
subject { described_class.fields['project'] }
@@ -20,4 +20,17 @@ describe GitlabSchema.types['Query'] do
is_expected.to require_graphql_authorizations(:read_project)
end
end
+
+ describe 'metadata field' do
+ subject { described_class.fields['metadata'] }
+
+ it 'has the right type and resolver' do
+ is_expected.to have_graphql_type(Types::MetadataType)
+ is_expected.to have_graphql_resolver(Resolvers::MetadataResolver)
+ end
+
+ it 'authorizes with read_instance_metadata' do
+ is_expected.to require_graphql_authorizations(:read_instance_metadata)
+ end
+ end
end
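
A hedged sketch of what the new field looks like from a client's point of view: the `metadata { version revision }` selection follows the resolver spec above, while the instance URL, endpoint path and token header are assumptions for illustration only.

    require 'json'
    require 'net/http'
    require 'uri'

    # Illustrative sketch only; instance URL and token are placeholders.
    uri   = URI('https://gitlab.example.com/api/graphql')
    query = '{ metadata { version revision } }'

    response = Net::HTTP.post(uri, { query: query }.to_json,
                              'Content-Type'  => 'application/json',
                              'Private-Token' => '<personal-access-token>')
    puts JSON.parse(response.body).dig('data', 'metadata')
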
diff --git a/spec/helpers/emails_helper_spec.rb b/spec/helpers/emails_helper_spec.rb
index 23d7e41803e..03b4c19ec22 100644
--- a/spec/helpers/emails_helper_spec.rb
+++ b/spec/helpers/emails_helper_spec.rb
@@ -142,4 +142,58 @@ describe EmailsHelper do
end
end
end
+
+ describe 'header and footer messages' do
+ context 'when email_header_and_footer_enabled is enabled' do
+ it 'returns header and footer messages' do
+ create :appearance, header_message: 'Foo', footer_message: 'Bar', email_header_and_footer_enabled: true
+
+ aggregate_failures do
+ expect(html_header_message).to eq(%{<div class="header-message" style=""><p>Foo</p></div>})
+ expect(html_footer_message).to eq(%{<div class="footer-message" style=""><p>Bar</p></div>})
+ expect(text_header_message).to eq('Foo')
+ expect(text_footer_message).to eq('Bar')
+ end
+ end
+
+ context 'when header and footer messages are empty' do
+ it 'returns nil' do
+ create :appearance, header_message: '', footer_message: '', email_header_and_footer_enabled: true
+
+ aggregate_failures do
+ expect(html_header_message).to eq(nil)
+ expect(html_footer_message).to eq(nil)
+ expect(text_header_message).to eq(nil)
+ expect(text_footer_message).to eq(nil)
+ end
+ end
+ end
+
+ context 'when header and footer messages are nil' do
+ it 'returns nil' do
+ create :appearance, header_message: nil, footer_message: nil, email_header_and_footer_enabled: true
+
+ aggregate_failures do
+ expect(html_header_message).to eq(nil)
+ expect(html_footer_message).to eq(nil)
+ expect(text_header_message).to eq(nil)
+ expect(text_footer_message).to eq(nil)
+ end
+ end
+ end
+ end
+
+ context 'when email_header_and_footer_enabled is disabled' do
+ it 'does not return header and footer messages' do
+ create :appearance, header_message: 'Foo', footer_message: 'Bar', email_header_and_footer_enabled: false
+
+ aggregate_failures do
+ expect(html_header_message).to eq(nil)
+ expect(html_footer_message).to eq(nil)
+ expect(text_header_message).to eq(nil)
+ expect(text_footer_message).to eq(nil)
+ end
+ end
+ end
+ end
end
diff --git a/spec/helpers/preferences_helper_spec.rb b/spec/helpers/preferences_helper_spec.rb
index e0e8ebd0c3c..db0d45c3692 100644
--- a/spec/helpers/preferences_helper_spec.rb
+++ b/spec/helpers/preferences_helper_spec.rb
@@ -36,10 +36,11 @@ describe PreferencesHelper do
end
describe '#first_day_of_week_choices' do
- it 'returns Sunday and Monday as choices' do
+ it 'returns Saturday, Sunday and Monday as choices' do
expect(helper.first_day_of_week_choices).to eq [
['Sunday', 0],
- ['Monday', 1]
+ ['Monday', 1],
+ ['Saturday', 6]
]
end
end
@@ -47,14 +48,21 @@ describe PreferencesHelper do
describe '#first_day_of_week_choices_with_default' do
it 'returns choices including system default' do
expect(helper.first_day_of_week_choices_with_default).to eq [
- ['System default (Sunday)', nil], ['Sunday', 0], ['Monday', 1]
+ ['System default (Sunday)', nil], ['Sunday', 0], ['Monday', 1], ['Saturday', 6]
]
end
it 'returns choices including system default set to Monday' do
stub_application_setting(first_day_of_week: 1)
expect(helper.first_day_of_week_choices_with_default).to eq [
- ['System default (Monday)', nil], ['Sunday', 0], ['Monday', 1]
+ ['System default (Monday)', nil], ['Sunday', 0], ['Monday', 1], ['Saturday', 6]
+ ]
+ end
+
+ it 'returns choices including system default set to Saturday' do
+ stub_application_setting(first_day_of_week: 6)
+ expect(helper.first_day_of_week_choices_with_default).to eq [
+ ['System default (Saturday)', nil], ['Sunday', 0], ['Monday', 1], ['Saturday', 6]
]
end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 49895b0680b..291eafece94 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -3,6 +3,56 @@ require 'spec_helper'
describe ProjectsHelper do
include ProjectForksHelper
+ describe '#error_tracking_setting_project_json' do
+ let(:project) { create(:project) }
+
+ context 'error tracking setting does not exist' do
+ before do
+ helper.instance_variable_set(:@project, project)
+ end
+
+ it 'returns nil' do
+ expect(helper.error_tracking_setting_project_json).to be_nil
+ end
+ end
+
+ context 'error tracking setting exists' do
+ let!(:error_tracking_setting) { create(:project_error_tracking_setting, project: project) }
+
+ context 'api_url present' do
+ let(:json) do
+ {
+ name: error_tracking_setting.project_name,
+ organization_name: error_tracking_setting.organization_name,
+ organization_slug: error_tracking_setting.organization_slug,
+ slug: error_tracking_setting.project_slug
+ }.to_json
+ end
+
+ before do
+ helper.instance_variable_set(:@project, project)
+ end
+
+ it 'returns error tracking json' do
+ expect(helper.error_tracking_setting_project_json).to eq(json)
+ end
+ end
+
+ context 'api_url not present' do
+ before do
+ project.error_tracking_setting.api_url = nil
+ project.error_tracking_setting.enabled = false
+
+ helper.instance_variable_set(:@project, project)
+ end
+
+ it 'returns nil' do
+ expect(helper.error_tracking_setting_project_json).to be_nil
+ end
+ end
+ end
+ end
+
describe "#project_status_css_class" do
it "returns appropriate class" do
expect(project_status_css_class("started")).to eq("table-active")
diff --git a/spec/javascripts/api_spec.js b/spec/javascripts/api_spec.js
index 1e9470970ff..e537e0e8afc 100644
--- a/spec/javascripts/api_spec.js
+++ b/spec/javascripts/api_spec.js
@@ -139,6 +139,40 @@ describe('Api', () => {
});
});
+ describe('projectMergeRequests', () => {
+ const projectPath = 'abc';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests`;
+
+ it('fetches all merge requests for a project', done => {
+ const mockData = [{ source_branch: 'foo' }, { source_branch: 'bar' }];
+ mock.onGet(expectedUrl).reply(200, mockData);
+ Api.projectMergeRequests(projectPath)
+ .then(({ data }) => {
+ expect(data.length).toEqual(2);
+ expect(data[0].source_branch).toBe('foo');
+ expect(data[1].source_branch).toBe('bar');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('fetches merge requests filtered with passed params', done => {
+ const params = {
+ source_branch: 'bar',
+ };
+ const mockData = [{ source_branch: 'bar' }];
+ mock.onGet(expectedUrl, { params }).reply(200, mockData);
+
+ Api.projectMergeRequests(projectPath, params)
+ .then(({ data }) => {
+ expect(data.length).toEqual(1);
+ expect(data[0].source_branch).toBe('bar');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
describe('projectMergeRequest', () => {
it('fetches a merge request', done => {
const projectPath = 'abc';
diff --git a/spec/javascripts/awards_handler_spec.js b/spec/javascripts/awards_handler_spec.js
index ce5d2022441..e5b5707dcef 100644
--- a/spec/javascripts/awards_handler_spec.js
+++ b/spec/javascripts/awards_handler_spec.js
@@ -1,12 +1,16 @@
import $ from 'jquery';
import Cookies from 'js-cookie';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import loadAwardsHandler from '~/awards_handler';
import '~/lib/utils/common_utils';
+import { EMOJI_VERSION } from '~/emoji';
window.gl = window.gl || {};
window.gon = window.gon || {};
let openAndWaitForEmojiMenu;
+let mock;
let awardsHandler = null;
const urlRoot = gon.relative_url_root;
@@ -19,8 +23,13 @@ const lazyAssert = function(done, assertFn) {
};
describe('AwardsHandler', function() {
+ const emojiData = getJSONFixture('emojis/emojis.json');
preloadFixtures('snippets/show.html.raw');
+
beforeEach(function(done) {
+ mock = new MockAdapter(axios);
+ mock.onGet(`/-/emojis/${EMOJI_VERSION}/emojis.json`).reply(200, emojiData);
+
loadFixtures('snippets/show.html.raw');
loadAwardsHandler(true)
.then(obj => {
@@ -53,6 +62,8 @@ describe('AwardsHandler', function() {
// restore original url root value
gon.relative_url_root = urlRoot;
+ mock.restore();
+
// Undo what we did to the shared <body>
$('body').removeAttr('data-page');
diff --git a/spec/javascripts/badges/store/actions_spec.js b/spec/javascripts/badges/store/actions_spec.js
index 2623465ebd6..e8d5f8c3aac 100644
--- a/spec/javascripts/badges/store/actions_spec.js
+++ b/spec/javascripts/badges/store/actions_spec.js
@@ -411,7 +411,7 @@ describe('Badges store actions', () => {
it('escapes user input', done => {
spyOn(axios, 'get').and.callFake(() => Promise.resolve({ data: createDummyBadgeResponse() }));
- badgeInForm.imageUrl = '&make-sandwhich=true';
+ badgeInForm.imageUrl = '&make-sandwich=true';
badgeInForm.linkUrl = '<script>I am dangerous!</script>';
actions
@@ -422,7 +422,7 @@ describe('Badges store actions', () => {
expect(url).toMatch(`^${dummyEndpointUrl}/render?`);
expect(url).toMatch('\\?link_url=%3Cscript%3EI%20am%20dangerous!%3C%2Fscript%3E&');
- expect(url).toMatch('&image_url=%26make-sandwhich%3Dtrue$');
+ expect(url).toMatch('&image_url=%26make-sandwich%3Dtrue$');
})
.then(done)
.catch(done.fail);
diff --git a/spec/javascripts/behaviors/copy_as_gfm_spec.js b/spec/javascripts/behaviors/copy_as_gfm_spec.js
index ca849f75860..d653fca0988 100644
--- a/spec/javascripts/behaviors/copy_as_gfm_spec.js
+++ b/spec/javascripts/behaviors/copy_as_gfm_spec.js
@@ -100,7 +100,7 @@ describe('CopyAsGFM', () => {
simulateCopy();
setTimeout(() => {
- const expectedGFM = '* List Item1\n\n* List Item2';
+ const expectedGFM = '* List Item1\n* List Item2';
expect(clipboardData.setData).toHaveBeenCalledWith('text/x-gfm', expectedGFM);
done();
@@ -114,7 +114,7 @@ describe('CopyAsGFM', () => {
simulateCopy();
setTimeout(() => {
- const expectedGFM = '1. List Item1\n\n1. List Item2';
+ const expectedGFM = '1. List Item1\n1. List Item2';
expect(clipboardData.setData).toHaveBeenCalledWith('text/x-gfm', expectedGFM);
done();
diff --git a/spec/javascripts/boards/components/issue_due_date_spec.js b/spec/javascripts/boards/components/issue_due_date_spec.js
index 054cf8c5b7d..68e26b68f04 100644
--- a/spec/javascripts/boards/components/issue_due_date_spec.js
+++ b/spec/javascripts/boards/components/issue_due_date_spec.js
@@ -43,7 +43,7 @@ describe('Issue Due Date component', () => {
date.setDate(date.getDate() + 5);
vm = createComponent(date);
- expect(vm.$el.querySelector('time').textContent.trim()).toEqual(dateFormat(date, 'dddd', true));
+ expect(vm.$el.querySelector('time').textContent.trim()).toEqual(dateFormat(date, 'dddd'));
});
it('should render month and day for other dates', () => {
@@ -53,7 +53,7 @@ describe('Issue Due Date component', () => {
const isDueInCurrentYear = today.getFullYear() === date.getFullYear();
const format = isDueInCurrentYear ? 'mmm d' : 'mmm d, yyyy';
- expect(vm.$el.querySelector('time').textContent.trim()).toEqual(dateFormat(date, format, true));
+ expect(vm.$el.querySelector('time').textContent.trim()).toEqual(dateFormat(date, format));
});
it('should contain the correct `.text-danger` css class for overdue issue', () => {
diff --git a/spec/javascripts/clusters/components/application_row_spec.js b/spec/javascripts/clusters/components/application_row_spec.js
index 8cb9713964e..a2dd4e93daf 100644
--- a/spec/javascripts/clusters/components/application_row_spec.js
+++ b/spec/javascripts/clusters/components/application_row_spec.js
@@ -230,7 +230,7 @@ describe('Application Row', () => {
expect(upgradeBtn.innerHTML).toContain('Upgrade');
});
- it('has enabled "Retry upgrade" when APPLICATION_STATUS.UPDATE_ERRORED', () => {
+ it('has enabled "Retry update" when APPLICATION_STATUS.UPDATE_ERRORED', () => {
vm = mountComponent(ApplicationRow, {
...DEFAULT_APPLICATION_STATE,
status: APPLICATION_STATUS.UPDATE_ERRORED,
@@ -239,10 +239,10 @@ describe('Application Row', () => {
expect(upgradeBtn).not.toBe(null);
expect(vm.upgradeFailed).toBe(true);
- expect(upgradeBtn.innerHTML).toContain('Retry upgrade');
+ expect(upgradeBtn.innerHTML).toContain('Retry update');
});
- it('has disabled "Retry upgrade" when APPLICATION_STATUS.UPDATING', () => {
+ it('has disabled "Updating" when APPLICATION_STATUS.UPDATING', () => {
vm = mountComponent(ApplicationRow, {
...DEFAULT_APPLICATION_STATE,
status: APPLICATION_STATUS.UPDATING,
@@ -251,7 +251,7 @@ describe('Application Row', () => {
expect(upgradeBtn).not.toBe(null);
expect(vm.isUpgrading).toBe(true);
- expect(upgradeBtn.innerHTML).toContain('Upgrading');
+ expect(upgradeBtn.innerHTML).toContain('Updating');
});
it('clicking upgrade button emits event', () => {
@@ -295,7 +295,7 @@ describe('Application Row', () => {
expect(failureMessage).not.toBe(null);
expect(failureMessage.innerHTML).toContain(
- 'Something went wrong when upgrading GitLab Runner. Please check the logs and try again.',
+ 'Update failed. Please check the logs and try again.',
);
});
});
diff --git a/spec/javascripts/clusters/components/applications_spec.js b/spec/javascripts/clusters/components/applications_spec.js
index 14ef1193984..8daf0282184 100644
--- a/spec/javascripts/clusters/components/applications_spec.js
+++ b/spec/javascripts/clusters/components/applications_spec.js
@@ -1,7 +1,9 @@
import Vue from 'vue';
import applications from '~/clusters/components/applications.vue';
import { CLUSTER_TYPE } from '~/clusters/constants';
+import eventHub from '~/clusters/event_hub';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import { APPLICATIONS_MOCK_STATE } from '../services/mock_data';
describe('Applications', () => {
let vm;
@@ -18,16 +20,8 @@ describe('Applications', () => {
describe('Project cluster applications', () => {
beforeEach(() => {
vm = mountComponent(Applications, {
+ applications: APPLICATIONS_MOCK_STATE,
type: CLUSTER_TYPE.PROJECT,
- applications: {
- helm: { title: 'Helm Tiller' },
- ingress: { title: 'Ingress' },
- cert_manager: { title: 'Cert-Manager' },
- runner: { title: 'GitLab Runner' },
- prometheus: { title: 'Prometheus' },
- jupyter: { title: 'JupyterHub' },
- knative: { title: 'Knative' },
- },
});
});
@@ -64,15 +58,7 @@ describe('Applications', () => {
beforeEach(() => {
vm = mountComponent(Applications, {
type: CLUSTER_TYPE.GROUP,
- applications: {
- helm: { title: 'Helm Tiller' },
- ingress: { title: 'Ingress' },
- cert_manager: { title: 'Cert-Manager' },
- runner: { title: 'GitLab Runner' },
- prometheus: { title: 'Prometheus' },
- jupyter: { title: 'JupyterHub' },
- knative: { title: 'Knative' },
- },
+ applications: APPLICATIONS_MOCK_STATE,
});
});
@@ -111,17 +97,12 @@ describe('Applications', () => {
it('renders ip address with a clipboard button', () => {
vm = mountComponent(Applications, {
applications: {
+ ...APPLICATIONS_MOCK_STATE,
ingress: {
title: 'Ingress',
status: 'installed',
externalIp: '0.0.0.0',
},
- helm: { title: 'Helm Tiller' },
- cert_manager: { title: 'Cert-Manager' },
- runner: { title: 'GitLab Runner' },
- prometheus: { title: 'Prometheus' },
- jupyter: { title: 'JupyterHub', hostname: '' },
- knative: { title: 'Knative', hostname: '' },
},
});
@@ -137,16 +118,11 @@ describe('Applications', () => {
it('renders an input text with a question mark and an alert text', () => {
vm = mountComponent(Applications, {
applications: {
+ ...APPLICATIONS_MOCK_STATE,
ingress: {
title: 'Ingress',
status: 'installed',
},
- helm: { title: 'Helm Tiller' },
- cert_manager: { title: 'Cert-Manager' },
- runner: { title: 'GitLab Runner' },
- prometheus: { title: 'Prometheus' },
- jupyter: { title: 'JupyterHub', hostname: '' },
- knative: { title: 'Knative', hostname: '' },
},
});
@@ -160,15 +136,7 @@ describe('Applications', () => {
describe('before installing', () => {
it('does not render the IP address', () => {
vm = mountComponent(Applications, {
- applications: {
- helm: { title: 'Helm Tiller' },
- ingress: { title: 'Ingress' },
- cert_manager: { title: 'Cert-Manager' },
- runner: { title: 'GitLab Runner' },
- prometheus: { title: 'Prometheus' },
- jupyter: { title: 'JupyterHub', hostname: '' },
- knative: { title: 'Knative', hostname: '' },
- },
+ applications: APPLICATIONS_MOCK_STATE,
});
expect(vm.$el.textContent).not.toContain('Ingress IP Address');
@@ -181,17 +149,12 @@ describe('Applications', () => {
it('renders email & allows editing', () => {
vm = mountComponent(Applications, {
applications: {
- helm: { title: 'Helm Tiller', status: 'installed' },
- ingress: { title: 'Ingress', status: 'installed', externalIp: '1.1.1.1' },
+ ...APPLICATIONS_MOCK_STATE,
cert_manager: {
title: 'Cert-Manager',
email: 'before@example.com',
status: 'installable',
},
- runner: { title: 'GitLab Runner' },
- prometheus: { title: 'Prometheus' },
- jupyter: { title: 'JupyterHub', hostname: '', status: 'installable' },
- knative: { title: 'Knative', hostname: '', status: 'installable' },
},
});
@@ -204,17 +167,12 @@ describe('Applications', () => {
it('renders email in readonly', () => {
vm = mountComponent(Applications, {
applications: {
- helm: { title: 'Helm Tiller', status: 'installed' },
- ingress: { title: 'Ingress', status: 'installed', externalIp: '1.1.1.1' },
+ ...APPLICATIONS_MOCK_STATE,
cert_manager: {
title: 'Cert-Manager',
email: 'after@example.com',
status: 'installed',
},
- runner: { title: 'GitLab Runner' },
- prometheus: { title: 'Prometheus' },
- jupyter: { title: 'JupyterHub', hostname: '', status: 'installable' },
- knative: { title: 'Knative', hostname: '', status: 'installable' },
},
});
@@ -229,13 +187,12 @@ describe('Applications', () => {
it('renders hostname active input', () => {
vm = mountComponent(Applications, {
applications: {
- helm: { title: 'Helm Tiller', status: 'installed' },
- ingress: { title: 'Ingress', status: 'installed', externalIp: '1.1.1.1' },
- cert_manager: { title: 'Cert-Manager' },
- runner: { title: 'GitLab Runner' },
- prometheus: { title: 'Prometheus' },
- jupyter: { title: 'JupyterHub', hostname: '', status: 'installable' },
- knative: { title: 'Knative', hostname: '', status: 'installable' },
+ ...APPLICATIONS_MOCK_STATE,
+ ingress: {
+ title: 'Ingress',
+ status: 'installed',
+ externalIp: '1.1.1.1',
+ },
},
});
@@ -247,13 +204,8 @@ describe('Applications', () => {
it('does not render hostname input', () => {
vm = mountComponent(Applications, {
applications: {
- helm: { title: 'Helm Tiller', status: 'installed' },
+ ...APPLICATIONS_MOCK_STATE,
ingress: { title: 'Ingress', status: 'installed' },
- cert_manager: { title: 'Cert-Manager' },
- runner: { title: 'GitLab Runner' },
- prometheus: { title: 'Prometheus' },
- jupyter: { title: 'JupyterHub', hostname: '', status: 'installable' },
- knative: { title: 'Knative', hostname: '', status: 'installable' },
},
});
@@ -265,13 +217,9 @@ describe('Applications', () => {
it('renders readonly input', () => {
vm = mountComponent(Applications, {
applications: {
- helm: { title: 'Helm Tiller', status: 'installed' },
+ ...APPLICATIONS_MOCK_STATE,
ingress: { title: 'Ingress', status: 'installed', externalIp: '1.1.1.1' },
- cert_manager: { title: 'Cert-Manager' },
- runner: { title: 'GitLab Runner' },
- prometheus: { title: 'Prometheus' },
jupyter: { title: 'JupyterHub', status: 'installed', hostname: '' },
- knative: { title: 'Knative', status: 'installed', hostname: '' },
},
});
@@ -282,15 +230,7 @@ describe('Applications', () => {
describe('without ingress installed', () => {
beforeEach(() => {
vm = mountComponent(Applications, {
- applications: {
- helm: { title: 'Helm Tiller' },
- ingress: { title: 'Ingress' },
- cert_manager: { title: 'Cert-Manager' },
- runner: { title: 'GitLab Runner' },
- prometheus: { title: 'Prometheus' },
- jupyter: { title: 'JupyterHub', status: 'not_installable' },
- knative: { title: 'Knative' },
- },
+ applications: APPLICATIONS_MOCK_STATE,
});
});
@@ -310,4 +250,77 @@ describe('Applications', () => {
});
});
});
+
+ describe('Knative application', () => {
+ describe('when installed', () => {
+ describe('with ip address', () => {
+ const props = {
+ applications: {
+ ...APPLICATIONS_MOCK_STATE,
+ knative: {
+ title: 'Knative',
+ hostname: 'example.com',
+ status: 'installed',
+ externalIp: '1.1.1.1',
+ },
+ },
+ };
+ it('renders ip address with a clipboard button', () => {
+ vm = mountComponent(Applications, props);
+
+ expect(vm.$el.querySelector('.js-knative-ip-address').value).toEqual('1.1.1.1');
+
+ expect(
+ vm.$el
+ .querySelector('.js-knative-ip-clipboard-btn')
+ .getAttribute('data-clipboard-text'),
+ ).toEqual('1.1.1.1');
+ });
+
+ it('renders domain & allows editing', () => {
+ expect(vm.$el.querySelector('.js-knative-domainname').value).toEqual('example.com');
+ expect(vm.$el.querySelector('.js-knative-domainname').getAttribute('readonly')).toBe(
+ null,
+ );
+ });
+
+ it('renders an update/save Knative domain button', () => {
+ expect(vm.$el.querySelector('.js-knative-save-domain-button')).not.toBe(null);
+ });
+
+ it('emits event when clicking Save changes button', () => {
+ spyOn(eventHub, '$emit');
+ vm = mountComponent(Applications, props);
+
+ const saveButton = vm.$el.querySelector('.js-knative-save-domain-button');
+
+ saveButton.click();
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('saveKnativeDomain', {
+ id: 'knative',
+ params: { hostname: 'example.com' },
+ });
+ });
+ });
+
+ describe('without ip address', () => {
+ it('renders an input text with a question mark and an alert text', () => {
+ vm = mountComponent(Applications, {
+ applications: {
+ ...APPLICATIONS_MOCK_STATE,
+ knative: {
+ title: 'Knative',
+ hostname: 'example.com',
+ status: 'installed',
+ },
+ },
+ });
+
+ expect(vm.$el.querySelector('.js-knative-ip-address').value).toEqual('?');
+
+ expect(vm.$el.querySelector('.js-no-knative-ip-message')).not.toBe(null);
+ });
+ });
+ });
+ });
});
diff --git a/spec/javascripts/clusters/services/mock_data.js b/spec/javascripts/clusters/services/mock_data.js
index 3c3d9977ffb..3ace19c6401 100644
--- a/spec/javascripts/clusters/services/mock_data.js
+++ b/spec/javascripts/clusters/services/mock_data.js
@@ -115,4 +115,14 @@ const DEFAULT_APPLICATION_STATE = {
requestReason: null,
};
-export { CLUSTERS_MOCK_DATA, DEFAULT_APPLICATION_STATE };
+const APPLICATIONS_MOCK_STATE = {
+ helm: { title: 'Helm Tiller', status: 'installable' },
+ ingress: { title: 'Ingress', status: 'installable' },
+ cert_manager: { title: 'Cert-Manager', status: 'installable' },
+ runner: { title: 'GitLab Runner' },
+ prometheus: { title: 'Prometheus' },
+ jupyter: { title: 'JupyterHub', status: 'installable', hostname: '' },
+  knative: { title: 'Knative', status: 'installable', hostname: '' },
+};
+
+export { CLUSTERS_MOCK_DATA, DEFAULT_APPLICATION_STATE, APPLICATIONS_MOCK_STATE };
diff --git a/spec/javascripts/clusters/stores/clusters_store_spec.js b/spec/javascripts/clusters/stores/clusters_store_spec.js
index 37a4d6614f6..09bcdf91d91 100644
--- a/spec/javascripts/clusters/stores/clusters_store_spec.js
+++ b/spec/javascripts/clusters/stores/clusters_store_spec.js
@@ -111,6 +111,7 @@ describe('Clusters Store', () => {
requestStatus: null,
requestReason: null,
hostname: null,
+ isEditingHostName: false,
externalIp: null,
},
cert_manager: {
diff --git a/spec/javascripts/diffs/components/app_spec.js b/spec/javascripts/diffs/components/app_spec.js
index bce6113f75a..d81c433cca6 100644
--- a/spec/javascripts/diffs/components/app_spec.js
+++ b/spec/javascripts/diffs/components/app_spec.js
@@ -1,11 +1,19 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
import { TEST_HOST } from 'spec/test_constants';
import App from '~/diffs/components/app.vue';
import NoChanges from '~/diffs/components/no_changes.vue';
import DiffFile from '~/diffs/components/diff_file.vue';
import Mousetrap from 'mousetrap';
+import CompareVersions from '~/diffs/components/compare_versions.vue';
+import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
+import CommitWidget from '~/diffs/components/commit_widget.vue';
+import TreeList from '~/diffs/components/tree_list.vue';
import createDiffsStore from '../create_diffs_store';
+import diffsMockData from '../mock_data/merge_request_diffs';
+
+const mergeRequestDiff = { version_index: 1 };
describe('diffs/components/app', () => {
const oldMrTabs = window.mrTabs;
@@ -49,6 +57,21 @@ describe('diffs/components/app', () => {
wrapper.destroy();
});
+ it('displays loading icon on loading', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.isLoading = true;
+ });
+
+ expect(wrapper.contains(GlLoadingIcon)).toBe(true);
+ });
+
+ it('displays diffs container when not loading', () => {
+ createComponent();
+
+ expect(wrapper.contains(GlLoadingIcon)).toBe(false);
+ expect(wrapper.contains('#diffs')).toBe(true);
+ });
+
it('does not show commit info', () => {
createComponent();
@@ -134,8 +157,8 @@ describe('diffs/components/app', () => {
});
it('does not render empty state when diff files exist', () => {
- createComponent({}, () => {
- store.state.diffs.diffFiles.push({
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles.push({
id: 1,
});
});
@@ -145,9 +168,9 @@ describe('diffs/components/app', () => {
});
it('does not render empty state when versions match', () => {
- createComponent({}, () => {
- store.state.diffs.startVersion = { version_index: 1 };
- store.state.diffs.mergeRequestDiff = { version_index: 1 };
+ createComponent({}, ({ state }) => {
+ state.diffs.startVersion = mergeRequestDiff;
+ state.diffs.mergeRequestDiff = mergeRequestDiff;
});
expect(wrapper.contains(NoChanges)).toBe(false);
@@ -307,4 +330,71 @@ describe('diffs/components/app', () => {
.catch(done.fail);
});
});
+
+ describe('diffs', () => {
+ it('should render compare versions component', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.mergeRequestDiffs = diffsMockData;
+ state.diffs.targetBranchName = 'target-branch';
+ state.diffs.mergeRequestDiff = mergeRequestDiff;
+ });
+
+ expect(wrapper.contains(CompareVersions)).toBe(true);
+ expect(wrapper.find(CompareVersions).props()).toEqual(
+ jasmine.objectContaining({
+ targetBranch: {
+ branchName: 'target-branch',
+ versionIndex: -1,
+ path: '',
+ },
+ mergeRequestDiffs: diffsMockData,
+ mergeRequestDiff,
+ }),
+ );
+ });
+
+ it('should render hidden files warning if render overflow warning is present', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.renderOverflowWarning = true;
+ state.diffs.realSize = '5';
+ state.diffs.plainDiffPath = 'plain diff path';
+ state.diffs.emailPatchPath = 'email patch path';
+ state.diffs.size = 1;
+ });
+
+ expect(wrapper.contains(HiddenFilesWarning)).toBe(true);
+ expect(wrapper.find(HiddenFilesWarning).props()).toEqual(
+ jasmine.objectContaining({
+ total: '5',
+ plainDiffPath: 'plain diff path',
+ emailPatchPath: 'email patch path',
+ visible: 1,
+ }),
+ );
+ });
+
+ it('should display commit widget if store has a commit', () => {
+ createComponent({}, () => {
+ store.state.diffs.commit = {
+ author: 'John Doe',
+ };
+ });
+
+ expect(wrapper.contains(CommitWidget)).toBe(true);
+ });
+
+ it('should display diff file if there are diff files', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles.push({ sha: '123' });
+ });
+
+ expect(wrapper.contains(DiffFile)).toBe(true);
+ });
+
+ it('should render tree list', () => {
+ createComponent();
+
+ expect(wrapper.find(TreeList).exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/javascripts/diffs/components/changed_files_dropdown_spec.js b/spec/javascripts/diffs/components/changed_files_dropdown_spec.js
deleted file mode 100644
index 7237274eb43..00000000000
--- a/spec/javascripts/diffs/components/changed_files_dropdown_spec.js
+++ /dev/null
@@ -1 +0,0 @@
-// TODO: https://gitlab.com/gitlab-org/gitlab-ce/issues/48034
diff --git a/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js b/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js
index 53b9ac22fc0..8a3834d542f 100644
--- a/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js
+++ b/spec/javascripts/diffs/components/compare_versions_dropdown_spec.js
@@ -1,34 +1,161 @@
-// TODO: https://gitlab.com/gitlab-org/gitlab-ce/issues/48034
import { shallowMount, createLocalVue } from '@vue/test-utils';
import CompareVersionsDropdown from '~/diffs/components/compare_versions_dropdown.vue';
import diffsMockData from '../mock_data/merge_request_diffs';
+import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
+
+const localVue = createLocalVue();
+const targetBranch = { branchName: 'tmp-wine-dev', versionIndex: -1 };
+const startVersion = { version_index: 4 };
+const mergeRequestVersion = {
+ version_path: '123',
+};
+const baseVersionPath = '/gnuwget/wget2/merge_requests/6/diffs?diff_id=37';
describe('CompareVersionsDropdown', () => {
let wrapper;
- const targetBranch = { branchName: 'tmp-wine-dev', versionIndex: -1 };
- const factory = (options = {}) => {
- const localVue = createLocalVue();
+ const findSelectedVersion = () => wrapper.find('.dropdown-menu-toggle');
+ const findVersionsListElements = () => wrapper.findAll('li');
+ const findLinkElement = index =>
+ findVersionsListElements()
+ .at(index)
+ .find('a');
+ const findLastLink = () => findLinkElement(findVersionsListElements().length - 1);
- wrapper = shallowMount(CompareVersionsDropdown, { localVue, ...options });
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(localVue.extend(CompareVersionsDropdown), {
+ localVue,
+ sync: false,
+ propsData: { ...props },
+ });
};
afterEach(() => {
wrapper.destroy();
});
- it('should render a correct base version link', () => {
- factory({
- propsData: {
- baseVersionPath: '/gnuwget/wget2/merge_requests/6/diffs?diff_id=37',
+ describe('selected version name', () => {
+ it('shows latest version when latest is selected', () => {
+ createComponent({
+ mergeRequestVersion,
+ startVersion,
+ otherVersions: diffsMockData,
+ });
+
+ expect(findSelectedVersion().text()).toBe('latest version');
+ });
+
+ it('shows target branch name for base branch', () => {
+ createComponent({
+ targetBranch,
+ });
+
+ expect(findSelectedVersion().text()).toBe('tmp-wine-dev');
+ });
+
+ it('shows correct version for non-base and non-latest branches', () => {
+ createComponent({
+ startVersion,
+ targetBranch,
+ });
+
+ expect(findSelectedVersion().text()).toBe(`version ${startVersion.version_index}`);
+ });
+ });
+
+ describe('target versions list', () => {
+ it('should have the same length as otherVersions if merge request version is present', () => {
+ createComponent({
+ mergeRequestVersion,
+ otherVersions: diffsMockData,
+ });
+
+ expect(findVersionsListElements().length).toEqual(diffsMockData.length);
+ });
+
+    it('should have otherVersions length plus 1 if no merge request version is present', () => {
+ createComponent({
+ targetBranch,
+ otherVersions: diffsMockData,
+ });
+
+ expect(findVersionsListElements().length).toEqual(diffsMockData.length + 1);
+ });
+
+ it('should have base branch link as active on base branch', () => {
+ createComponent({
+ targetBranch,
+ otherVersions: diffsMockData,
+ });
+
+ expect(findLastLink().classes()).toContain('is-active');
+ });
+
+ it('should have correct branch link as active if start version present', () => {
+ createComponent({
+ targetBranch,
+ startVersion,
+ otherVersions: diffsMockData,
+ });
+
+ expect(findLinkElement(0).classes()).toContain('is-active');
+ });
+
+ it('should render a correct base version link', () => {
+ createComponent({
+ baseVersionPath,
otherVersions: diffsMockData.slice(1),
targetBranch,
- },
+ });
+
+ expect(findLastLink().attributes('href')).toEqual(baseVersionPath);
+ expect(findLastLink().text()).toContain('(base)');
+ });
+
+    it('should not render commits count if showCommitCount is not passed', () => {
+ createComponent({
+ otherVersions: diffsMockData,
+ targetBranch,
+ });
+
+ const commitsCount = diffsMockData[0].commits_count;
+
+ expect(findLinkElement(0).text()).not.toContain(`${commitsCount} commit`);
+ });
+
+    it('should render correct commits count if showCommitCount is passed', () => {
+ createComponent({
+ otherVersions: diffsMockData,
+ targetBranch,
+ showCommitCount: true,
+ });
+
+ const commitsCount = diffsMockData[0].commits_count;
+
+ expect(findLinkElement(0).text()).toContain(`${commitsCount} commit`);
+ });
+
+ it('should render correct commit sha', () => {
+ createComponent({
+ otherVersions: diffsMockData,
+ targetBranch,
+ });
+
+ const commitShaElement = findLinkElement(0).find('.commit-sha');
+
+ expect(commitShaElement.text()).toBe(diffsMockData[0].short_commit_sha);
});
- const links = wrapper.findAll('a');
- const lastLink = links.wrappers[links.length - 1];
+    it('should render correct time-ago', () => {
+ createComponent({
+ otherVersions: diffsMockData,
+ targetBranch,
+ });
+
+ const timeAgoElement = findLinkElement(0).find(TimeAgo);
- expect(lastLink.attributes('href')).toEqual(wrapper.props('baseVersionPath'));
+ expect(timeAgoElement.exists()).toBe(true);
+ expect(timeAgoElement.props('time')).toBe(diffsMockData[0].created_at);
+ });
});
});
diff --git a/spec/javascripts/diffs/components/diff_content_spec.js b/spec/javascripts/diffs/components/diff_content_spec.js
index a1bb51963d6..bc9288e4150 100644
--- a/spec/javascripts/diffs/components/diff_content_spec.js
+++ b/spec/javascripts/diffs/components/diff_content_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import DiffContentComponent from '~/diffs/components/diff_content.vue';
-import { createStore } from '~/mr_notes/stores';
+import { createStore } from 'ee_else_ce/mr_notes/stores';
import { mountComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import { GREEN_BOX_IMAGE_URL, RED_BOX_IMAGE_URL } from 'spec/test_constants';
import '~/behaviors/markdown/render_gfm';
diff --git a/spec/javascripts/diffs/components/diff_file_header_spec.js b/spec/javascripts/diffs/components/diff_file_header_spec.js
index 005a4751ea1..66c5b17b825 100644
--- a/spec/javascripts/diffs/components/diff_file_header_spec.js
+++ b/spec/javascripts/diffs/components/diff_file_header_spec.js
@@ -23,6 +23,9 @@ describe('diff_file_header', () => {
});
beforeEach(() => {
+ gon.features = {
+ expandDiffFullFile: true,
+ };
const diffFile = diffDiscussionMock.diff_file;
diffFile.added_lines = 2;
@@ -382,7 +385,7 @@ describe('diff_file_header', () => {
props.diffFile.edit_path = '/';
vm = mountComponentWithStore(Component, { props, store });
- expect(vm.$el.querySelector('.js-edit-blob')).toContainText('Edit');
+ expect(vm.$el.querySelector('.js-edit-blob')).not.toBe(null);
});
it('should not show edit button when file is deleted', () => {
@@ -491,5 +494,151 @@ describe('diff_file_header', () => {
});
});
});
+
+ describe('file actions', () => {
+ it('should not render if diff file has a submodule', () => {
+ props.diffFile.submodule = 'submodule';
+ vm = mountComponentWithStore(Component, { props, store });
+
+ expect(vm.$el.querySelector('.file-actions')).toEqual(null);
+ });
+
+    it('should not render if addMergeRequestButtons is false', () => {
+ props.addMergeRequestButtons = false;
+ vm = mountComponentWithStore(Component, { props, store });
+
+ expect(vm.$el.querySelector('.file-actions')).toEqual(null);
+ });
+
+ describe('with add merge request buttons enabled', () => {
+ beforeEach(() => {
+ props.addMergeRequestButtons = true;
+ props.diffFile.edit_path = 'edit-path';
+ });
+
+ const viewReplacedFileButton = () => vm.$el.querySelector('.js-view-replaced-file');
+ const viewFileButton = () => vm.$el.querySelector('.js-view-file-button');
+ const externalUrl = () => vm.$el.querySelector('.js-external-url');
+
+      it('should render if addMergeRequestButtons is true and the diff file does not have a submodule', () => {
+ vm = mountComponentWithStore(Component, { props, store });
+
+ expect(vm.$el.querySelector('.file-actions')).not.toEqual(null);
+ });
+
+ it('should not render view replaced file button if no replaced view path is present', () => {
+ vm = mountComponentWithStore(Component, { props, store });
+
+ expect(viewReplacedFileButton()).toEqual(null);
+ });
+
+ it('should render view replaced file button if replaced view path is present', () => {
+ props.diffFile.replaced_view_path = 'replaced-view-path';
+ vm = mountComponentWithStore(Component, { props, store });
+
+ expect(viewReplacedFileButton()).not.toEqual(null);
+ expect(viewReplacedFileButton().getAttribute('href')).toBe('replaced-view-path');
+ });
+
+ it('should render correct file view button path', () => {
+ props.diffFile.view_path = 'view-path';
+ vm = mountComponentWithStore(Component, { props, store });
+
+ expect(viewFileButton().getAttribute('href')).toBe('view-path');
+ });
+
+ it('should not render external url view link if diff file has no external url', () => {
+ vm = mountComponentWithStore(Component, { props, store });
+
+ expect(externalUrl()).toEqual(null);
+ });
+
+ it('should render external url view link if diff file has external url', () => {
+ props.diffFile.external_url = 'external_url';
+ vm = mountComponentWithStore(Component, { props, store });
+
+ expect(externalUrl()).not.toEqual(null);
+ expect(externalUrl().getAttribute('href')).toBe('external_url');
+ });
+ });
+
+ describe('without file blob', () => {
+ beforeEach(() => {
+ props.diffFile.blob = null;
+ props.addMergeRequestButtons = true;
+ vm = mountComponentWithStore(Component, { props, store });
+ });
+
+ it('should not render toggle discussions button', () => {
+ expect(vm.$el.querySelector('.js-btn-vue-toggle-comments')).toEqual(null);
+ });
+
+ it('should not render edit button', () => {
+ expect(vm.$el.querySelector('.js-edit-blob')).toEqual(null);
+ });
+ });
+ });
+ });
+
+ describe('expand full file button', () => {
+ beforeEach(() => {
+ props.addMergeRequestButtons = true;
+ props.diffFile.edit_path = '/';
+ });
+
+ it('does not render button', () => {
+ vm = mountComponentWithStore(Component, { props, store });
+
+ expect(vm.$el.querySelector('.js-expand-file')).toBe(null);
+ });
+
+ it('renders button', () => {
+ props.diffFile.is_fully_expanded = false;
+
+ vm = mountComponentWithStore(Component, { props, store });
+
+ expect(vm.$el.querySelector('.js-expand-file')).not.toBe(null);
+ });
+
+ it('shows fully expanded text', () => {
+ props.diffFile.is_fully_expanded = false;
+ props.diffFile.isShowingFullFile = true;
+
+ vm = mountComponentWithStore(Component, { props, store });
+
+ expect(vm.$el.querySelector('.js-expand-file').textContent).toContain('Show changes only');
+ });
+
+ it('shows expand text', () => {
+ props.diffFile.is_fully_expanded = false;
+
+ vm = mountComponentWithStore(Component, { props, store });
+
+ expect(vm.$el.querySelector('.js-expand-file').textContent).toContain('Show full file');
+ });
+
+ it('renders loading icon', () => {
+ props.diffFile.is_fully_expanded = false;
+ props.diffFile.isLoadingFullFile = true;
+
+ vm = mountComponentWithStore(Component, { props, store });
+
+ expect(vm.$el.querySelector('.js-expand-file .loading-container')).not.toBe(null);
+ });
+
+ it('calls toggleFullDiff on click', () => {
+ props.diffFile.is_fully_expanded = false;
+
+ vm = mountComponentWithStore(Component, { props, store });
+
+ spyOn(vm.$store, 'dispatch').and.stub();
+
+ vm.$el.querySelector('.js-expand-file').click();
+
+ expect(vm.$store.dispatch).toHaveBeenCalledWith(
+ 'diffs/toggleFullDiff',
+ props.diffFile.file_path,
+ );
+ });
});
});
diff --git a/spec/javascripts/diffs/components/diff_file_spec.js b/spec/javascripts/diffs/components/diff_file_spec.js
index 65a1c9b8f15..ba04c8c4a4c 100644
--- a/spec/javascripts/diffs/components/diff_file_spec.js
+++ b/spec/javascripts/diffs/components/diff_file_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import DiffFileComponent from '~/diffs/components/diff_file.vue';
import { diffViewerModes, diffViewerErrors } from '~/ide/constants';
-import store from '~/mr_notes/stores';
+import store from 'ee_else_ce/mr_notes/stores';
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import diffFileMockData from '../mock_data/diff_file';
diff --git a/spec/javascripts/diffs/components/edit_button_spec.js b/spec/javascripts/diffs/components/edit_button_spec.js
index 7237274eb43..ccdae4cb312 100644
--- a/spec/javascripts/diffs/components/edit_button_spec.js
+++ b/spec/javascripts/diffs/components/edit_button_spec.js
@@ -1 +1,61 @@
-// TODO: https://gitlab.com/gitlab-org/gitlab-ce/issues/48034
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import EditButton from '~/diffs/components/edit_button.vue';
+
+const localVue = createLocalVue();
+const editPath = 'test-path';
+
+describe('EditButton', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(EditButton, {
+ localVue,
+ sync: false,
+ propsData: { ...props },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('has correct href attribute', () => {
+ createComponent({
+ editPath,
+ canCurrentUserFork: false,
+ });
+
+ expect(wrapper.attributes('href')).toBe(editPath);
+ });
+
+ it('emits a show fork message event if current user can fork', () => {
+ createComponent({
+ editPath,
+ canCurrentUserFork: true,
+ });
+ wrapper.trigger('click');
+
+ expect(wrapper.emitted('showForkMessage')).toBeTruthy();
+ });
+
+  it('does not emit a show fork message event if current user cannot fork', () => {
+ createComponent({
+ editPath,
+ canCurrentUserFork: false,
+ });
+ wrapper.trigger('click');
+
+ expect(wrapper.emitted('showForkMessage')).toBeFalsy();
+ });
+
+  it('does not emit a show fork message event if current user can modify blob', () => {
+ createComponent({
+ editPath,
+ canCurrentUserFork: true,
+ canModifyBlob: true,
+ });
+ wrapper.trigger('click');
+
+ expect(wrapper.emitted('showForkMessage')).toBeFalsy();
+ });
+});
diff --git a/spec/javascripts/diffs/components/hidden_files_warning_spec.js b/spec/javascripts/diffs/components/hidden_files_warning_spec.js
index 7237274eb43..5bf5ddd27bd 100644
--- a/spec/javascripts/diffs/components/hidden_files_warning_spec.js
+++ b/spec/javascripts/diffs/components/hidden_files_warning_spec.js
@@ -1 +1,48 @@
-// TODO: https://gitlab.com/gitlab-org/gitlab-ce/issues/48034
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import HiddenFilesWarning from '~/diffs/components/hidden_files_warning.vue';
+
+const localVue = createLocalVue();
+const propsData = {
+ total: '10',
+ visible: 5,
+ plainDiffPath: 'plain-diff-path',
+ emailPatchPath: 'email-patch-path',
+};
+
+describe('HiddenFilesWarning', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(HiddenFilesWarning, {
+ localVue,
+ sync: false,
+ propsData,
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('has a correct plain diff URL', () => {
+ const plainDiffLink = wrapper.findAll('a').wrappers.filter(x => x.text() === 'Plain diff')[0];
+
+ expect(plainDiffLink.attributes('href')).toBe(propsData.plainDiffPath);
+ });
+
+ it('has a correct email patch URL', () => {
+ const emailPatchLink = wrapper.findAll('a').wrappers.filter(x => x.text() === 'Email patch')[0];
+
+ expect(emailPatchLink.attributes('href')).toBe(propsData.emailPatchPath);
+ });
+
+ it('has a correct visible/total files text', () => {
+ const filesText = wrapper.find('strong');
+
+ expect(filesText.text()).toBe('5 of 10');
+ });
+});
diff --git a/spec/javascripts/diffs/components/inline_diff_view_spec.js b/spec/javascripts/diffs/components/inline_diff_view_spec.js
index 2316ee29106..4452106580a 100644
--- a/spec/javascripts/diffs/components/inline_diff_view_spec.js
+++ b/spec/javascripts/diffs/components/inline_diff_view_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import '~/behaviors/markdown/render_gfm';
import InlineDiffView from '~/diffs/components/inline_diff_view.vue';
-import store from '~/mr_notes/stores';
+import store from 'ee_else_ce/mr_notes/stores';
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import diffFileMockData from '../mock_data/diff_file';
import discussionsMockData from '../mock_data/diff_discussions';
diff --git a/spec/javascripts/diffs/components/parallel_diff_view_spec.js b/spec/javascripts/diffs/components/parallel_diff_view_spec.js
index 6f6b1c41915..236bda96145 100644
--- a/spec/javascripts/diffs/components/parallel_diff_view_spec.js
+++ b/spec/javascripts/diffs/components/parallel_diff_view_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import ParallelDiffView from '~/diffs/components/parallel_diff_view.vue';
-import store from '~/mr_notes/stores';
+import store from 'ee_else_ce/mr_notes/stores';
import * as constants from '~/diffs/constants';
import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
import diffFileMockData from '../mock_data/diff_file';
@@ -18,6 +18,10 @@ describe('ParallelDiffView', () => {
}).$mount();
});
+ afterEach(() => {
+ component.$destroy();
+ });
+
describe('assigned', () => {
describe('diffLines', () => {
it('should normalize lines for empty cells', () => {
diff --git a/spec/javascripts/diffs/mock_data/diff_discussions.js b/spec/javascripts/diffs/mock_data/diff_discussions.js
index 4a091b4580b..fd5dd611383 100644
--- a/spec/javascripts/diffs/mock_data/diff_discussions.js
+++ b/spec/javascripts/diffs/mock_data/diff_discussions.js
@@ -288,6 +288,7 @@ export default {
external_storage: null,
old_path_html: 'CHANGELOG_OLD',
new_path_html: 'CHANGELOG',
+ is_fully_expanded: true,
context_lines_path:
'/gitlab-org/gitlab-test/blob/c48ee0d1bf3b30453f5b32250ce03134beaa6d13/CHANGELOG/diff',
highlighted_diff_lines: [
diff --git a/spec/javascripts/diffs/store/actions_spec.js b/spec/javascripts/diffs/store/actions_spec.js
index e47c7906fcb..070bfb2ccd0 100644
--- a/spec/javascripts/diffs/store/actions_spec.js
+++ b/spec/javascripts/diffs/store/actions_spec.js
@@ -30,6 +30,11 @@ import actions, {
setRenderTreeList,
setShowWhitespace,
setRenderIt,
+ requestFullDiff,
+ receiveFullDiffSucess,
+ receiveFullDiffError,
+ fetchFullDiff,
+ toggleFullDiff,
} from '~/diffs/store/actions';
import eventHub from '~/notes/event_hub';
import * as types from '~/diffs/store/mutation_types';
@@ -847,4 +852,129 @@ describe('DiffsStoreActions', () => {
testAction(setRenderIt, 'file', {}, [{ type: types.RENDER_FILE, payload: 'file' }], [], done);
});
});
+
+ describe('requestFullDiff', () => {
+ it('commits REQUEST_FULL_DIFF', done => {
+ testAction(
+ requestFullDiff,
+ 'file',
+ {},
+ [{ type: types.REQUEST_FULL_DIFF, payload: 'file' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveFullDiffSucess', () => {
+    it('commits RECEIVE_FULL_DIFF_SUCCESS', done => {
+ testAction(
+ receiveFullDiffSucess,
+ { filePath: 'test', data: 'test' },
+ {},
+ [{ type: types.RECEIVE_FULL_DIFF_SUCCESS, payload: { filePath: 'test', data: 'test' } }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveFullDiffError', () => {
+    it('commits RECEIVE_FULL_DIFF_ERROR', done => {
+ testAction(
+ receiveFullDiffError,
+ 'file',
+ {},
+ [{ type: types.RECEIVE_FULL_DIFF_ERROR, payload: 'file' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('fetchFullDiff', () => {
+ let mock;
+ let scrollToElementSpy;
+
+ beforeEach(() => {
+ scrollToElementSpy = spyOnDependency(actions, 'scrollToElement').and.stub();
+
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+ beforeEach(() => {
+ mock.onGet(`${gl.TEST_HOST}/context`).replyOnce(200, ['test']);
+ });
+
+ it('dispatches receiveFullDiffSucess', done => {
+ testAction(
+ fetchFullDiff,
+ { context_lines_path: `${gl.TEST_HOST}/context`, file_path: 'test', file_hash: 'test' },
+ null,
+ [],
+ [{ type: 'receiveFullDiffSucess', payload: { filePath: 'test', data: ['test'] } }],
+ done,
+ );
+ });
+
+ it('scrolls to element', done => {
+ fetchFullDiff(
+ { dispatch() {} },
+ { context_lines_path: `${gl.TEST_HOST}/context`, file_path: 'test', file_hash: 'test' },
+ )
+ .then(() => {
+ expect(scrollToElementSpy).toHaveBeenCalledWith('#test');
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onGet(`${gl.TEST_HOST}/context`).replyOnce(500);
+ });
+
+ it('dispatches receiveFullDiffError', done => {
+ testAction(
+ fetchFullDiff,
+ { context_lines_path: `${gl.TEST_HOST}/context`, file_path: 'test', file_hash: 'test' },
+ null,
+ [],
+ [{ type: 'receiveFullDiffError', payload: 'test' }],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('toggleFullDiff', () => {
+ let state;
+
+ beforeEach(() => {
+ state = {
+ diffFiles: [{ file_path: 'test', isShowingFullFile: false }],
+ };
+ });
+
+ it('dispatches fetchFullDiff when file is not expanded', done => {
+ testAction(
+ toggleFullDiff,
+ 'test',
+ state,
+ [],
+ [
+ { type: 'requestFullDiff', payload: 'test' },
+ { type: 'fetchFullDiff', payload: state.diffFiles[0] },
+ ],
+ done,
+ );
+ });
+ });
});
diff --git a/spec/javascripts/diffs/store/mutations_spec.js b/spec/javascripts/diffs/store/mutations_spec.js
index 09ee691b602..270e7d75666 100644
--- a/spec/javascripts/diffs/store/mutations_spec.js
+++ b/spec/javascripts/diffs/store/mutations_spec.js
@@ -680,4 +680,66 @@ describe('DiffsStoreMutations', () => {
expect(state.showWhitespace).toBe(false);
});
});
+
+ describe('REQUEST_FULL_DIFF', () => {
+ it('sets isLoadingFullFile to true', () => {
+ const state = {
+ diffFiles: [{ file_path: 'test', isLoadingFullFile: false }],
+ };
+
+ mutations[types.REQUEST_FULL_DIFF](state, 'test');
+
+ expect(state.diffFiles[0].isLoadingFullFile).toBe(true);
+ });
+ });
+
+ describe('RECEIVE_FULL_DIFF_ERROR', () => {
+ it('sets isLoadingFullFile to false', () => {
+ const state = {
+ diffFiles: [{ file_path: 'test', isLoadingFullFile: true }],
+ };
+
+ mutations[types.RECEIVE_FULL_DIFF_ERROR](state, 'test');
+
+ expect(state.diffFiles[0].isLoadingFullFile).toBe(false);
+ });
+ });
+
+ describe('RECEIVE_FULL_DIFF_SUCCESS', () => {
+ it('sets isLoadingFullFile to false', () => {
+ const state = {
+ diffFiles: [
+ {
+ file_path: 'test',
+ isLoadingFullFile: true,
+ isShowingFullFile: false,
+ highlighted_diff_lines: [],
+ parallel_diff_lines: [],
+ },
+ ],
+ };
+
+ mutations[types.RECEIVE_FULL_DIFF_SUCCESS](state, { filePath: 'test', data: [] });
+
+ expect(state.diffFiles[0].isLoadingFullFile).toBe(false);
+ });
+
+ it('sets isShowingFullFile to true', () => {
+ const state = {
+ diffFiles: [
+ {
+ file_path: 'test',
+ isLoadingFullFile: true,
+ isShowingFullFile: false,
+ highlighted_diff_lines: [],
+ parallel_diff_lines: [],
+ },
+ ],
+ };
+
+ mutations[types.RECEIVE_FULL_DIFF_SUCCESS](state, { filePath: 'test', data: [] });
+
+ expect(state.diffFiles[0].isShowingFullFile).toBe(true);
+ });
+ });
});
diff --git a/spec/javascripts/diffs/store/utils_spec.js b/spec/javascripts/diffs/store/utils_spec.js
index 599ea9cd420..1f877910125 100644
--- a/spec/javascripts/diffs/store/utils_spec.js
+++ b/spec/javascripts/diffs/store/utils_spec.js
@@ -781,4 +781,49 @@ describe('DiffsStoreUtils', () => {
]);
});
});
+
+ describe('convertExpandLines', () => {
+ it('converts expanded lines to normal lines', () => {
+ const diffLines = [
+ {
+ type: 'match',
+ old_line: 1,
+ new_line: 1,
+ },
+ {
+ type: '',
+ old_line: 2,
+ new_line: 2,
+ },
+ ];
+
+ const lines = utils.convertExpandLines({
+ diffLines,
+ data: [{ text: 'expanded' }],
+ typeKey: 'type',
+ oldLineKey: 'old_line',
+ newLineKey: 'new_line',
+ mapLine: ({ line, oldLine, newLine }) => ({
+ ...line,
+ old_line: oldLine,
+ new_line: newLine,
+ }),
+ });
+
+ expect(lines).toEqual([
+ {
+ text: 'expanded',
+ new_line: 1,
+ old_line: 1,
+ discussions: [],
+ hasForm: false,
+ },
+ {
+ type: '',
+ old_line: 2,
+ new_line: 2,
+ },
+ ]);
+ });
+ });
});
diff --git a/spec/javascripts/dirty_submit/dirty_submit_form_spec.js b/spec/javascripts/dirty_submit/dirty_submit_form_spec.js
index ae2a785de52..95cc90dcb0f 100644
--- a/spec/javascripts/dirty_submit/dirty_submit_form_spec.js
+++ b/spec/javascripts/dirty_submit/dirty_submit_form_spec.js
@@ -13,6 +13,8 @@ function expectToToggleDisableOnDirtyUpdate(submit, input) {
}
describe('DirtySubmitForm', () => {
+ DirtySubmitForm.THROTTLE_DURATION = 0;
+
it('disables submit until there are changes', done => {
const { form, input, submit } = createForm();
diff --git a/spec/javascripts/emoji_spec.js b/spec/javascripts/emoji_spec.js
index 3db4d9800f1..0ac375145be 100644
--- a/spec/javascripts/emoji_spec.js
+++ b/spec/javascripts/emoji_spec.js
@@ -1,4 +1,6 @@
-import { glEmojiTag } from '~/emoji';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import { initEmojiMap, glEmojiTag, EMOJI_VERSION } from '~/emoji';
import isEmojiUnicodeSupported, {
isFlagEmoji,
isRainbowFlagEmoji,
@@ -7,6 +9,7 @@ import isEmojiUnicodeSupported, {
isHorceRacingSkinToneComboEmoji,
isPersonZwjEmoji,
} from '~/emoji/support/is_emoji_unicode_supported';
+import installGlEmojiElement from '~/behaviors/gl_emoji';
const emptySupportMap = {
personZwj: false,
@@ -31,34 +34,35 @@ const emojiFixtureMap = {
bomb: {
name: 'bomb',
moji: '💣',
- unicodeVersion: '6.0',
+ uni: '6.0',
},
construction_worker_tone5: {
name: 'construction_worker_tone5',
moji: '👷🏿',
- unicodeVersion: '8.0',
+ uni: '8.0',
},
five: {
name: 'five',
moji: '5️⃣',
- unicodeVersion: '3.0',
+ uni: '3.0',
},
grey_question: {
name: 'grey_question',
moji: '❔',
- unicodeVersion: '6.0',
+ uni: '6.0',
},
};
function markupToDomElement(markup) {
const div = document.createElement('div');
div.innerHTML = markup;
+ document.body.appendChild(div);
return div.firstElementChild;
}
-function testGlEmojiImageFallback(element, name, src) {
+function testGlEmojiImageFallback(element, name) {
expect(element.tagName.toLowerCase()).toBe('img');
- expect(element.getAttribute('src')).toBe(src);
+ expect(element.getAttribute('src')).toBe(`/-/emojis/${EMOJI_VERSION}/${name}.png`);
expect(element.getAttribute('title')).toBe(`:${name}:`);
expect(element.getAttribute('alt')).toBe(`:${name}:`);
}
@@ -68,12 +72,11 @@ const defaults = {
sprite: false,
};
-function testGlEmojiElement(element, name, unicodeVersion, unicodeMoji, options = {}) {
+function testGlEmojiElement(element, name, uni, unicodeMoji, options = {}) {
const opts = Object.assign({}, defaults, options);
expect(element.tagName.toLowerCase()).toBe('gl-emoji');
expect(element.dataset.name).toBe(name);
- expect(element.dataset.fallbackSrc.length).toBeGreaterThan(0);
- expect(element.dataset.unicodeVersion).toBe(unicodeVersion);
+ expect(element.dataset.uni).toBe(uni);
const fallbackSpriteClass = `emoji-${name}`;
if (opts.sprite) {
@@ -86,7 +89,7 @@ function testGlEmojiElement(element, name, unicodeVersion, unicodeMoji, options
if (opts.forceFallback && !opts.sprite) {
// Check for image fallback
- testGlEmojiImageFallback(element.firstElementChild, name, element.dataset.fallbackSrc);
+ testGlEmojiImageFallback(element.firstElementChild, name);
} else {
// Otherwise make sure things are still unicode text
expect(element.textContent.trim()).toBe(unicodeMoji);
@@ -94,101 +97,143 @@ function testGlEmojiElement(element, name, unicodeVersion, unicodeMoji, options
}
describe('gl_emoji', () => {
+ beforeAll(() => {
+ installGlEmojiElement();
+ });
+
+ let mock;
+ const emojiData = getJSONFixture('emojis/emojis.json');
+
+ beforeEach(function(done) {
+ mock = new MockAdapter(axios);
+ mock.onGet(`/-/emojis/${EMOJI_VERSION}/emojis.json`).reply(200, emojiData);
+
+ initEmojiMap()
+ .then(() => {
+ done();
+ })
+ .catch(() => {
+ done();
+ });
+ });
+
+ afterEach(function() {
+ mock.restore();
+ });
+
describe('glEmojiTag', () => {
- it('bomb emoji', () => {
+ it('bomb emoji', done => {
const emojiKey = 'bomb';
const markup = glEmojiTag(emojiFixtureMap[emojiKey].name);
const glEmojiElement = markupToDomElement(markup);
- testGlEmojiElement(
- glEmojiElement,
- emojiFixtureMap[emojiKey].name,
- emojiFixtureMap[emojiKey].unicodeVersion,
- emojiFixtureMap[emojiKey].moji,
- );
+ setTimeout(() => {
+ testGlEmojiElement(
+ glEmojiElement,
+ emojiFixtureMap[emojiKey].name,
+ emojiFixtureMap[emojiKey].uni,
+ emojiFixtureMap[emojiKey].moji,
+ );
+ done();
+ });
});
- it('bomb emoji with image fallback', () => {
+ it('bomb emoji with image fallback', done => {
const emojiKey = 'bomb';
const markup = glEmojiTag(emojiFixtureMap[emojiKey].name, {
forceFallback: true,
});
const glEmojiElement = markupToDomElement(markup);
- testGlEmojiElement(
- glEmojiElement,
- emojiFixtureMap[emojiKey].name,
- emojiFixtureMap[emojiKey].unicodeVersion,
- emojiFixtureMap[emojiKey].moji,
- {
- forceFallback: true,
- },
- );
+ setTimeout(() => {
+ testGlEmojiElement(
+ glEmojiElement,
+ emojiFixtureMap[emojiKey].name,
+ emojiFixtureMap[emojiKey].uni,
+ emojiFixtureMap[emojiKey].moji,
+ {
+ forceFallback: true,
+ },
+ );
+ done();
+ });
});
- it('bomb emoji with sprite fallback readiness', () => {
+ it('bomb emoji with sprite fallback readiness', done => {
const emojiKey = 'bomb';
const markup = glEmojiTag(emojiFixtureMap[emojiKey].name, {
sprite: true,
});
const glEmojiElement = markupToDomElement(markup);
- testGlEmojiElement(
- glEmojiElement,
- emojiFixtureMap[emojiKey].name,
- emojiFixtureMap[emojiKey].unicodeVersion,
- emojiFixtureMap[emojiKey].moji,
- {
- sprite: true,
- },
- );
+ setTimeout(() => {
+ testGlEmojiElement(
+ glEmojiElement,
+ emojiFixtureMap[emojiKey].name,
+ emojiFixtureMap[emojiKey].uni,
+ emojiFixtureMap[emojiKey].moji,
+ {
+ sprite: true,
+ },
+ );
+ done();
+ });
});
- it('bomb emoji with sprite fallback', () => {
+ it('bomb emoji with sprite fallback', done => {
const emojiKey = 'bomb';
const markup = glEmojiTag(emojiFixtureMap[emojiKey].name, {
forceFallback: true,
sprite: true,
});
const glEmojiElement = markupToDomElement(markup);
- testGlEmojiElement(
- glEmojiElement,
- emojiFixtureMap[emojiKey].name,
- emojiFixtureMap[emojiKey].unicodeVersion,
- emojiFixtureMap[emojiKey].moji,
- {
- forceFallback: true,
- sprite: true,
- },
- );
+ setTimeout(() => {
+ testGlEmojiElement(
+ glEmojiElement,
+ emojiFixtureMap[emojiKey].name,
+ emojiFixtureMap[emojiKey].uni,
+ emojiFixtureMap[emojiKey].moji,
+ {
+ forceFallback: true,
+ sprite: true,
+ },
+ );
+ done();
+ });
});
- it('question mark when invalid emoji name given', () => {
+ it('question mark when invalid emoji name given', done => {
const name = 'invalid_emoji';
const emojiKey = 'grey_question';
const markup = glEmojiTag(name);
const glEmojiElement = markupToDomElement(markup);
- testGlEmojiElement(
- glEmojiElement,
- emojiFixtureMap[emojiKey].name,
- emojiFixtureMap[emojiKey].unicodeVersion,
- emojiFixtureMap[emojiKey].moji,
- );
+ setTimeout(() => {
+ testGlEmojiElement(
+ glEmojiElement,
+ emojiFixtureMap[emojiKey].name,
+ emojiFixtureMap[emojiKey].uni,
+ emojiFixtureMap[emojiKey].moji,
+ );
+ done();
+ });
});
- it('question mark with image fallback when invalid emoji name given', () => {
+ it('question mark with image fallback when invalid emoji name given', done => {
const name = 'invalid_emoji';
const emojiKey = 'grey_question';
const markup = glEmojiTag(name, {
forceFallback: true,
});
const glEmojiElement = markupToDomElement(markup);
- testGlEmojiElement(
- glEmojiElement,
- emojiFixtureMap[emojiKey].name,
- emojiFixtureMap[emojiKey].unicodeVersion,
- emojiFixtureMap[emojiKey].moji,
- {
- forceFallback: true,
- },
- );
+ setTimeout(() => {
+ testGlEmojiElement(
+ glEmojiElement,
+ emojiFixtureMap[emojiKey].name,
+ emojiFixtureMap[emojiKey].uni,
+ emojiFixtureMap[emojiKey].moji,
+ {
+ forceFallback: true,
+ },
+ );
+ done();
+ });
});
});
@@ -389,7 +434,7 @@ describe('gl_emoji', () => {
const isSupported = isEmojiUnicodeSupported(
unicodeSupportMap,
emojiFixtureMap[emojiKey].moji,
- emojiFixtureMap[emojiKey].unicodeVersion,
+ emojiFixtureMap[emojiKey].uni,
);
expect(isSupported).toBeTruthy();
@@ -401,7 +446,7 @@ describe('gl_emoji', () => {
const isSupported = isEmojiUnicodeSupported(
unicodeSupportMap,
emojiFixtureMap[emojiKey].moji,
- emojiFixtureMap[emojiKey].unicodeVersion,
+ emojiFixtureMap[emojiKey].uni,
);
expect(isSupported).toBeFalsy();
@@ -415,7 +460,7 @@ describe('gl_emoji', () => {
const isSupported = isEmojiUnicodeSupported(
unicodeSupportMap,
emojiFixtureMap[emojiKey].moji,
- emojiFixtureMap[emojiKey].unicodeVersion,
+ emojiFixtureMap[emojiKey].uni,
);
expect(isSupported).toBeFalsy();
@@ -441,7 +486,7 @@ describe('gl_emoji', () => {
const isSupported = isEmojiUnicodeSupported(
unicodeSupportMap,
emojiFixtureMap[emojiKey].moji,
- emojiFixtureMap[emojiKey].unicodeVersion,
+ emojiFixtureMap[emojiKey].uni,
);
expect(isSupported).toBeFalsy();
@@ -459,7 +504,7 @@ describe('gl_emoji', () => {
const isSupported = isEmojiUnicodeSupported(
unicodeSupportMap,
emojiFixtureMap[emojiKey].moji,
- emojiFixtureMap[emojiKey].unicodeVersion,
+ emojiFixtureMap[emojiKey].uni,
);
expect(isSupported).toBeTruthy();
@@ -477,7 +522,7 @@ describe('gl_emoji', () => {
const isSupported = isEmojiUnicodeSupported(
unicodeSupportMap,
emojiFixtureMap[emojiKey].moji,
- emojiFixtureMap[emojiKey].unicodeVersion,
+ emojiFixtureMap[emojiKey].uni,
);
expect(isSupported).toBeFalsy();
diff --git a/spec/javascripts/environments/confirm_rollback_modal_spec.js b/spec/javascripts/environments/confirm_rollback_modal_spec.js
new file mode 100644
index 00000000000..05715bce38f
--- /dev/null
+++ b/spec/javascripts/environments/confirm_rollback_modal_spec.js
@@ -0,0 +1,70 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
+import ConfirmRollbackModal from '~/environments/components/confirm_rollback_modal.vue';
+import eventHub from '~/environments/event_hub';
+
+describe('Confirm Rollback Modal Component', () => {
+ let environment;
+
+ beforeEach(() => {
+ environment = {
+ name: 'test',
+ last_deployment: {
+ commit: {
+ short_id: 'abc0123',
+ },
+ },
+ modalId: 'test',
+ };
+ });
+
+ it('should show "Rollback" when isLastDeployment is false', () => {
+ const component = shallowMount(ConfirmRollbackModal, {
+ propsData: {
+ environment: {
+ ...environment,
+ isLastDeployment: false,
+ },
+ },
+ });
+ const modal = component.find(GlModal);
+
+ expect(modal.attributes('title')).toContain('Rollback');
+ expect(modal.attributes('title')).toContain('test');
+ expect(modal.attributes('ok-title')).toBe('Rollback');
+ expect(modal.text()).toContain('commit abc0123');
+ expect(modal.text()).toContain('Are you sure you want to continue?');
+ });
+
+ it('should show "Re-deploy" when isLastDeployment is true', () => {
+ const component = shallowMount(ConfirmRollbackModal, {
+ propsData: {
+ environment: {
+ ...environment,
+ isLastDeployment: true,
+ },
+ },
+ });
+ const modal = component.find(GlModal);
+
+ expect(modal.attributes('title')).toContain('Re-deploy');
+ expect(modal.attributes('title')).toContain('test');
+ expect(modal.attributes('ok-title')).toBe('Re-deploy');
+ expect(modal.text()).toContain('commit abc0123');
+ expect(modal.text()).toContain('Are you sure you want to continue?');
+ });
+
+ it('should emit the "rollback" event when "ok" is clicked', () => {
+ environment = { ...environment, isLastDeployment: true };
+ const component = shallowMount(ConfirmRollbackModal, {
+ propsData: {
+ environment,
+ },
+ });
+ const eventHubSpy = spyOn(eventHub, '$emit');
+ const modal = component.find(GlModal);
+ modal.vm.$emit('ok');
+
+ expect(eventHubSpy).toHaveBeenCalledWith('rollbackEnvironment', environment);
+ });
+});
diff --git a/spec/javascripts/environments/environment_rollback_spec.js b/spec/javascripts/environments/environment_rollback_spec.js
index 79f33c5bc8a..8c47f6a12c0 100644
--- a/spec/javascripts/environments/environment_rollback_spec.js
+++ b/spec/javascripts/environments/environment_rollback_spec.js
@@ -1,8 +1,11 @@
import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import eventHub from '~/environments/event_hub';
import rollbackComp from '~/environments/components/environment_rollback.vue';
describe('Rollback Component', () => {
- const retryURL = 'https://gitlab.com/retry';
+ const retryUrl = 'https://gitlab.com/retry';
let RollbackComponent;
beforeEach(() => {
@@ -13,8 +16,9 @@ describe('Rollback Component', () => {
const component = new RollbackComponent({
el: document.querySelector('.test-dom-element'),
propsData: {
- retryUrl: retryURL,
+ retryUrl,
isLastDeployment: true,
+ environment: {},
},
}).$mount();
@@ -25,11 +29,33 @@ describe('Rollback Component', () => {
const component = new RollbackComponent({
el: document.querySelector('.test-dom-element'),
propsData: {
- retryUrl: retryURL,
+ retryUrl,
isLastDeployment: false,
+ environment: {},
},
}).$mount();
expect(component.$el).toHaveSpriteIcon('redo');
});
+
+ it('should emit a "rollback" event on button click', () => {
+ const eventHubSpy = spyOn(eventHub, '$emit');
+ const component = shallowMount(RollbackComponent, {
+ propsData: {
+ retryUrl,
+ environment: {
+ name: 'test',
+ },
+ },
+ });
+ const button = component.find(GlButton);
+
+ button.vm.$emit('click');
+
+ expect(eventHubSpy).toHaveBeenCalledWith('requestRollbackEnvironment', {
+ retryUrl,
+ isLastDeployment: true,
+ name: 'test',
+ });
+ });
});
diff --git a/spec/javascripts/error_tracking/components/error_tracking_list_spec.js b/spec/javascripts/error_tracking/components/error_tracking_list_spec.js
index 08bbb390993..503af3920a8 100644
--- a/spec/javascripts/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/javascripts/error_tracking/components/error_tracking_list_spec.js
@@ -1,7 +1,7 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
import ErrorTrackingList from '~/error_tracking/components/error_tracking_list.vue';
-import { GlButton, GlEmptyState, GlLoadingIcon, GlTable } from '@gitlab/ui';
+import { GlButton, GlEmptyState, GlLoadingIcon, GlTable, GlLink } from '@gitlab/ui';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -9,6 +9,7 @@ localVue.use(Vuex);
describe('ErrorTrackingList', () => {
let store;
let wrapper;
+ let actions;
function mountComponent({ errorTrackingEnabled = true } = {}) {
wrapper = shallowMount(ErrorTrackingList, {
@@ -20,12 +21,17 @@ describe('ErrorTrackingList', () => {
errorTrackingEnabled,
illustrationPath: 'illustration/path',
},
+ stubs: {
+ 'gl-link': GlLink,
+ },
});
}
beforeEach(() => {
- const actions = {
+ actions = {
getErrorList: () => {},
+ startPolling: () => {},
+ restartPolling: jasmine.createSpy('restartPolling'),
};
const state = {
@@ -83,6 +89,18 @@ describe('ErrorTrackingList', () => {
expect(wrapper.find(GlTable).exists()).toBeTruthy();
expect(wrapper.find(GlButton).exists()).toBeTruthy();
});
+
+ it('shows a message prompting to refresh', () => {
+ const refreshLink = wrapper.vm.$refs.empty.querySelector('a');
+
+ expect(refreshLink.textContent.trim()).toContain('Check again');
+ });
+
+ it('restarts polling', () => {
+ wrapper.find('.js-try-again').trigger('click');
+
+ expect(actions.restartPolling).toHaveBeenCalled();
+ });
});
describe('error tracking feature disabled', () => {
diff --git a/spec/javascripts/error_tracking_settings/components/app_spec.js b/spec/javascripts/error_tracking_settings/components/app_spec.js
new file mode 100644
index 00000000000..2e52a45fd34
--- /dev/null
+++ b/spec/javascripts/error_tracking_settings/components/app_spec.js
@@ -0,0 +1,63 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import ErrorTrackingSettings from '~/error_tracking_settings/components/app.vue';
+import ErrorTrackingForm from '~/error_tracking_settings/components/error_tracking_form.vue';
+import ProjectDropdown from '~/error_tracking_settings/components/project_dropdown.vue';
+import createStore from '~/error_tracking_settings/store';
+import { TEST_HOST } from 'spec/test_constants';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('error tracking settings app', () => {
+ let store;
+ let wrapper;
+
+ function mountComponent() {
+ wrapper = shallowMount(ErrorTrackingSettings, {
+ localVue,
+ store, // Override the imported store
+ propsData: {
+ initialEnabled: 'true',
+ initialApiHost: TEST_HOST,
+ initialToken: 'someToken',
+ initialProject: null,
+ listProjectsEndpoint: TEST_HOST,
+ operationsSettingsEndpoint: TEST_HOST,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ store = createStore();
+
+ mountComponent();
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ describe('section', () => {
+ it('renders the form and dropdown', () => {
+ expect(wrapper.find(ErrorTrackingForm).exists()).toBeTruthy();
+ expect(wrapper.find(ProjectDropdown).exists()).toBeTruthy();
+ });
+
+ it('renders the Save Changes button', () => {
+ expect(wrapper.find('.js-error-tracking-button').exists()).toBeTruthy();
+ });
+
+ it('enables the button by default', () => {
+ expect(wrapper.find('.js-error-tracking-button').attributes('disabled')).toBeFalsy();
+ });
+
+ it('disables the button when saving', () => {
+ store.state.settingsLoading = true;
+
+ expect(wrapper.find('.js-error-tracking-button').attributes('disabled')).toBeTruthy();
+ });
+ });
+});
diff --git a/spec/javascripts/error_tracking_settings/components/error_tracking_form_spec.js b/spec/javascripts/error_tracking_settings/components/error_tracking_form_spec.js
new file mode 100644
index 00000000000..23e57c4bbf1
--- /dev/null
+++ b/spec/javascripts/error_tracking_settings/components/error_tracking_form_spec.js
@@ -0,0 +1,91 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlButton, GlFormInput } from '@gitlab/ui';
+import ErrorTrackingForm from '~/error_tracking_settings/components/error_tracking_form.vue';
+import { defaultProps } from '../mock';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('error tracking settings form', () => {
+ let wrapper;
+
+ function mountComponent() {
+ wrapper = shallowMount(ErrorTrackingForm, {
+ localVue,
+ propsData: defaultProps,
+ });
+ }
+
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ describe('an empty form', () => {
+ it('is rendered', () => {
+ expect(wrapper.findAll(GlFormInput).length).toBe(2);
+ expect(wrapper.find(GlFormInput).attributes('id')).toBe('error-tracking-api-host');
+ expect(
+ wrapper
+ .findAll(GlFormInput)
+ .at(1)
+ .attributes('id'),
+ ).toBe('error-tracking-token');
+
+ expect(wrapper.findAll(GlButton).exists()).toBe(true);
+ });
+
+ it('is rendered with labels and placeholders', () => {
+ const pageText = wrapper.text();
+
+ expect(pageText).toContain('Find your hostname in your Sentry account settings page');
+ expect(pageText).toContain(
+ "After adding your Auth Token, use the 'Connect' button to load projects",
+ );
+
+ expect(pageText).not.toContain('Connection has failed. Re-check Auth Token and try again');
+ expect(
+ wrapper
+ .findAll(GlFormInput)
+ .at(0)
+ .attributes('placeholder'),
+ ).toContain('https://mysentryserver.com');
+ });
+ });
+
+ describe('after a successful connection', () => {
+ beforeEach(() => {
+ wrapper.setProps({ connectSuccessful: true });
+ });
+
+ it('shows the success checkmark', () => {
+ expect(wrapper.find('.js-error-tracking-connect-success').isVisible()).toBe(true);
+ });
+
+ it('does not show an error', () => {
+ expect(wrapper.text()).not.toContain(
+ 'Connection has failed. Re-check Auth Token and try again',
+ );
+ });
+ });
+
+ describe('after an unsuccessful connection', () => {
+ beforeEach(() => {
+ wrapper.setProps({ connectError: true });
+ });
+
+ it('does not show the check mark', () => {
+ expect(wrapper.find('.js-error-tracking-connect-success').isVisible()).toBe(false);
+ });
+
+ it('shows an error', () => {
+ expect(wrapper.text()).toContain('Connection has failed. Re-check Auth Token and try again');
+ });
+ });
+});
diff --git a/spec/javascripts/error_tracking_settings/components/project_dropdown_spec.js b/spec/javascripts/error_tracking_settings/components/project_dropdown_spec.js
new file mode 100644
index 00000000000..8e5dbe28452
--- /dev/null
+++ b/spec/javascripts/error_tracking_settings/components/project_dropdown_spec.js
@@ -0,0 +1,109 @@
+import _ from 'underscore';
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import ProjectDropdown from '~/error_tracking_settings/components/project_dropdown.vue';
+import { defaultProps, projectList, staleProject } from '../mock';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('error tracking settings project dropdown', () => {
+ let wrapper;
+
+ function mountComponent() {
+ wrapper = shallowMount(ProjectDropdown, {
+ localVue,
+ propsData: {
+ ..._.pick(
+ defaultProps,
+ 'dropdownLabel',
+ 'invalidProjectLabel',
+ 'projects',
+ 'projectSelectionLabel',
+ 'selectedProject',
+ 'token',
+ ),
+ hasProjects: false,
+ isProjectInvalid: false,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ describe('empty project list', () => {
+ it('renders the dropdown', () => {
+ expect(wrapper.find('#project-dropdown').exists()).toBeTruthy();
+ expect(wrapper.find(GlDropdown).exists()).toBeTruthy();
+ });
+
+ it('shows helper text', () => {
+ expect(wrapper.find('.js-project-dropdown-label').exists()).toBeTruthy();
+ expect(wrapper.find('.js-project-dropdown-label').text()).toContain(
+ 'To enable project selection',
+ );
+ });
+
+ it('does not show an error', () => {
+ expect(wrapper.find('.js-project-dropdown-error').exists()).toBeFalsy();
+ });
+
+ it('does not contain any dropdown items', () => {
+ expect(wrapper.find(GlDropdownItem).exists()).toBeFalsy();
+ expect(wrapper.find(GlDropdown).props('text')).toBe('No projects available');
+ });
+ });
+
+ describe('populated project list', () => {
+ beforeEach(() => {
+ wrapper.setProps({ projects: _.clone(projectList), hasProjects: true });
+ });
+
+ it('renders the dropdown', () => {
+ expect(wrapper.find('#project-dropdown').exists()).toBeTruthy();
+ expect(wrapper.find(GlDropdown).exists()).toBeTruthy();
+ });
+
+ it('contains a number of dropdown items', () => {
+ expect(wrapper.find(GlDropdownItem).exists()).toBeTruthy();
+ expect(wrapper.findAll(GlDropdownItem).length).toBe(2);
+ });
+ });
+
+ describe('selected project', () => {
+ const selectedProject = _.clone(projectList[0]);
+
+ beforeEach(() => {
+ wrapper.setProps({ projects: _.clone(projectList), selectedProject, hasProjects: true });
+ });
+
+ it('does not show helper text', () => {
+ expect(wrapper.find('.js-project-dropdown-label').exists()).toBeFalsy();
+ expect(wrapper.find('.js-project-dropdown-error').exists()).toBeFalsy();
+ });
+ });
+
+ describe('invalid project selected', () => {
+ beforeEach(() => {
+ wrapper.setProps({
+ projects: _.clone(projectList),
+ selectedProject: staleProject,
+ isProjectInvalid: true,
+ });
+ });
+
+    it('displays an error', () => {
+ expect(wrapper.find('.js-project-dropdown-label').exists()).toBeFalsy();
+ expect(wrapper.find('.js-project-dropdown-error').exists()).toBeTruthy();
+ });
+ });
+});
diff --git a/spec/javascripts/error_tracking_settings/mock.js b/spec/javascripts/error_tracking_settings/mock.js
new file mode 100644
index 00000000000..32cdba33c14
--- /dev/null
+++ b/spec/javascripts/error_tracking_settings/mock.js
@@ -0,0 +1,92 @@
+import createStore from '~/error_tracking_settings/store';
+import { TEST_HOST } from 'spec/test_constants';
+
+const defaultStore = createStore();
+
+export const projectList = [
+ {
+ name: 'name',
+ slug: 'slug',
+ organizationName: 'organizationName',
+ organizationSlug: 'organizationSlug',
+ },
+ {
+ name: 'name2',
+ slug: 'slug2',
+ organizationName: 'organizationName2',
+ organizationSlug: 'organizationSlug2',
+ },
+];
+
+export const staleProject = {
+ name: 'staleName',
+ slug: 'staleSlug',
+ organizationName: 'staleOrganizationName',
+ organizationSlug: 'staleOrganizationSlug',
+};
+
+export const normalizedProject = {
+ name: 'name',
+ slug: 'slug',
+ organizationName: 'organization_name',
+ organizationSlug: 'organization_slug',
+};
+
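+// sampleBackendProject is the snake_case payload shape the API returns;
+// normalizedProject above is the camelCase form expected after normalization.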
+export const sampleBackendProject = {
+ name: normalizedProject.name,
+ slug: normalizedProject.slug,
+ organization_name: normalizedProject.organizationName,
+ organization_slug: normalizedProject.organizationSlug,
+};
+
+export const sampleFrontendSettings = {
+ apiHost: 'apiHost',
+ enabled: false,
+ token: 'token',
+ selectedProject: {
+ slug: normalizedProject.slug,
+ name: normalizedProject.name,
+ organizationName: normalizedProject.organizationName,
+ organizationSlug: normalizedProject.organizationSlug,
+ },
+};
+
+export const transformedSettings = {
+ api_host: 'apiHost',
+ enabled: false,
+ token: 'token',
+ project: {
+ slug: normalizedProject.slug,
+ name: normalizedProject.name,
+ organization_name: normalizedProject.organizationName,
+ organization_slug: normalizedProject.organizationSlug,
+ },
+};
+
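+// defaultProps spreads the initial store state and its getters so component
+// specs can pass the values straight in as props without wiring up a real store.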
+export const defaultProps = {
+ ...defaultStore.state,
+ ...defaultStore.getters,
+};
+
+export const initialEmptyState = {
+ apiHost: '',
+ enabled: false,
+ project: null,
+ token: '',
+ listProjectsEndpoint: TEST_HOST,
+ operationsSettingsEndpoint: TEST_HOST,
+};
+
+export const initialPopulatedState = {
+ apiHost: 'apiHost',
+ enabled: true,
+ project: JSON.stringify(projectList[0]),
+ token: 'token',
+ listProjectsEndpoint: TEST_HOST,
+ operationsSettingsEndpoint: TEST_HOST,
+};
+
+export const projectWithHtmlTemplate = {
+ ...projectList[0],
+ name: '<strong>bold</strong>',
+};
diff --git a/spec/javascripts/error_tracking_settings/store/actions_spec.js b/spec/javascripts/error_tracking_settings/store/actions_spec.js
new file mode 100644
index 00000000000..0255b3a7aa4
--- /dev/null
+++ b/spec/javascripts/error_tracking_settings/store/actions_spec.js
@@ -0,0 +1,191 @@
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'spec/helpers/vuex_action_helper';
+import { TEST_HOST } from 'spec/test_constants';
+import axios from '~/lib/utils/axios_utils';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import actionsDefaultExport, * as actions from '~/error_tracking_settings/store/actions';
+import * as types from '~/error_tracking_settings/store/mutation_types';
+import defaultState from '~/error_tracking_settings/store/state';
+import { projectList } from '../mock';
+
+describe('error tracking settings actions', () => {
+ let state;
+
+ describe('project list actions', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ state = { ...defaultState(), listProjectsEndpoint: TEST_HOST };
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
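+    // The specs below call the vuex helper as testAction(action, payload, state,
+    // expectedMutations, expectedActions, done); the final argument may also be a
+    // callback that runs once the dispatched action settles.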
+ it('should request and transform the project list', done => {
+ mock.onPost(TEST_HOST).reply(() => [200, { projects: projectList }]);
+ testAction(
+ actions.fetchProjects,
+ null,
+ state,
+ [],
+ [
+ { type: 'requestProjects' },
+ {
+ type: 'receiveProjectsSuccess',
+ payload: projectList.map(convertObjectPropsToCamelCase),
+ },
+ ],
+ () => {
+ expect(mock.history.post.length).toBe(1);
+ done();
+ },
+ );
+ });
+
+ it('should handle a server error', done => {
+ mock.onPost(`${TEST_HOST}.json`).reply(() => [400]);
+ testAction(
+ actions.fetchProjects,
+ null,
+ state,
+ [],
+ [
+ { type: 'requestProjects' },
+ {
+ type: 'receiveProjectsError',
+ },
+ ],
+ () => {
+ expect(mock.history.post.length).toBe(1);
+ done();
+ },
+ );
+ });
+
+ it('should request projects correctly', done => {
+ testAction(actions.requestProjects, null, state, [{ type: types.RESET_CONNECT }], [], done);
+ });
+
+ it('should receive projects correctly', done => {
+ const testPayload = [];
+ testAction(
+ actions.receiveProjectsSuccess,
+ testPayload,
+ state,
+ [
+ { type: types.UPDATE_CONNECT_SUCCESS },
+ { type: types.RECEIVE_PROJECTS, payload: testPayload },
+ ],
+ [],
+ done,
+ );
+ });
+
+ it('should handle errors when receiving projects', done => {
+ const testPayload = [];
+ testAction(
+ actions.receiveProjectsError,
+ testPayload,
+ state,
+ [{ type: types.UPDATE_CONNECT_ERROR }, { type: types.CLEAR_PROJECTS }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('save changes actions', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ state = {
+ operationsSettingsEndpoint: TEST_HOST,
+ };
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('should save the page', done => {
+ const refreshCurrentPage = spyOnDependency(actionsDefaultExport, 'refreshCurrentPage');
+ mock.onPatch(TEST_HOST).reply(200);
+ testAction(actions.updateSettings, null, state, [], [{ type: 'requestSettings' }], () => {
+ expect(mock.history.patch.length).toBe(1);
+ expect(refreshCurrentPage).toHaveBeenCalled();
+ done();
+ });
+ });
+
+ it('should handle a server error', done => {
+ mock.onPatch(TEST_HOST).reply(400);
+ testAction(
+ actions.updateSettings,
+ null,
+ state,
+ [],
+ [
+ { type: 'requestSettings' },
+ {
+ type: 'receiveSettingsError',
+ payload: new Error('Request failed with status code 400'),
+ },
+ ],
+ () => {
+ expect(mock.history.patch.length).toBe(1);
+ done();
+ },
+ );
+ });
+
+ it('should request to save the page', done => {
+ testAction(
+ actions.requestSettings,
+ null,
+ state,
+ [{ type: types.UPDATE_SETTINGS_LOADING, payload: true }],
+ [],
+ done,
+ );
+ });
+
+ it('should handle errors when requesting to save the page', done => {
+ testAction(
+ actions.receiveSettingsError,
+ {},
+ state,
+ [{ type: types.UPDATE_SETTINGS_LOADING, payload: false }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('generic actions to update the store', () => {
+ const testData = 'test';
+ it('should reset the `connect success` flag when updating the api host', done => {
+ testAction(
+ actions.updateApiHost,
+ testData,
+ state,
+ [{ type: types.UPDATE_API_HOST, payload: testData }, { type: types.RESET_CONNECT }],
+ [],
+ done,
+ );
+ });
+
+ it('should reset the `connect success` flag when updating the token', done => {
+ testAction(
+ actions.updateToken,
+ testData,
+ state,
+ [{ type: types.UPDATE_TOKEN, payload: testData }, { type: types.RESET_CONNECT }],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/javascripts/error_tracking_settings/store/getters_spec.js b/spec/javascripts/error_tracking_settings/store/getters_spec.js
new file mode 100644
index 00000000000..2c5ff084b8a
--- /dev/null
+++ b/spec/javascripts/error_tracking_settings/store/getters_spec.js
@@ -0,0 +1,93 @@
+import * as getters from '~/error_tracking_settings/store/getters';
+import defaultState from '~/error_tracking_settings/store/state';
+import { projectList, projectWithHtmlTemplate, staleProject } from '../mock';
+
+describe('Error Tracking Settings - Getters', () => {
+ let state;
+
+ beforeEach(() => {
+ state = defaultState();
+ });
+
+ describe('hasProjects', () => {
+ it('should reflect when no projects exist', () => {
+ expect(getters.hasProjects(state)).toEqual(false);
+ });
+
+ it('should reflect when projects exist', () => {
+ state.projects = projectList;
+
+ expect(getters.hasProjects(state)).toEqual(true);
+ });
+ });
+
+ describe('isProjectInvalid', () => {
+ const mockGetters = { hasProjects: true };
+ it('should show when a project is valid', () => {
+ state.projects = projectList;
+ [state.selectedProject] = projectList;
+
+ expect(getters.isProjectInvalid(state, mockGetters)).toEqual(false);
+ });
+
+ it('should show when a project is invalid', () => {
+ state.projects = projectList;
+ state.selectedProject = staleProject;
+
+ expect(getters.isProjectInvalid(state, mockGetters)).toEqual(true);
+ });
+ });
+
+ describe('dropdownLabel', () => {
+ const mockGetters = { hasProjects: false };
+ it('should display correctly when there are no projects available', () => {
+ expect(getters.dropdownLabel(state, mockGetters)).toEqual('No projects available');
+ });
+
+ it('should display correctly when a project is selected', () => {
+ [state.selectedProject] = projectList;
+
+ expect(getters.dropdownLabel(state, mockGetters)).toEqual('organizationName | name');
+ });
+
+ it('should display correctly when no project is selected', () => {
+ state.projects = projectList;
+
+ expect(getters.dropdownLabel(state, { hasProjects: true })).toEqual('Select project');
+ });
+ });
+
+ describe('invalidProjectLabel', () => {
+ it('should display an error containing the project name', () => {
+ [state.selectedProject] = projectList;
+
+ expect(getters.invalidProjectLabel(state)).toEqual(
+ 'Project "name" is no longer available. Select another project to continue.',
+ );
+ });
+
+ it('should properly escape the label text', () => {
+ state.selectedProject = projectWithHtmlTemplate;
+
+ expect(getters.invalidProjectLabel(state)).toEqual(
+ 'Project "&lt;strong&gt;bold&lt;/strong&gt;" is no longer available. Select another project to continue.',
+ );
+ });
+ });
+
+ describe('projectSelectionLabel', () => {
+ it('should show the correct message when the token is empty', () => {
+ expect(getters.projectSelectionLabel(state)).toEqual(
+ 'To enable project selection, enter a valid Auth Token',
+ );
+ });
+
+ it('should show the correct message when token exists', () => {
+ state.token = 'test-token';
+
+ expect(getters.projectSelectionLabel(state)).toEqual(
+ "Click 'Connect' to re-establish the connection to Sentry and activate the dropdown.",
+ );
+ });
+ });
+});
diff --git a/spec/javascripts/error_tracking_settings/store/mutation_spec.js b/spec/javascripts/error_tracking_settings/store/mutation_spec.js
new file mode 100644
index 00000000000..bb1f1da784e
--- /dev/null
+++ b/spec/javascripts/error_tracking_settings/store/mutation_spec.js
@@ -0,0 +1,82 @@
+import { TEST_HOST } from 'spec/test_constants';
+import mutations from '~/error_tracking_settings/store/mutations';
+import defaultState from '~/error_tracking_settings/store/state';
+import * as types from '~/error_tracking_settings/store/mutation_types';
+import {
+ initialEmptyState,
+ initialPopulatedState,
+ projectList,
+ sampleBackendProject,
+ normalizedProject,
+} from '../mock';
+
+describe('error tracking settings mutations', () => {
+ describe('mutations', () => {
+ let state;
+
+ beforeEach(() => {
+ state = defaultState();
+ });
+
+ it('should create an empty initial state correctly', () => {
+ mutations[types.SET_INITIAL_STATE](state, {
+ ...initialEmptyState,
+ });
+
+ expect(state.apiHost).toEqual('');
+ expect(state.enabled).toEqual(false);
+ expect(state.selectedProject).toEqual(null);
+ expect(state.token).toEqual('');
+ expect(state.listProjectsEndpoint).toEqual(TEST_HOST);
+ expect(state.operationsSettingsEndpoint).toEqual(TEST_HOST);
+ });
+
+ it('should populate the initial state correctly', () => {
+ mutations[types.SET_INITIAL_STATE](state, {
+ ...initialPopulatedState,
+ });
+
+ expect(state.apiHost).toEqual('apiHost');
+ expect(state.enabled).toEqual(true);
+ expect(state.selectedProject).toEqual(projectList[0]);
+ expect(state.token).toEqual('token');
+ expect(state.listProjectsEndpoint).toEqual(TEST_HOST);
+ expect(state.operationsSettingsEndpoint).toEqual(TEST_HOST);
+ });
+
+ it('should receive projects successfully', () => {
+ mutations[types.RECEIVE_PROJECTS](state, [sampleBackendProject]);
+
+ expect(state.projects).toEqual([normalizedProject]);
+ });
+
+ it('should strip out unnecessary project properties', () => {
+ mutations[types.RECEIVE_PROJECTS](state, [
+ { ...sampleBackendProject, extra_property: 'extra_property' },
+ ]);
+
+ expect(state.projects).toEqual([normalizedProject]);
+ });
+
+ it('should update state when connect is successful', () => {
+ mutations[types.UPDATE_CONNECT_SUCCESS](state);
+
+ expect(state.connectSuccessful).toBe(true);
+ expect(state.connectError).toBe(false);
+ });
+
+ it('should update state when connect fails', () => {
+ mutations[types.UPDATE_CONNECT_ERROR](state);
+
+ expect(state.connectSuccessful).toBe(false);
+ expect(state.connectError).toBe(true);
+ });
+
+ it('should update state when connect is reset', () => {
+ mutations[types.RESET_CONNECT](state);
+
+ expect(state.connectSuccessful).toBe(false);
+ expect(state.connectError).toBe(false);
+ });
+ });
+});
diff --git a/spec/javascripts/error_tracking_settings/utils_spec.js b/spec/javascripts/error_tracking_settings/utils_spec.js
new file mode 100644
index 00000000000..4b144f7daf1
--- /dev/null
+++ b/spec/javascripts/error_tracking_settings/utils_spec.js
@@ -0,0 +1,29 @@
+import { transformFrontendSettings } from '~/error_tracking_settings/utils';
+import { sampleFrontendSettings, transformedSettings } from './mock';
+
+describe('error tracking settings utils', () => {
+ describe('data transform functions', () => {
+ it('should transform settings successfully for the backend', () => {
+ expect(transformFrontendSettings(sampleFrontendSettings)).toEqual(transformedSettings);
+ });
+
+ it('should transform empty values in the settings object to null', () => {
+ const emptyFrontendSettingsObject = {
+ apiHost: '',
+ enabled: false,
+ token: '',
+ selectedProject: null,
+ };
+ const transformedEmptySettingsObject = {
+ api_host: null,
+ enabled: false,
+ token: null,
+ project: null,
+ };
+
+ expect(transformFrontendSettings(emptyFrontendSettingsObject)).toEqual(
+ transformedEmptySettingsObject,
+ );
+ });
+ });
+});
diff --git a/spec/javascripts/fixtures/autocomplete_sources.rb b/spec/javascripts/fixtures/autocomplete_sources.rb
deleted file mode 100644
index c117fb7cd24..00000000000
--- a/spec/javascripts/fixtures/autocomplete_sources.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Projects::AutocompleteSourcesController, '(JavaScript fixtures)', type: :controller do
- include JavaScriptFixturesHelpers
-
- set(:admin) { create(:admin) }
- set(:group) { create(:group, name: 'frontend-fixtures') }
- set(:project) { create(:project, namespace: group, path: 'autocomplete-sources-project') }
- set(:issue) { create(:issue, project: project) }
-
- before(:all) do
- clean_frontend_fixtures('autocomplete_sources/')
- end
-
- before do
- sign_in(admin)
- end
-
- it 'autocomplete_sources/labels.json' do |example|
- issue.labels << create(:label, project: project, title: 'bug')
- issue.labels << create(:label, project: project, title: 'critical')
-
- create(:label, project: project, title: 'feature')
- create(:label, project: project, title: 'documentation')
-
- get :labels,
- format: :json,
- params: {
- namespace_id: group.path,
- project_id: project.path,
- type: issue.class.name,
- type_id: issue.id
- }
-
- expect(response).to be_success
- store_frontend_fixture(response, example.description)
- end
-end
diff --git a/spec/javascripts/fixtures/emojis.rb b/spec/javascripts/fixtures/emojis.rb
new file mode 100644
index 00000000000..0e7257ee681
--- /dev/null
+++ b/spec/javascripts/fixtures/emojis.rb
@@ -0,0 +1,16 @@
+require 'spec_helper'
+
+describe 'Emojis (JavaScript fixtures)' do
+ include JavaScriptFixturesHelpers
+
+ before(:all) do
+ clean_frontend_fixtures('emojis/')
+ end
+
+ it 'emojis/emojis.json' do |example|
+ # Copying the emojis.json from the public folder
+ fixture_file_name = File.expand_path('emojis/emojis.json', JavaScriptFixturesHelpers::FIXTURE_PATH)
+ FileUtils.mkdir_p(File.dirname(fixture_file_name))
+ FileUtils.cp(Rails.root.join('public/-/emojis/1/emojis.json'), fixture_file_name)
+ end
+end
diff --git a/spec/javascripts/fixtures/static_fixtures.rb b/spec/javascripts/fixtures/static_fixtures.rb
index 4569f16f0ca..852a82587b9 100644
--- a/spec/javascripts/fixtures/static_fixtures.rb
+++ b/spec/javascripts/fixtures/static_fixtures.rb
@@ -7,23 +7,23 @@ describe ApplicationController, '(Static JavaScript fixtures)', type: :controlle
clean_frontend_fixtures('static/')
end
- fixtures_path = File.expand_path(JavaScriptFixturesHelpers::FIXTURE_PATH, Rails.root)
- haml_fixtures = Dir.glob(File.expand_path('**/*.haml', fixtures_path)).map do |file_path|
- file_path.sub(/\A#{fixtures_path}#{File::SEPARATOR}/, '')
- end
+ JavaScriptFixturesHelpers::FIXTURE_PATHS.each do |fixture_path|
+ fixtures_path = File.expand_path(fixture_path, Rails.root)
+
+ Dir.glob(File.expand_path('**/*.haml', fixtures_path)).map do |file_path|
+ template_file_name = file_path.sub(/\A#{fixtures_path}#{File::SEPARATOR}/, '')
- haml_fixtures.each do |template_file_name|
- it "static/#{template_file_name.sub(/\.haml\z/, '.raw')}" do |example|
- fixture_file_name = example.description
- rendered = render_template(template_file_name)
- store_frontend_fixture(rendered, fixture_file_name)
+ it "static/#{template_file_name.sub(/\.haml\z/, '.raw')}" do |example|
+ fixture_file_name = example.description
+ rendered = render_template(fixture_path, template_file_name)
+ store_frontend_fixture(rendered, fixture_file_name)
+ end
end
end
private
- def render_template(template_file_name)
- fixture_path = JavaScriptFixturesHelpers::FIXTURE_PATH
+ def render_template(fixture_path, template_file_name)
controller = ApplicationController.new
controller.prepend_view_path(fixture_path)
controller.render_to_string(template: template_file_name, layout: false)
diff --git a/spec/javascripts/ide/components/new_dropdown/modal_spec.js b/spec/javascripts/ide/components/new_dropdown/modal_spec.js
index d94cc1a8faa..d1a0964ccdd 100644
--- a/spec/javascripts/ide/components/new_dropdown/modal_spec.js
+++ b/spec/javascripts/ide/components/new_dropdown/modal_spec.js
@@ -18,6 +18,9 @@ describe('new file modal component', () => {
store.state.entryModal = {
type,
path: '',
+ entry: {
+ path: '',
+ },
};
vm = createComponentWithStore(Component, store).$mount();
@@ -74,6 +77,7 @@ describe('new file modal component', () => {
entry: {
name: 'test',
type: 'blob',
+ path: 'test-path',
},
};
@@ -97,7 +101,7 @@ describe('new file modal component', () => {
describe('entryName', () => {
it('returns entries name', () => {
- expect(vm.entryName).toBe('test');
+ expect(vm.entryName).toBe('test-path');
});
it('updated name', () => {
@@ -107,4 +111,53 @@ describe('new file modal component', () => {
});
});
});
+
+ describe('submitForm', () => {
+    it('flashes an error when the target entry exists', () => {
+ const store = createStore();
+ store.state.entryModal = {
+ type: 'rename',
+ path: 'test-path/test',
+ entry: {
+ name: 'test',
+ type: 'blob',
+ path: 'test-path/test',
+ },
+ };
+ store.state.entries = {
+ 'test-path/test': {
+ name: 'test',
+ deleted: false,
+ },
+ };
+
+ vm = createComponentWithStore(Component, store).$mount();
+ const flashSpy = spyOnDependency(modal, 'flash');
+ vm.submitForm();
+
+ expect(flashSpy).toHaveBeenCalled();
+ });
+
+ it('calls createTempEntry when target path does not exist', () => {
+ const store = createStore();
+ store.state.entryModal = {
+ type: 'rename',
+ path: 'test-path/test',
+ entry: {
+ name: 'test',
+ type: 'blob',
+ path: 'test-path1/test',
+ },
+ };
+
+ vm = createComponentWithStore(Component, store).$mount();
+ spyOn(vm, 'createTempEntry').and.callFake(() => Promise.resolve());
+ vm.submitForm();
+
+ expect(vm.createTempEntry).toHaveBeenCalledWith({
+ name: 'test-path1',
+ type: 'tree',
+ });
+ });
+ });
});
diff --git a/spec/javascripts/ide/lib/files_spec.js b/spec/javascripts/ide/lib/files_spec.js
new file mode 100644
index 00000000000..fe791aa2b74
--- /dev/null
+++ b/spec/javascripts/ide/lib/files_spec.js
@@ -0,0 +1,77 @@
+import { viewerInformationForPath } from '~/vue_shared/components/content_viewer/lib/viewer_utils';
+import { decorateFiles, splitParent } from '~/ide/lib/files';
+import { decorateData } from '~/ide/stores/utils';
+
+const TEST_BRANCH_ID = 'lorem-ipsum';
+const TEST_PROJECT_ID = 10;
+
+const createEntries = paths => {
+ const createEntry = (acc, { path, type, children }) => {
+ // Sometimes we need to end the url with a '/'
+ const createUrl = base => (type === 'tree' ? `${base}/` : base);
+
+ const { name, parent } = splitParent(path);
+ const parentEntry = acc[parent];
+
+ acc[path] = {
+ ...decorateData({
+ projectId: TEST_PROJECT_ID,
+ branchId: TEST_BRANCH_ID,
+ id: path,
+ name,
+ path,
+ url: createUrl(`/${TEST_PROJECT_ID}/${type}/${TEST_BRANCH_ID}/-/${path}`),
+ type,
+ previewMode: viewerInformationForPath(path),
+ parentPath: parent,
+ parentTreeUrl: parentEntry
+ ? parentEntry.url
+ : createUrl(`/${TEST_PROJECT_ID}/${type}/${TEST_BRANCH_ID}`),
+ }),
+ tree: children.map(childName => jasmine.objectContaining({ name: childName })),
+ };
+
+ return acc;
+ };
+
+ const entries = paths.reduce(createEntry, {});
+
+ // Wrap entries in jasmine.objectContaining.
+ // We couldn't do this earlier because we still need to select properties from parent entries.
+ return Object.keys(entries).reduce((acc, key) => {
+ acc[key] = jasmine.objectContaining(entries[key]);
+
+ return acc;
+ }, {});
+};
+
+describe('IDE lib decorate files', () => {
+ it('creates entries and treeList', () => {
+ const data = ['app/assets/apples/foo.js', 'app/bugs.js', 'README.md'];
+ const expectedEntries = createEntries([
+ { path: 'app', type: 'tree', children: ['assets', 'bugs.js'] },
+ { path: 'app/assets', type: 'tree', children: ['apples'] },
+ { path: 'app/assets/apples', type: 'tree', children: ['foo.js'] },
+ { path: 'app/assets/apples/foo.js', type: 'blob', children: [] },
+ { path: 'app/bugs.js', type: 'blob', children: [] },
+ { path: 'README.md', type: 'blob', children: [] },
+ ]);
+
+ const { entries, treeList } = decorateFiles({
+ data,
+ branchId: TEST_BRANCH_ID,
+ projectId: TEST_PROJECT_ID,
+ });
+
+ // Here we test the keys and then each key/value individually because `expect(entries).toEqual(expectedEntries)`
+ // was taking a very long time for some reason. Probably due to large objects and nested `jasmine.objectContaining`.
+ const entryKeys = Object.keys(entries);
+
+ expect(entryKeys).toEqual(Object.keys(expectedEntries));
+ entryKeys.forEach(key => {
+ expect(entries[key]).toEqual(expectedEntries[key]);
+ });
+
+ expect(treeList).toEqual([expectedEntries.app, expectedEntries['README.md']]);
+ });
+});
diff --git a/spec/javascripts/ide/stores/actions/merge_request_spec.js b/spec/javascripts/ide/stores/actions/merge_request_spec.js
index 9bfc7c397b8..a5839630657 100644
--- a/spec/javascripts/ide/stores/actions/merge_request_spec.js
+++ b/spec/javascripts/ide/stores/actions/merge_request_spec.js
@@ -2,6 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import store from '~/ide/stores';
import actions, {
+ getMergeRequestsForBranch,
getMergeRequestData,
getMergeRequestChanges,
getMergeRequestVersions,
@@ -27,6 +28,98 @@ describe('IDE store merge request actions', () => {
resetStore(store);
});
+ describe('getMergeRequestsForBranch', () => {
+ describe('success', () => {
+ const mrData = { iid: 2, source_branch: 'bar' };
+ const mockData = [mrData];
+
+ describe('base case', () => {
+ beforeEach(() => {
+ spyOn(service, 'getProjectMergeRequests').and.callThrough();
+ mock.onGet(/api\/(.*)\/projects\/abcproject\/merge_requests/).reply(200, mockData);
+ });
+
+ it('calls getProjectMergeRequests service method', done => {
+ store
+ .dispatch('getMergeRequestsForBranch', { projectId: 'abcproject', branchId: 'bar' })
+ .then(() => {
+ expect(service.getProjectMergeRequests).toHaveBeenCalledWith('abcproject', {
+ source_branch: 'bar',
+ order_by: 'created_at',
+ per_page: 1,
+ });
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('sets the "Merge Request" Object', done => {
+ store
+ .dispatch('getMergeRequestsForBranch', { projectId: 'abcproject', branchId: 'bar' })
+ .then(() => {
+ expect(Object.keys(store.state.projects.abcproject.mergeRequests).length).toEqual(1);
+ expect(Object.keys(store.state.projects.abcproject.mergeRequests)[0]).toEqual('2');
+ expect(store.state.projects.abcproject.mergeRequests[2]).toEqual(
+ jasmine.objectContaining(mrData),
+ );
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('sets "Current Merge Request" object to the most recent MR', done => {
+ store
+ .dispatch('getMergeRequestsForBranch', { projectId: 'abcproject', branchId: 'bar' })
+ .then(() => {
+ expect(store.state.currentMergeRequestId).toEqual('2');
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('no merge requests for branch available case', () => {
+ beforeEach(() => {
+ spyOn(service, 'getProjectMergeRequests').and.callThrough();
+ mock.onGet(/api\/(.*)\/projects\/abcproject\/merge_requests/).reply(200, []);
+ });
+
+        it('does not fail if there are no merge requests for the current branch', done => {
+ store
+ .dispatch('getMergeRequestsForBranch', { projectId: 'abcproject', branchId: 'foo' })
+ .then(() => {
+ expect(Object.keys(store.state.projects.abcproject.mergeRequests).length).toEqual(0);
+ expect(store.state.currentMergeRequestId).toEqual('');
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onGet(/api\/(.*)\/projects\/abcproject\/merge_requests/).networkError();
+ });
+
+      it('flashes a message on error', done => {
+ const flashSpy = spyOnDependency(actions, 'flash');
+
+ getMergeRequestsForBranch({ commit() {} }, { projectId: 'abcproject', branchId: 'bar' })
+ .then(() => {
+ fail('Expected getMergeRequestsForBranch to throw an error');
+ })
+ .catch(() => {
+ expect(flashSpy).toHaveBeenCalled();
+ expect(flashSpy.calls.argsFor(0)[0]).toEqual('Error fetching merge requests for bar');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+ });
+
describe('getMergeRequestData', () => {
describe('success', () => {
beforeEach(() => {
diff --git a/spec/javascripts/ide/stores/actions/project_spec.js b/spec/javascripts/ide/stores/actions/project_spec.js
index 7d8c9edd965..7b0963713fb 100644
--- a/spec/javascripts/ide/stores/actions/project_spec.js
+++ b/spec/javascripts/ide/stores/actions/project_spec.js
@@ -249,6 +249,7 @@ describe('IDE store project actions', () => {
['setCurrentBranchId', branch.branchId],
['getBranchData', branch],
['getFiles', branch],
+ ['getMergeRequestsForBranch', branch],
]);
})
.then(done)
diff --git a/spec/javascripts/ide/stores/actions/tree_spec.js b/spec/javascripts/ide/stores/actions/tree_spec.js
index bd41e87bf0e..fbb676aab33 100644
--- a/spec/javascripts/ide/stores/actions/tree_spec.js
+++ b/spec/javascripts/ide/stores/actions/tree_spec.js
@@ -20,6 +20,7 @@ describe('Multi-file store tree actions', () => {
};
beforeEach(() => {
+ jasmine.clock().install();
spyOn(router, 'push');
mock = new MockAdapter(axios);
@@ -37,6 +38,7 @@ describe('Multi-file store tree actions', () => {
});
afterEach(() => {
+ jasmine.clock().uninstall();
mock.restore();
resetStore(store);
});
@@ -70,6 +72,11 @@ describe('Multi-file store tree actions', () => {
store
.dispatch('getFiles', basicCallParameters)
.then(() => {
+ // The populating of the tree is deferred for performance reasons.
+ // See this merge request for details: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/25700
+ jasmine.clock().tick(1);
+ })
+ .then(() => {
projectTree = store.state.trees['abcproject/master'];
expect(projectTree.tree.length).toBe(2);
diff --git a/spec/javascripts/ide/stores/actions_spec.js b/spec/javascripts/ide/stores/actions_spec.js
index df291ade3f7..0b5587d02ae 100644
--- a/spec/javascripts/ide/stores/actions_spec.js
+++ b/spec/javascripts/ide/stores/actions_spec.js
@@ -499,12 +499,12 @@ describe('Multi-file store actions', () => {
testAction(
renameEntry,
- { path: 'test', name: 'new-name' },
+ { path: 'test', name: 'new-name', entryPath: null, parentPath: 'parent-path' },
store.state,
[
{
type: types.RENAME_ENTRY,
- payload: { path: 'test', name: 'new-name', entryPath: null },
+ payload: { path: 'test', name: 'new-name', entryPath: null, parentPath: 'parent-path' },
},
],
[{ type: 'deleteEntry', payload: 'test' }],
@@ -527,17 +527,33 @@ describe('Multi-file store actions', () => {
testAction(
renameEntry,
- { path: 'test', name: 'new-name' },
+ { path: 'test', name: 'new-name', parentPath: 'parent-path' },
store.state,
[
{
type: types.RENAME_ENTRY,
- payload: { path: 'test', name: 'new-name', entryPath: null },
+ payload: { path: 'test', name: 'new-name', entryPath: null, parentPath: 'parent-path' },
},
],
[
- { type: 'renameEntry', payload: { path: 'test', name: 'new-name', entryPath: 'tree-1' } },
- { type: 'renameEntry', payload: { path: 'test', name: 'new-name', entryPath: 'tree-2' } },
+ {
+ type: 'renameEntry',
+ payload: {
+ path: 'test',
+ name: 'new-name',
+ entryPath: 'tree-1',
+ parentPath: 'parent-path/new-name',
+ },
+ },
+ {
+ type: 'renameEntry',
+ payload: {
+ path: 'test',
+ name: 'new-name',
+ entryPath: 'tree-2',
+ parentPath: 'parent-path/new-name',
+ },
+ },
{ type: 'deleteEntry', payload: 'test' },
],
done,
diff --git a/spec/javascripts/ide/stores/mutations_spec.js b/spec/javascripts/ide/stores/mutations_spec.js
index 41dd3d3c67f..5ee098bf17f 100644
--- a/spec/javascripts/ide/stores/mutations_spec.js
+++ b/spec/javascripts/ide/stores/mutations_spec.js
@@ -298,7 +298,12 @@ describe('Multi-file store mutations', () => {
});
it('creates new renamed entry', () => {
- mutations.RENAME_ENTRY(localState, { path: 'oldPath', name: 'newPath' });
+ mutations.RENAME_ENTRY(localState, {
+ path: 'oldPath',
+ name: 'newPath',
+ entryPath: null,
+ parentPath: '',
+ });
expect(localState.entries.newPath).toEqual({
...localState.entries.oldPath,
@@ -335,7 +340,12 @@ describe('Multi-file store mutations', () => {
...file(),
};
- mutations.RENAME_ENTRY(localState, { path: 'oldPath', name: 'newPath' });
+ mutations.RENAME_ENTRY(localState, {
+ path: 'oldPath',
+ name: 'newPath',
+ entryPath: null,
+ parentPath: 'parentPath',
+ });
expect(localState.entries.parentPath.tree.length).toBe(1);
});
diff --git a/spec/javascripts/import_projects/components/import_projects_table_spec.js b/spec/javascripts/import_projects/components/import_projects_table_spec.js
index a1ff84ce259..ab8642bf0dd 100644
--- a/spec/javascripts/import_projects/components/import_projects_table_spec.js
+++ b/spec/javascripts/import_projects/components/import_projects_table_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
-import store from '~/import_projects/store';
+import createStore from '~/import_projects/store';
import importProjectsTable from '~/import_projects/components/import_projects_table.vue';
import STATUS_MAP from '~/import_projects/constants';
import setTimeoutPromise from '../../helpers/set_timeout_promise_helper';
@@ -9,6 +9,7 @@ import setTimeoutPromise from '../../helpers/set_timeout_promise_helper';
describe('ImportProjectsTable', () => {
let vm;
let mock;
+ let store;
const reposPath = '/repos-path';
const jobsPath = '/jobs-path';
const providerTitle = 'THE PROVIDER';
@@ -31,12 +32,13 @@ describe('ImportProjectsTable', () => {
},
}).$mount();
- component.$store.dispatch('stopJobsPolling');
+ store.dispatch('stopJobsPolling');
return component;
}
beforeEach(() => {
+ store = createStore();
store.dispatch('setInitialData', { reposPath });
mock = new MockAdapter(axios);
});
@@ -167,7 +169,7 @@ describe('ImportProjectsTable', () => {
expect(vm.$el.querySelector(`.ic-status_${statusObject.icon}`)).not.toBeNull();
mock.onGet(jobsPath).replyOnce(200, updatedProjects);
- return vm.$store.dispatch('restartJobsPolling');
+ return store.dispatch('restartJobsPolling');
})
.then(() => setTimeoutPromise())
.then(() => {
diff --git a/spec/javascripts/import_projects/components/imported_project_table_row_spec.js b/spec/javascripts/import_projects/components/imported_project_table_row_spec.js
index 8af3b5954a9..7dac7e9ccc1 100644
--- a/spec/javascripts/import_projects/components/imported_project_table_row_spec.js
+++ b/spec/javascripts/import_projects/components/imported_project_table_row_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import store from '~/import_projects/store';
+import createStore from '~/import_projects/store';
import importedProjectTableRow from '~/import_projects/components/imported_project_table_row.vue';
import STATUS_MAP from '~/import_projects/constants';
@@ -16,6 +16,7 @@ describe('ImportedProjectTableRow', () => {
function createComponent() {
const ImportedProjectTableRow = Vue.extend(importedProjectTableRow);
+ const store = createStore();
return new ImportedProjectTableRow({
store,
propsData: {
diff --git a/spec/javascripts/import_projects/components/provider_repo_table_row_spec.js b/spec/javascripts/import_projects/components/provider_repo_table_row_spec.js
index 69377f8d685..4d2bacd2ad0 100644
--- a/spec/javascripts/import_projects/components/provider_repo_table_row_spec.js
+++ b/spec/javascripts/import_projects/components/provider_repo_table_row_spec.js
@@ -1,12 +1,13 @@
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
-import store from '~/import_projects/store';
+import createStore from '~/import_projects/store';
import providerRepoTableRow from '~/import_projects/components/provider_repo_table_row.vue';
import STATUS_MAP, { STATUSES } from '~/import_projects/constants';
import setTimeoutPromise from '../../helpers/set_timeout_promise_helper';
describe('ProviderRepoTableRow', () => {
+ let store;
let vm;
const repo = {
id: 10,
@@ -28,6 +29,10 @@ describe('ProviderRepoTableRow', () => {
}).$mount();
}
+ beforeEach(() => {
+ store = createStore();
+ });
+
afterEach(() => {
vm.$destroy();
});
diff --git a/spec/javascripts/jobs/store/getters_spec.js b/spec/javascripts/jobs/store/getters_spec.js
index 7931b2af79f..379114c3737 100644
--- a/spec/javascripts/jobs/store/getters_spec.js
+++ b/spec/javascripts/jobs/store/getters_spec.js
@@ -151,6 +151,61 @@ describe('Job Store Getters', () => {
});
});
+ describe('shouldRenderSharedRunnerLimitWarning', () => {
+ describe('without runners information', () => {
+ it('returns false', () => {
+ expect(getters.shouldRenderSharedRunnerLimitWarning(localState)).toEqual(false);
+ });
+ });
+
+ describe('with runners information', () => {
+ describe('when used quota is less than limit', () => {
+ it('returns false', () => {
+ localState.job.runners = {
+ quota: {
+ used: 33,
+ limit: 2000,
+ },
+ available: true,
+ online: true,
+ };
+
+ expect(getters.shouldRenderSharedRunnerLimitWarning(localState)).toEqual(false);
+ });
+ });
+
+ describe('when used quota is equal to limit', () => {
+ it('returns true', () => {
+ localState.job.runners = {
+ quota: {
+ used: 2000,
+ limit: 2000,
+ },
+ available: true,
+ online: true,
+ };
+
+ expect(getters.shouldRenderSharedRunnerLimitWarning(localState)).toEqual(true);
+ });
+ });
+
+ describe('when used quota is bigger than limit', () => {
+ it('returns true', () => {
+ localState.job.runners = {
+ quota: {
+ used: 2002,
+ limit: 2000,
+ },
+ available: true,
+ online: true,
+ };
+
+ expect(getters.shouldRenderSharedRunnerLimitWarning(localState)).toEqual(true);
+ });
+ });
+ });
+ });
+
describe('hasRunnersForProject', () => {
describe('with available and offline runners', () => {
it('returns true', () => {
diff --git a/spec/javascripts/lib/utils/number_utility_spec.js b/spec/javascripts/lib/utils/number_utility_spec.js
index 94c6214c86a..818404bad81 100644
--- a/spec/javascripts/lib/utils/number_utility_spec.js
+++ b/spec/javascripts/lib/utils/number_utility_spec.js
@@ -4,6 +4,7 @@ import {
bytesToMiB,
bytesToGiB,
numberToHumanSize,
+ sum,
} from '~/lib/utils/number_utils';
describe('Number Utils', () => {
@@ -87,4 +88,14 @@ describe('Number Utils', () => {
expect(numberToHumanSize(10737418240)).toEqual('10.00 GiB');
});
});
+
+ describe('sum', () => {
+ it('should add up two values', () => {
+ expect(sum(1, 2)).toEqual(3);
+ });
+
+ it('should add up all the values in an array when passed to a reducer', () => {
+ expect([1, 2, 3, 4, 5].reduce(sum)).toEqual(15);
+ });
+ });
});
diff --git a/spec/javascripts/monitoring/charts/area_spec.js b/spec/javascripts/monitoring/charts/area_spec.js
index d334ef7ba4f..fb49290be19 100644
--- a/spec/javascripts/monitoring/charts/area_spec.js
+++ b/spec/javascripts/monitoring/charts/area_spec.js
@@ -7,6 +7,7 @@ import MonitoringMock, { deploymentData } from '../mock_data';
describe('Area component', () => {
const mockWidgets = 'mockWidgets';
+ const mockSvgPathContent = 'mockSvgPathContent';
let mockGraphData;
let areaChart;
let spriteSpy;
@@ -30,7 +31,7 @@ describe('Area component', () => {
});
spriteSpy = spyOnDependency(Area, 'getSvgIconPathContent').and.callFake(
- () => new Promise(resolve => resolve()),
+ () => new Promise(resolve => resolve(mockSvgPathContent)),
);
});
@@ -74,15 +75,6 @@ describe('Area component', () => {
expect(shallowWrapperContainsSlotText(glAreaChart, 'tooltipTitle', mockTitle)).toBe(true);
});
- it('recieves tooltip content', () => {
- const mockContent = 'mockContent';
- areaChart.vm.tooltip.content = mockContent;
-
- expect(shallowWrapperContainsSlotText(glAreaChart, 'tooltipContent', mockContent)).toBe(
- true,
- );
- });
-
describe('when tooltip is showing deployment data', () => {
beforeEach(() => {
areaChart.vm.tooltip.isDeployment = true;
@@ -110,6 +102,7 @@ describe('Area component', () => {
const generateSeriesData = type => ({
seriesData: [
{
+ seriesName: areaChart.vm.chartData[0].name,
componentSubType: type,
value: [mockDate, 5.55555],
},
@@ -127,7 +120,14 @@ describe('Area component', () => {
});
it('formats tooltip content', () => {
- expect(areaChart.vm.tooltip.content).toBe('CPU 5.556');
+ expect(areaChart.vm.tooltip.content).toEqual([{ name: 'Core Usage', value: '5.556' }]);
+ expect(
+ shallowWrapperContainsSlotText(
+ areaChart.find(GlAreaChart),
+ 'tooltipContent',
+ 'Core Usage 5.556',
+ ),
+ ).toBe(true);
});
});
@@ -146,24 +146,31 @@ describe('Area component', () => {
});
});
- describe('getScatterSymbol', () => {
+ describe('setSvg', () => {
+ const mockSvgName = 'mockSvgName';
+
beforeEach(() => {
- areaChart.vm.getScatterSymbol();
+ areaChart.vm.setSvg(mockSvgName);
});
- it('gets rocket svg path content for use as deployment data symbol', () => {
- expect(spriteSpy).toHaveBeenCalledWith('rocket');
+ it('gets svg path content', () => {
+ expect(spriteSpy).toHaveBeenCalledWith(mockSvgName);
+ });
+
+ it('sets svg path content', done => {
+ areaChart.vm.$nextTick(() => {
+ expect(areaChart.vm.svgs[mockSvgName]).toBe(`path://${mockSvgPathContent}`);
+ done();
+ });
});
});
describe('onResize', () => {
const mockWidth = 233;
- const mockHeight = 144;
beforeEach(() => {
spyOn(Element.prototype, 'getBoundingClientRect').and.callFake(() => ({
width: mockWidth,
- height: mockHeight,
}));
areaChart.vm.onResize();
});
@@ -171,22 +178,25 @@ describe('Area component', () => {
it('sets area chart width', () => {
expect(areaChart.vm.width).toBe(mockWidth);
});
-
- it('sets area chart height', () => {
- expect(areaChart.vm.height).toBe(mockHeight);
- });
});
});
describe('computed', () => {
describe('chartData', () => {
+ let chartData;
+ const seriesData = () => chartData[0];
+
+ beforeEach(() => {
+ ({ chartData } = areaChart.vm);
+ });
+
it('utilizes all data points', () => {
- expect(Object.keys(areaChart.vm.chartData)).toEqual(['Cores']);
- expect(areaChart.vm.chartData.Cores.length).toBe(297);
+ expect(chartData.length).toBe(1);
+ expect(seriesData().data.length).toBe(297);
});
it('creates valid data', () => {
- const data = areaChart.vm.chartData.Cores;
+ const { data } = seriesData();
expect(
data.filter(([time, value]) => new Date(time).getTime() > 0 && typeof value === 'number')
@@ -205,12 +215,6 @@ describe('Area component', () => {
});
});
- describe('xAxisLabel', () => {
- it('constructs a label for the chart x-axis', () => {
- expect(areaChart.vm.xAxisLabel).toBe('Core Usage');
- });
- });
-
describe('yAxisLabel', () => {
it('constructs a label for the chart y-axis', () => {
expect(areaChart.vm.yAxisLabel).toBe('CPU');
diff --git a/spec/javascripts/notes/components/note_form_spec.js b/spec/javascripts/notes/components/note_form_spec.js
index 5db20fd285f..7cc324cfe44 100644
--- a/spec/javascripts/notes/components/note_form_spec.js
+++ b/spec/javascripts/notes/components/note_form_spec.js
@@ -1,17 +1,15 @@
-import Vue from 'vue';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
import createStore from '~/notes/stores';
-import issueNoteForm from '~/notes/components/note_form.vue';
+import NoteForm from '~/notes/components/note_form.vue';
+import MarkdownField from '~/vue_shared/components/markdown/field.vue';
import { noteableDataMock, notesDataMock } from '../mock_data';
-import { keyboardDownEvent } from '../../issue_show/helpers';
describe('issue_note_form component', () => {
let store;
- let vm;
+ let wrapper;
let props;
beforeEach(() => {
- const Component = Vue.extend(issueNoteForm);
-
store = createStore();
store.dispatch('setNoteableData', noteableDataMock);
store.dispatch('setNotesData', notesDataMock);
@@ -22,26 +20,35 @@ describe('issue_note_form component', () => {
noteId: '545',
};
- vm = new Component({
+ const localVue = createLocalVue();
+ wrapper = shallowMount(NoteForm, {
store,
propsData: props,
- }).$mount();
+ // see https://gitlab.com/gitlab-org/gitlab-ce/issues/56317 for the following
+ localVue,
+ sync: false,
+ });
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('noteHash', () => {
it('returns note hash string based on `noteId`', () => {
- expect(vm.noteHash).toBe(`#note_${props.noteId}`);
+ expect(wrapper.vm.noteHash).toBe(`#note_${props.noteId}`);
});
it('return note hash as `#` when `noteId` is empty', done => {
- vm.noteId = '';
- Vue.nextTick()
+ wrapper.setProps({
+ ...props,
+ noteId: '',
+ });
+
+ wrapper.vm
+ .$nextTick()
.then(() => {
- expect(vm.noteHash).toBe('#');
+ expect(wrapper.vm.noteHash).toBe('#');
})
.then(done)
.catch(done.fail);
@@ -50,95 +57,127 @@ describe('issue_note_form component', () => {
describe('conflicts editing', () => {
it('should show conflict message if note changes outside the component', done => {
- vm.isEditing = true;
- vm.noteBody = 'Foo';
+ wrapper.setProps({
+ ...props,
+ isEditing: true,
+ noteBody: 'Foo',
+ });
+
const message =
'This comment has changed since you started editing, please review the updated comment to ensure information is not lost.';
- Vue.nextTick(() => {
- expect(
- vm.$el
- .querySelector('.js-conflict-edit-warning')
- .textContent.replace(/\s+/g, ' ')
- .trim(),
- ).toEqual(message);
- done();
- });
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ const conflictWarning = wrapper.find('.js-conflict-edit-warning');
+
+ expect(conflictWarning.exists()).toBe(true);
+ expect(
+ conflictWarning
+ .text()
+ .replace(/\s+/g, ' ')
+ .trim(),
+ ).toBe(message);
+ })
+ .then(done)
+ .catch(done.fail);
});
});
describe('form', () => {
it('should render text area with placeholder', () => {
- expect(vm.$el.querySelector('textarea').getAttribute('placeholder')).toEqual(
+ const textarea = wrapper.find('textarea');
+
+ expect(textarea.attributes('placeholder')).toEqual(
'Write a comment or drag your files here…',
);
});
it('should link to markdown docs', () => {
const { markdownDocsPath } = notesDataMock;
+ const markdownField = wrapper.find(MarkdownField);
+ const markdownFieldProps = markdownField.props();
- expect(vm.$el.querySelector(`a[href="${markdownDocsPath}"]`).textContent.trim()).toEqual(
- 'Markdown',
- );
+ expect(markdownFieldProps.markdownDocsPath).toBe(markdownDocsPath);
});
describe('keyboard events', () => {
+ let textarea;
+
+ beforeEach(() => {
+ textarea = wrapper.find('textarea');
+ textarea.setValue('Foo');
+ });
+
describe('up', () => {
        it('should enter edit mode', () => {
- spyOn(vm, 'editMyLastNote').and.callThrough();
- vm.$el.querySelector('textarea').value = 'Foo';
- vm.$el.querySelector('textarea').dispatchEvent(keyboardDownEvent(38, true));
+ // TODO: do not spy on vm
+ spyOn(wrapper.vm, 'editMyLastNote').and.callThrough();
+
+ textarea.trigger('keydown.up');
- expect(vm.editMyLastNote).toHaveBeenCalled();
+ expect(wrapper.vm.editMyLastNote).toHaveBeenCalled();
});
});
describe('enter', () => {
it('should save note when cmd+enter is pressed', () => {
- spyOn(vm, 'handleUpdate').and.callThrough();
- vm.$el.querySelector('textarea').value = 'Foo';
- vm.$el.querySelector('textarea').dispatchEvent(keyboardDownEvent(13, true));
+ textarea.trigger('keydown.enter', { metaKey: true });
+
+ const { handleFormUpdate } = wrapper.emitted();
- expect(vm.handleUpdate).toHaveBeenCalled();
+ expect(handleFormUpdate.length).toBe(1);
});
it('should save note when ctrl+enter is pressed', () => {
- spyOn(vm, 'handleUpdate').and.callThrough();
- vm.$el.querySelector('textarea').value = 'Foo';
- vm.$el.querySelector('textarea').dispatchEvent(keyboardDownEvent(13, false, true));
+ textarea.trigger('keydown.enter', { ctrlKey: true });
- expect(vm.handleUpdate).toHaveBeenCalled();
+ const { handleFormUpdate } = wrapper.emitted();
+
+ expect(handleFormUpdate.length).toBe(1);
});
});
});
describe('actions', () => {
it('should be possible to cancel', done => {
- spyOn(vm, 'cancelHandler').and.callThrough();
- vm.isEditing = true;
+ // TODO: do not spy on vm
+ spyOn(wrapper.vm, 'cancelHandler').and.callThrough();
+ wrapper.setProps({
+ ...props,
+ isEditing: true,
+ });
- Vue.nextTick(() => {
- vm.$el.querySelector('.note-edit-cancel').click();
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ const cancelButton = wrapper.find('.note-edit-cancel');
+ cancelButton.trigger('click');
- Vue.nextTick(() => {
- expect(vm.cancelHandler).toHaveBeenCalled();
- done();
- });
- });
+ expect(wrapper.vm.cancelHandler).toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
});
it('should be possible to update the note', done => {
- vm.isEditing = true;
-
- Vue.nextTick(() => {
- vm.$el.querySelector('textarea').value = 'Foo';
- vm.$el.querySelector('.js-vue-issue-save').click();
-
- Vue.nextTick(() => {
- expect(vm.isSubmitting).toEqual(true);
- done();
- });
+ wrapper.setProps({
+ ...props,
+ isEditing: true,
});
+
+ wrapper.vm
+ .$nextTick()
+ .then(() => {
+ const textarea = wrapper.find('textarea');
+ textarea.setValue('Foo');
+ const saveButton = wrapper.find('.js-vue-issue-save');
+ saveButton.trigger('click');
+
+ expect(wrapper.vm.isSubmitting).toEqual(true);
+ })
+ .then(done)
+ .catch(done.fail);
});
});
});
diff --git a/spec/javascripts/notes/stores/getters_spec.js b/spec/javascripts/notes/stores/getters_spec.js
index c066975a43b..8f3c493dd4c 100644
--- a/spec/javascripts/notes/stores/getters_spec.js
+++ b/spec/javascripts/notes/stores/getters_spec.js
@@ -261,4 +261,12 @@ describe('Getters Notes Store', () => {
expect(getters.firstUnresolvedDiscussionId(state, localGettersFalsy)(false)).toBeFalsy();
});
});
+
+ describe('getDiscussion', () => {
+ it('returns discussion by ID', () => {
+ state.discussions.push({ id: '1' });
+
+ expect(getters.getDiscussion(state)('1')).toEqual({ id: '1' });
+ });
+ });
});
diff --git a/spec/javascripts/notes/stores/mutation_spec.js b/spec/javascripts/notes/stores/mutation_spec.js
index fcad1f245b6..4a640d589fb 100644
--- a/spec/javascripts/notes/stores/mutation_spec.js
+++ b/spec/javascripts/notes/stores/mutation_spec.js
@@ -1,5 +1,6 @@
import Vue from 'vue';
import mutations from '~/notes/stores/mutations';
+import { DISCUSSION_NOTE } from '~/notes/constants';
import {
note,
discussionMock,
@@ -326,6 +327,18 @@ describe('Notes Store mutations', () => {
expect(state.discussions[0].notes[0].note).toEqual('Foo');
});
+
+    it('transforms an individual note into a discussion', () => {
+ const state = {
+ discussions: [individualNote],
+ };
+
+ const transformedNote = { ...individualNote.notes[0], type: DISCUSSION_NOTE };
+
+ mutations.UPDATE_NOTE(state, transformedNote);
+
+ expect(state.discussions[0].individual_note).toEqual(false);
+ });
});
describe('CLOSE_ISSUE', () => {
@@ -530,7 +543,7 @@ describe('Notes Store mutations', () => {
state = { convertedDisscussionIds: [] };
});
- it('adds a disucssion to convertedDisscussionIds', () => {
+ it('adds a discussion to convertedDisscussionIds', () => {
mutations.CONVERT_TO_DISCUSSION(state, discussion.id);
expect(state.convertedDisscussionIds).toContain(discussion.id);
@@ -549,7 +562,7 @@ describe('Notes Store mutations', () => {
state = { convertedDisscussionIds: [41, 42] };
});
- it('removes a disucssion from convertedDisscussionIds', () => {
+ it('removes a discussion from convertedDisscussionIds', () => {
mutations.REMOVE_CONVERTED_DISCUSSION(state, discussion.id);
expect(state.convertedDisscussionIds).not.toContain(discussion.id);
diff --git a/spec/javascripts/persistent_user_callout_spec.js b/spec/javascripts/persistent_user_callout_spec.js
new file mode 100644
index 00000000000..2fdfff3db03
--- /dev/null
+++ b/spec/javascripts/persistent_user_callout_spec.js
@@ -0,0 +1,88 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import PersistentUserCallout from '~/persistent_user_callout';
+import setTimeoutPromise from 'spec/helpers/set_timeout_promise_helper';
+
+describe('PersistentUserCallout', () => {
+ const dismissEndpoint = '/dismiss';
+ const featureName = 'feature';
+
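+  // Minimal stand-in for the server-rendered callout markup: the container's
+  // data attributes provide the dismiss endpoint and feature id that
+  // PersistentUserCallout reads.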
+ function createFixture() {
+ const fixture = document.createElement('div');
+ fixture.innerHTML = `
+ <div
+ class="container"
+ data-dismiss-endpoint="${dismissEndpoint}"
+ data-feature-id="${featureName}"
+ >
+ <button type="button" class="js-close"></button>
+ </div>
+ `;
+
+ return fixture;
+ }
+
+ describe('dismiss', () => {
+ let button;
+ let mockAxios;
+ let persistentUserCallout;
+
+ beforeEach(() => {
+ const fixture = createFixture();
+ const container = fixture.querySelector('.container');
+ button = fixture.querySelector('.js-close');
+ mockAxios = new MockAdapter(axios);
+ persistentUserCallout = new PersistentUserCallout(container);
+ spyOn(persistentUserCallout.container, 'remove');
+ });
+
+ afterEach(() => {
+ mockAxios.restore();
+ });
+
+ it('POSTs endpoint and removes container when clicking close', done => {
+ mockAxios.onPost(dismissEndpoint).replyOnce(200);
+
+ button.click();
+
+ setTimeoutPromise()
+ .then(() => {
+ expect(persistentUserCallout.container.remove).toHaveBeenCalled();
+ expect(mockAxios.history.post[0].data).toBe(
+ JSON.stringify({ feature_name: featureName }),
+ );
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('invokes Flash when the dismiss request fails', done => {
+ const Flash = spyOnDependency(PersistentUserCallout, 'Flash');
+ mockAxios.onPost(dismissEndpoint).replyOnce(500);
+
+ button.click();
+
+ setTimeoutPromise()
+ .then(() => {
+ expect(persistentUserCallout.container.remove).not.toHaveBeenCalled();
+ expect(Flash).toHaveBeenCalledWith(
+ 'An error occurred while dismissing the alert. Refresh the page and try again.',
+ );
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('factory', () => {
+ it('returns an instance of PersistentUserCallout with the provided container property', () => {
+ const fixture = createFixture();
+
+ expect(PersistentUserCallout.factory(fixture) instanceof PersistentUserCallout).toBe(true);
+ });
+
+    it('returns undefined if container is falsy', () => {
+ expect(PersistentUserCallout.factory()).toBe(undefined);
+ });
+ });
+});
diff --git a/spec/javascripts/test_bundle.js b/spec/javascripts/test_bundle.js
index b2b0a50911d..5eef5682bbd 100644
--- a/spec/javascripts/test_bundle.js
+++ b/spec/javascripts/test_bundle.js
@@ -8,6 +8,7 @@ import '~/commons';
import Vue from 'vue';
import VueResource from 'vue-resource';
import Translate from '~/vue_shared/translate';
+import CheckEE from '~/vue_shared/mixins/is_ee';
import jasmineDiff from 'jasmine-diff';
import { getDefaultAdapter } from '~/lib/utils/axios_utils';
@@ -43,6 +44,7 @@ Vue.config.errorHandler = function(err) {
Vue.use(VueResource);
Vue.use(Translate);
+Vue.use(CheckEE);
// enable test fixtures
jasmine.getFixtures().fixturesPath = FIXTURES_PATH;
@@ -67,6 +69,7 @@ window.gl = window.gl || {};
window.gl.TEST_HOST = TEST_HOST;
window.gon = window.gon || {};
window.gon.test_env = true;
+window.gon.ee = false;
gon.relative_url_root = '';
let hasUnhandledPromiseRejections = false;
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index 631da202d1d..08e173b0a10 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -673,7 +673,7 @@ describe('ReadyToMerge', () => {
.at(0)
.props('label');
- it('should have two edit components when squash is enabled', () => {
+ it('should have two edit components when squash is enabled and there is more than 1 commit', () => {
createLocalComponent({
mr: {
commitsCount: 2,
@@ -685,6 +685,18 @@ describe('ReadyToMerge', () => {
expect(findCommitEditElements().length).toBe(2);
});
+      it('should have one edit component when squash is enabled and there is only 1 commit', () => {
+ createLocalComponent({
+ mr: {
+ commitsCount: 1,
+ squash: true,
+ enableSquashBeforeMerge: true,
+ },
+ });
+
+ expect(findCommitEditElements().length).toBe(1);
+ });
+
it('should have correct edit merge commit label', () => {
createLocalComponent();
@@ -711,8 +723,10 @@ describe('ReadyToMerge', () => {
expect(findCommitDropdownElement().exists()).toBeFalsy();
});
- it('should be rendered if squash is enabled', () => {
- createLocalComponent({ mr: { squash: true } });
+ it('should be rendered if squash is enabled and there is more than 1 commit', () => {
+ createLocalComponent({
+ mr: { enableSquashBeforeMerge: true, squash: true, commitsCount: 2 },
+ });
expect(findCommitDropdownElement().exists()).toBeTruthy();
});
diff --git a/spec/javascripts/vue_shared/components/issue/related_issuable_item_spec.js b/spec/javascripts/vue_shared/components/issue/related_issuable_item_spec.js
new file mode 100644
index 00000000000..42198e92eea
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/issue/related_issuable_item_spec.js
@@ -0,0 +1,194 @@
+import Vue from 'vue';
+import { mount, createLocalVue } from '@vue/test-utils';
+import RelatedIssuableItem from '~/vue_shared/components/issue/related_issuable_item.vue';
+import { defaultMilestone, defaultAssignees } from './related_issuable_mock_data';
+
+describe('RelatedIssuableItem', () => {
+ let wrapper;
+ const props = {
+ idKey: 1,
+ displayReference: 'gitlab-org/gitlab-test#1',
+ pathIdSeparator: '#',
+ path: `${gl.TEST_HOST}/path`,
+ title: 'title',
+ confidential: true,
+ dueDate: '1990-12-31',
+ weight: 10,
+ createdAt: '2018-12-01T00:00:00.00Z',
+ milestone: defaultMilestone,
+ assignees: defaultAssignees,
+ eventNamespace: 'relatedIssue',
+ };
+ const slots = {
+ dueDate: '<div class="js-due-date-slot"></div>',
+ weight: '<div class="js-weight-slot"></div>',
+ };
+
+ beforeEach(() => {
+ const localVue = createLocalVue();
+
+ wrapper = mount(localVue.extend(RelatedIssuableItem), {
+ localVue,
+ slots,
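+ // sync: false makes rendering updates asynchronous, hence the Vue.nextTick calls in the specs below.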
+ sync: false,
+ propsData: props,
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('contains issuable-info-container class when canReorder is false', () => {
+ expect(wrapper.props('canReorder')).toBe(false);
+ expect(wrapper.find('.issuable-info-container').exists()).toBe(true);
+ });
+
+ it('does not render token state', () => {
+ expect(wrapper.find('.text-secondary svg').exists()).toBe(false);
+ });
+
+ it('does not render remove button', () => {
+ expect(wrapper.find({ ref: 'removeButton' }).exists()).toBe(false);
+ });
+
+ describe('token title', () => {
+ it('links to computedPath', () => {
+ expect(wrapper.find('.item-title a').attributes('href')).toEqual(wrapper.props('path'));
+ });
+
+ it('renders confidential icon', () => {
+ expect(wrapper.find('.confidential-icon').exists()).toBe(true);
+ });
+
+ it('renders title', () => {
+ expect(wrapper.find('.item-title a').text()).toEqual(props.title);
+ });
+ });
+
+ describe('token state', () => {
+ let tokenState;
+
+ beforeEach(done => {
+ wrapper.setProps({ state: 'opened' });
+
+ Vue.nextTick(() => {
+ tokenState = wrapper.find('.issue-token-state-icon-open');
+
+ done();
+ });
+ });
+
+ it('renders if hasState', () => {
+ expect(tokenState.exists()).toBe(true);
+ });
+
+ it('renders state title', () => {
+ const stateTitle = tokenState.attributes('data-original-title');
+
+ expect(stateTitle).toContain('<span class="bold">Opened</span>');
+ expect(stateTitle).toContain(
+ '<span class="text-tertiary">Dec 1, 2018 12:00am GMT+0000</span>',
+ );
+ });
+
+ it('renders aria label', () => {
+ expect(tokenState.attributes('aria-label')).toEqual('opened');
+ });
+
+ it('renders open icon when open state', () => {
+ expect(tokenState.classes('issue-token-state-icon-open')).toBe(true);
+ });
+
+ it('renders close icon when close state', done => {
+ wrapper.setProps({
+ state: 'closed',
+ closedAt: '2018-12-01T00:00:00.00Z',
+ });
+
+ Vue.nextTick(() => {
+ expect(tokenState.classes('issue-token-state-icon-closed')).toBe(true);
+
+ done();
+ });
+ });
+ });
+
+ describe('token metadata', () => {
+ let tokenMetadata;
+
+ beforeEach(done => {
+ Vue.nextTick(() => {
+ tokenMetadata = wrapper.find('.item-meta');
+
+ done();
+ });
+ });
+
+ it('renders item path and ID', () => {
+ const pathAndID = tokenMetadata.find('.item-path-id').text();
+
+ expect(pathAndID).toContain('gitlab-org/gitlab-test');
+ expect(pathAndID).toContain('#1');
+ });
+
+ it('renders milestone icon and name', () => {
+ const milestoneIcon = tokenMetadata.find('.item-milestone svg use');
+ const milestoneTitle = tokenMetadata.find('.item-milestone .milestone-title');
+
+ expect(milestoneIcon.attributes('href')).toContain('clock');
+ expect(milestoneTitle.text()).toContain('Milestone title');
+ });
+
+ it('renders due date component', () => {
+ expect(tokenMetadata.find('.js-due-date-slot').exists()).toBe(true);
+ });
+
+ it('renders weight component', () => {
+ expect(tokenMetadata.find('.js-weight-slot').exists()).toBe(true);
+ });
+ });
+
+ describe('token assignees', () => {
+ it('renders assignees avatars', () => {
+ expect(wrapper.findAll('.item-assignees .user-avatar-link').length).toBe(2);
+ expect(wrapper.find('.item-assignees .avatar-counter').text()).toContain('+2');
+ });
+ });
+
+ describe('remove button', () => {
+ let removeBtn;
+
+ beforeEach(done => {
+ wrapper.setProps({ canRemove: true });
+ Vue.nextTick(() => {
+ removeBtn = wrapper.find({ ref: 'removeButton' });
+
+ done();
+ });
+ });
+
+ it('renders if canRemove', () => {
+ expect(removeBtn.exists()).toBe(true);
+ });
+
+ it('renders disabled button when removeDisabled', done => {
+ wrapper.vm.removeDisabled = true;
+
+ Vue.nextTick(() => {
+ expect(removeBtn.attributes('disabled')).toEqual('disabled');
+
+ done();
+ });
+ });
+
+ it('triggers onRemoveRequest when clicked', () => {
+ removeBtn.trigger('click');
+
+ const { relatedIssueRemoveRequest } = wrapper.emitted();
+
+ expect(relatedIssueRemoveRequest.length).toBe(1);
+ expect(relatedIssueRemoveRequest[0]).toEqual([props.idKey]);
+ });
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js b/spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js
new file mode 100644
index 00000000000..26bfdd7551e
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/issue/related_issuable_mock_data.js
@@ -0,0 +1,111 @@
+export const defaultProps = {
+ endpoint: '/foo/bar/issues/1/related_issues',
+ currentNamespacePath: 'foo',
+ currentProjectPath: 'bar',
+};
+
+export const issuable1 = {
+ id: 200,
+ epic_issue_id: 1,
+ confidential: false,
+ reference: 'foo/bar#123',
+ displayReference: '#123',
+ title: 'some title',
+ path: '/foo/bar/issues/123',
+ state: 'opened',
+};
+
+export const issuable2 = {
+ id: 201,
+ epic_issue_id: 2,
+ confidential: false,
+ reference: 'foo/bar#124',
+ displayReference: '#124',
+ title: 'some other thing',
+ path: '/foo/bar/issues/124',
+ state: 'opened',
+};
+
+export const issuable3 = {
+ id: 202,
+ epic_issue_id: 3,
+ confidential: false,
+ reference: 'foo/bar#125',
+ displayReference: '#125',
+ title: 'some other other thing',
+ path: '/foo/bar/issues/125',
+ state: 'opened',
+};
+
+export const issuable4 = {
+ id: 203,
+ epic_issue_id: 4,
+ confidential: false,
+ reference: 'foo/bar#126',
+ displayReference: '#126',
+ title: 'some other other other thing',
+ path: '/foo/bar/issues/126',
+ state: 'opened',
+};
+
+export const issuable5 = {
+ id: 204,
+ epic_issue_id: 5,
+ confidential: false,
+ reference: 'foo/bar#127',
+ displayReference: '#127',
+ title: 'some other other other thing',
+ path: '/foo/bar/issues/127',
+ state: 'opened',
+};
+
+export const defaultMilestone = {
+ id: 1,
+ state: 'active',
+ title: 'Milestone title',
+ start_date: '2018-01-01',
+ due_date: '2019-12-31',
+};
+
+export const defaultAssignees = [
+ {
+ id: 1,
+ name: 'Administrator',
+ username: 'root',
+ state: 'active',
+ avatar_url: `${gl.TEST_HOST}`,
+ web_url: `${gl.TEST_HOST}/root`,
+ status_tooltip_html: null,
+ path: '/root',
+ },
+ {
+ id: 13,
+ name: 'Brooks Beatty',
+ username: 'brynn_champlin',
+ state: 'active',
+ avatar_url: `${gl.TEST_HOST}`,
+ web_url: `${gl.TEST_HOST}/brynn_champlin`,
+ status_tooltip_html: null,
+ path: '/brynn_champlin',
+ },
+ {
+ id: 6,
+ name: 'Bryce Turcotte',
+ username: 'melynda',
+ state: 'active',
+ avatar_url: `${gl.TEST_HOST}`,
+ web_url: `${gl.TEST_HOST}/melynda`,
+ status_tooltip_html: null,
+ path: '/melynda',
+ },
+ {
+ id: 20,
+ name: 'Conchita Eichmann',
+ username: 'juliana_gulgowski',
+ state: 'active',
+ avatar_url: `${gl.TEST_HOST}`,
+ web_url: `${gl.TEST_HOST}/juliana_gulgowski`,
+ status_tooltip_html: null,
+ path: '/juliana_gulgowski',
+ },
+];
diff --git a/spec/javascripts/vue_shared/components/table_pagination_spec.js b/spec/javascripts/vue_shared/components/table_pagination_spec.js
index 407d1d59f83..42cd41381dc 100644
--- a/spec/javascripts/vue_shared/components/table_pagination_spec.js
+++ b/spec/javascripts/vue_shared/components/table_pagination_spec.js
@@ -22,10 +22,10 @@ describe('Pagination component', () => {
it('should not render anything', () => {
component = mountComponent({
pageInfo: {
- nextPage: 1,
+ nextPage: NaN,
page: 1,
perPage: 20,
- previousPage: null,
+ previousPage: NaN,
total: 15,
totalPages: 1,
},
@@ -58,6 +58,28 @@ describe('Pagination component', () => {
expect(spy).not.toHaveBeenCalled();
});
+ it('should be disabled and non clickable when total and totalPages are NaN', () => {
+ component = mountComponent({
+ pageInfo: {
+ nextPage: 2,
+ page: 1,
+ perPage: 20,
+ previousPage: NaN,
+ total: NaN,
+ totalPages: NaN,
+ },
+ change: spy,
+ });
+
+ expect(
+ component.$el.querySelector('.js-previous-button').classList.contains('disabled'),
+ ).toEqual(true);
+
+ component.$el.querySelector('.js-previous-button .page-link').click();
+
+ expect(spy).not.toHaveBeenCalled();
+ });
+
it('should be enabled and clickable', () => {
component = mountComponent({
pageInfo: {
@@ -75,6 +97,24 @@ describe('Pagination component', () => {
expect(spy).toHaveBeenCalledWith(1);
});
+
+ it('should be enabled and clickable when total and totalPages are NaN', () => {
+ component = mountComponent({
+ pageInfo: {
+ nextPage: 3,
+ page: 2,
+ perPage: 20,
+ previousPage: 1,
+ total: NaN,
+ totalPages: NaN,
+ },
+ change: spy,
+ });
+
+ component.$el.querySelector('.js-previous-button .page-link').click();
+
+ expect(spy).toHaveBeenCalledWith(1);
+ });
});
describe('first button', () => {
@@ -99,6 +139,28 @@ describe('Pagination component', () => {
expect(spy).toHaveBeenCalledWith(1);
});
+
+ it('should call the change callback with the first page when total and totalPages are NaN', () => {
+ component = mountComponent({
+ pageInfo: {
+ nextPage: 3,
+ page: 2,
+ perPage: 20,
+ previousPage: 1,
+ total: NaN,
+ totalPages: NaN,
+ },
+ change: spy,
+ });
+
+ const button = component.$el.querySelector('.js-first-button .page-link');
+
+ expect(button.textContent.trim()).toEqual('« First');
+
+ button.click();
+
+ expect(spy).toHaveBeenCalledWith(1);
+ });
});
describe('last button', () => {
@@ -123,16 +185,32 @@ describe('Pagination component', () => {
expect(spy).toHaveBeenCalledWith(5);
});
+
+ it('should not render', () => {
+ component = mountComponent({
+ pageInfo: {
+ nextPage: 3,
+ page: 2,
+ perPage: 20,
+ previousPage: 1,
+ total: NaN,
+ totalPages: NaN,
+ },
+ change: spy,
+ });
+
+ expect(component.$el.querySelector('.js-last-button .page-link')).toBeNull();
+ });
});
describe('next button', () => {
it('should be disabled and non clickable', () => {
component = mountComponent({
pageInfo: {
- nextPage: 5,
+ nextPage: NaN,
page: 5,
perPage: 20,
- previousPage: 1,
+ previousPage: 4,
total: 84,
totalPages: 5,
},
@@ -146,6 +224,26 @@ describe('Pagination component', () => {
expect(spy).not.toHaveBeenCalled();
});
+ it('should be disabled and non clickable when total and totalPages are NaN', () => {
+ component = mountComponent({
+ pageInfo: {
+ nextPage: NaN,
+ page: 5,
+ perPage: 20,
+ previousPage: 4,
+ total: NaN,
+ totalPages: NaN,
+ },
+ change: spy,
+ });
+
+ expect(component.$el.querySelector('.js-next-button').textContent.trim()).toEqual('Next');
+
+ component.$el.querySelector('.js-next-button .page-link').click();
+
+ expect(spy).not.toHaveBeenCalled();
+ });
+
it('should be enabled and clickable', () => {
component = mountComponent({
pageInfo: {
@@ -163,6 +261,24 @@ describe('Pagination component', () => {
expect(spy).toHaveBeenCalledWith(4);
});
+
+ it('should be enabled and clickable when total and totalPages are NaN', () => {
+ component = mountComponent({
+ pageInfo: {
+ nextPage: 4,
+ page: 3,
+ perPage: 20,
+ previousPage: 2,
+ total: NaN,
+ totalPages: NaN,
+ },
+ change: spy,
+ });
+
+ component.$el.querySelector('.js-next-button .page-link').click();
+
+ expect(spy).toHaveBeenCalledWith(4);
+ });
});
describe('numbered buttons', () => {
@@ -181,22 +297,56 @@ describe('Pagination component', () => {
expect(component.$el.querySelectorAll('.page').length).toEqual(5);
});
+
+ it('should not render any page', () => {
+ component = mountComponent({
+ pageInfo: {
+ nextPage: 4,
+ page: 3,
+ perPage: 20,
+ previousPage: 2,
+ total: NaN,
+ totalPages: NaN,
+ },
+ change: spy,
+ });
+
+ expect(component.$el.querySelectorAll('.page').length).toEqual(0);
+ });
});
- it('should render the spread operator', () => {
- component = mountComponent({
- pageInfo: {
- nextPage: 4,
- page: 3,
- perPage: 20,
- previousPage: 2,
- total: 84,
- totalPages: 10,
- },
- change: spy,
+ describe('spread operator', () => {
+ it('should render', () => {
+ component = mountComponent({
+ pageInfo: {
+ nextPage: 4,
+ page: 3,
+ perPage: 20,
+ previousPage: 2,
+ total: 84,
+ totalPages: 10,
+ },
+ change: spy,
+ });
+
+ expect(component.$el.querySelector('.separator').textContent.trim()).toEqual('...');
});
- expect(component.$el.querySelector('.separator').textContent.trim()).toEqual('...');
+ it('should not render', () => {
+ component = mountComponent({
+ pageInfo: {
+ nextPage: 4,
+ page: 3,
+ perPage: 20,
+ previousPage: 2,
+ total: NaN,
+ totalPages: NaN,
+ },
+ change: spy,
+ });
+
+ expect(component.$el.querySelector('.separator')).toBeNull();
+ });
});
});
});
diff --git a/spec/lib/constraints/project_url_constrainer_spec.rb b/spec/lib/constraints/project_url_constrainer_spec.rb
index c96e7ab8495..3496b01ebcc 100644
--- a/spec/lib/constraints/project_url_constrainer_spec.rb
+++ b/spec/lib/constraints/project_url_constrainer_spec.rb
@@ -16,6 +16,10 @@ describe Constraints::ProjectUrlConstrainer do
let(:request) { build_request('foo', 'bar') }
it { expect(subject.matches?(request)).to be_falsey }
+
+ context 'existence_check is false' do
+ it { expect(subject.matches?(request, existence_check: false)).to be_truthy }
+ end
end
context "project id ending with .git" do
diff --git a/spec/lib/event_filter_spec.rb b/spec/lib/event_filter_spec.rb
index 30016da6828..6648e141b7a 100644
--- a/spec/lib/event_filter_spec.rb
+++ b/spec/lib/event_filter_spec.rb
@@ -26,10 +26,10 @@ describe EventFilter do
set(:push_event) { create(:push_event, project: public_project) }
set(:merged_event) { create(:event, :merged, project: public_project, target: public_project) }
- set(:created_event) { create(:event, :created, project: public_project, target: public_project) }
- set(:updated_event) { create(:event, :updated, project: public_project, target: public_project) }
- set(:closed_event) { create(:event, :closed, project: public_project, target: public_project) }
- set(:reopened_event) { create(:event, :reopened, project: public_project, target: public_project) }
+ set(:created_event) { create(:event, :created, project: public_project, target: create(:issue, project: public_project)) }
+ set(:updated_event) { create(:event, :updated, project: public_project, target: create(:issue, project: public_project)) }
+ set(:closed_event) { create(:event, :closed, project: public_project, target: create(:issue, project: public_project)) }
+ set(:reopened_event) { create(:event, :reopened, project: public_project, target: create(:issue, project: public_project)) }
set(:comments_event) { create(:event, :commented, project: public_project, target: public_project) }
set(:joined_event) { create(:event, :joined, project: public_project, target: public_project) }
set(:left_event) { create(:event, :left, project: public_project, target: public_project) }
diff --git a/spec/lib/gitlab/auth/ldap/config_spec.rb b/spec/lib/gitlab/auth/ldap/config_spec.rb
index d3ab599d5a0..b91a09e3137 100644
--- a/spec/lib/gitlab/auth/ldap/config_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/config_spec.rb
@@ -5,6 +5,65 @@ describe Gitlab::Auth::LDAP::Config do
let(:config) { described_class.new('ldapmain') }
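+ # Throwaway self-signed certificate and key, used only as TLS option fixtures in the specs below.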
+ def raw_cert
+ <<-EOS
+-----BEGIN CERTIFICATE-----
+MIIDZjCCAk4CCQDX+u/9fICksDANBgkqhkiG9w0BAQsFADB1MQswCQYDVQQGEwJV
+UzEMMAoGA1UECAwDRm9vMQwwCgYDVQQHDANCYXIxDDAKBgNVBAoMA0JhejEMMAoG
+A1UECwwDUXV4MQ0wCwYDVQQDDARsZGFwMR8wHQYJKoZIhvcNAQkBFhBsZGFwQGV4
+YW1wbGUuY29tMB4XDTE5MDIyNzE1NTUxNFoXDTE5MDMyOTE1NTUxNFowdTELMAkG
+A1UEBhMCVVMxDDAKBgNVBAgMA0ZvbzEMMAoGA1UEBwwDQmFyMQwwCgYDVQQKDANC
+YXoxDDAKBgNVBAsMA1F1eDENMAsGA1UEAwwEbGRhcDEfMB0GCSqGSIb3DQEJARYQ
+bGRhcEBleGFtcGxlLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+APuDB/4/AUmTEmhYzN13no4Kt8hkRbLQuENRHlOeQw05/MVdoB1AWLOPzIXn4kex
+GD9tHkoJl8S0QPmAAcPHn5O97e+gd0ze5dRQZl/cSd2/j5zeaMvZ1mCrPN/dOluM
+94Oj+wQU4bEcOlrqIMSh0ezJw10R3IHXCQFeGtIZU57WmKcrryQX4kP7KTOgRw/t
+CYp+NivQHtLbBEj1MU0l10qMS2+w8Qpqov4MdW4gx4wTgId2j1ZZ56+n6Jsc9qoI
+wBWBNL4XU5a3kwhYZDOJoOvI9po33KLdT1dXS81uOFXClp3LGmKDgLTwQ1w+RmQG
++JG4EvTfDIShdcTDXEaOfCECAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAJM9Btu5g
+k8qDiz5TilvpyoGuI4viCwusARFAFmOB/my/cHlVvkuq4bbfV1KJoWWGJg8GcklL
+cnIdxc35uYM5icr6xXQyrW0GqAO+LEXyUxVQqYETxrQ/LJ03xhBnuF7hvZJIBiky
+GwUy0clJxGfaCeEM8zXwePawLgGjuUawDDQOwigysoWqoMu3VFW8zl8UPa84bow9
+Kn2QmPAkLw4EcqYSCNSSvnyzu5SM64jwLWRXFsmlqD7773oT29vTkqM1EQANFEfT
+7gQomLyPqoPBoFph5oSNn6Rf31QX1Sie92EAKVnZ1XmD68hKzjv6ChCtzTv4jABg
+XrDwnLkORIAF/Q==
+-----END CERTIFICATE-----
+ EOS
+ end
+
+ def raw_key
+ <<-EOS
+-----BEGIN PRIVATE KEY-----
+MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQD7gwf+PwFJkxJo
+WMzdd56OCrfIZEWy0LhDUR5TnkMNOfzFXaAdQFizj8yF5+JHsRg/bR5KCZfEtED5
+gAHDx5+Tve3voHdM3uXUUGZf3Endv4+c3mjL2dZgqzzf3TpbjPeDo/sEFOGxHDpa
+6iDEodHsycNdEdyB1wkBXhrSGVOe1pinK68kF+JD+ykzoEcP7QmKfjYr0B7S2wRI
+9TFNJddKjEtvsPEKaqL+DHVuIMeME4CHdo9WWeevp+ibHPaqCMAVgTS+F1OWt5MI
+WGQziaDryPaaN9yi3U9XV0vNbjhVwpadyxpig4C08ENcPkZkBviRuBL03wyEoXXE
+w1xGjnwhAgMBAAECggEAbw82GVui6uUpjLAhjm3CssAi1TcJ2+L0aq1IMe5Bd3ay
+mkg0apY+VNPboQl6zuNxbJh3doPz42UhB8sxfE0Ktwd4KIb4Bxap7+2stwmkCGoN
+NVy0c8d2NWuHzuZ2XXTK2vMu5Wd/HWD0l66o14sJEoEpZlB7yU216UevmjSayxjh
+aBTSaYyyrf24haTaCuqwph/V73ZlMpFdSALGny0uiP/5inxciMCkMpHfX6BflSb4
+EGKsIYt9BJ0kY4GNG5bCP7971UCxp2eEJhU2fV8HuFGCOD12IqSpUqPxHxjsWpfx
+T7FZ3V2kM/58Ca+5LB2y3atcPIdY0/g7/43V4VD+7QKBgQD/PO4/0cmZuuLU1LPT
+C/C596kPK0JLlvvRqhbz4byRAkW/n7uQFG7TMtFNle3UmT7rk7pjtbHnByqzEd+9
+jMhBysjHOMg0+DWm7fEtSg/tJ3qLVO3nbdA4qmXYobLcLoG+PCYRLskEHHqTG/Bv
+QZLbavOU6rrTqckNr1TMpNBmXwKBgQD8Q0C2YTOpwgjRUe8i6Chnc3o4x8a1i98y
+9la6c7y7acWHSbEczMkNfEBrbM73rTb+bBA0Zqw+Z1gkv8bGpvGxX8kbSfJJ2YKW
+9koxpLNTVNVapqBa9ImiaozV285dz9Ukx8bnMOJlTELpOl7RRV7iF0smYjfHIl3D
+Yxyda/MtfwKBgHb9l/Dmw77IkqE4PFFimqqIHCe3OiP1UpavXh36midcUNoCBLYp
+4HTTlyI9iG/5tYysBVQgy7xx6eUrqww6Ss3pVOsTvLp9EL4u5aYAhiZApm+4e2TO
+HCmevvZcg/8EK3Zdoj2Wex5QjJBykQe9IVLrrH07ZTfySon3uGfjWkivAoGAGvqS
+VC8HGHOw/7n0ilYr5Ax8mM/813OzFj80PVKdb6m7P2HJOFxKcE/Gj/aeF+0FgaZL
+AV+tsirZSWzdNGesV5z35Bw/dlh11/FVNAP6TcI34y8I3VFj2uPsVf7hDjVpBTr8
+ccNPoyfJzCm69ESoBiQZnGxKrNhnELtr1wYxhr8CgYApWwf4hVrTWV1zs+pEJenh
+AtlErSqafbECNDSwS5BX8yDpu5yRBJ4xegO/rNlmb8ICRYkuJapD1xXicFOsmfUK
+0Ff8afd2Q/OfBeUdq9KA4JO9fNqzEwOWvv8Ryn4ZSYcAuLP7IVJKjjI6R7rYaO/G
+3OWJdizbykGOi0BFDu+3dw==
+-----END PRIVATE KEY-----
+ EOS
+ end
+
describe '.servers' do
it 'returns empty array if no server information is available' do
allow(Gitlab.config).to receive(:ldap).and_return('enabled' => false)
@@ -89,6 +148,42 @@ describe Gitlab::Auth::LDAP::Config do
expect(config.adapter_options[:encryption]).to include({ method: :start_tls })
end
+ it 'transforms SSL cert and key to OpenSSL objects' do
+ stub_ldap_config(
+ options: {
+ 'host' => 'ldap.example.com',
+ 'port' => 686,
+ 'encryption' => 'start_tls',
+ 'tls_options' => {
+ 'cert' => raw_cert,
+ 'key' => raw_key
+ }
+ }
+ )
+
+ expect(config.adapter_options[:encryption][:tls_options][:cert]).to be_a(OpenSSL::X509::Certificate)
+ expect(config.adapter_options[:encryption][:tls_options][:key]).to be_a(OpenSSL::PKey::RSA)
+ end
+
+ it 'logs an error when an invalid key or cert is configured' do
+ allow(Rails.logger).to receive(:error)
+ stub_ldap_config(
+ options: {
+ 'host' => 'ldap.example.com',
+ 'port' => 686,
+ 'encryption' => 'start_tls',
+ 'tls_options' => {
+ 'cert' => 'invalid cert',
+ 'key' => 'invalid_key'
+ }
+ }
+ )
+
+ config.adapter_options
+
+ expect(Rails.logger).to have_received(:error).with(/LDAP TLS Options/).twice
+ end
+
context 'when verify_certificates is enabled' do
it 'sets tls_options to OpenSSL defaults' do
stub_ldap_config(
@@ -130,7 +225,9 @@ describe Gitlab::Auth::LDAP::Config do
'host' => 'ldap.example.com',
'port' => 686,
'encryption' => 'simple_tls',
- 'ca_file' => '/etc/ca.pem'
+ 'tls_options' => {
+ 'ca_file' => '/etc/ca.pem'
+ }
}
)
@@ -145,7 +242,9 @@ describe Gitlab::Auth::LDAP::Config do
'host' => 'ldap.example.com',
'port' => 686,
'encryption' => 'simple_tls',
- 'ca_file' => ' '
+ 'tls_options' => {
+ 'ca_file' => ' '
+ }
}
)
@@ -160,7 +259,9 @@ describe Gitlab::Auth::LDAP::Config do
'host' => 'ldap.example.com',
'port' => 686,
'encryption' => 'simple_tls',
- 'ssl_version' => 'TLSv1_2'
+ 'tls_options' => {
+ 'ssl_version' => 'TLSv1_2'
+ }
}
)
@@ -175,7 +276,9 @@ describe Gitlab::Auth::LDAP::Config do
'host' => 'ldap.example.com',
'port' => 686,
'encryption' => 'simple_tls',
- 'ssl_version' => ' '
+ 'tls_options' => {
+ 'ssl_version' => ' '
+ }
}
)
@@ -223,6 +326,23 @@ describe Gitlab::Auth::LDAP::Config do
)
end
+ it 'transforms SSL cert and key to OpenSSL objects' do
+ stub_ldap_config(
+ options: {
+ 'host' => 'ldap.example.com',
+ 'port' => 686,
+ 'encryption' => 'start_tls',
+ 'tls_options' => {
+ 'cert' => raw_cert,
+ 'key' => raw_key
+ }
+ }
+ )
+
+ expect(config.omniauth_options[:tls_options][:cert]).to be_a(OpenSSL::X509::Certificate)
+ expect(config.omniauth_options[:tls_options][:key]).to be_a(OpenSSL::PKey::RSA)
+ end
+
context 'when verify_certificates is enabled' do
it 'specifies disable_verify_certificates as false' do
stub_ldap_config(
@@ -261,11 +381,13 @@ describe Gitlab::Auth::LDAP::Config do
'port' => 686,
'encryption' => 'simple_tls',
'verify_certificates' => true,
- 'ca_file' => '/etc/ca.pem'
+ 'tls_options' => {
+ 'ca_file' => '/etc/ca.pem'
+ }
}
)
- expect(config.omniauth_options).to include({ ca_file: '/etc/ca.pem' })
+ expect(config.omniauth_options[:tls_options]).to include({ ca_file: '/etc/ca.pem' })
end
end
@@ -277,11 +399,13 @@ describe Gitlab::Auth::LDAP::Config do
'port' => 686,
'encryption' => 'simple_tls',
'verify_certificates' => true,
- 'ca_file' => ' '
+ 'tls_options' => {
+ 'ca_file' => ' '
+ }
}
)
- expect(config.omniauth_options).not_to have_key(:ca_file)
+ expect(config.omniauth_options[:tls_options]).not_to have_key(:ca_file)
end
end
@@ -293,11 +417,13 @@ describe Gitlab::Auth::LDAP::Config do
'port' => 686,
'encryption' => 'simple_tls',
'verify_certificates' => true,
- 'ssl_version' => 'TLSv1_2'
+ 'tls_options' => {
+ 'ssl_version' => 'TLSv1_2'
+ }
}
)
- expect(config.omniauth_options).to include({ ssl_version: 'TLSv1_2' })
+ expect(config.omniauth_options[:tls_options]).to include({ ssl_version: 'TLSv1_2' })
end
end
@@ -309,11 +435,14 @@ describe Gitlab::Auth::LDAP::Config do
'port' => 686,
'encryption' => 'simple_tls',
'verify_certificates' => true,
- 'ssl_version' => ' '
+ 'tls_options' => {
+ 'ssl_version' => ' '
+ }
}
)
- expect(config.omniauth_options).not_to have_key(:ssl_version)
+ # OpenSSL default params include `ssl_version`, so we just check that it's not blank
+ expect(config.omniauth_options[:tls_options]).not_to include({ ssl_version: ' ' })
end
end
end
diff --git a/spec/lib/gitlab/checks/branch_check_spec.rb b/spec/lib/gitlab/checks/branch_check_spec.rb
index 77366e91dca..12beeecd470 100644
--- a/spec/lib/gitlab/checks/branch_check_spec.rb
+++ b/spec/lib/gitlab/checks/branch_check_spec.rb
@@ -48,10 +48,128 @@ describe Gitlab::Checks::BranchCheck do
context 'when project repository is empty' do
let(:project) { create(:project) }
- it 'raises an error if the user is not allowed to push to protected branches' do
- expect(user_access).to receive(:can_push_to_branch?).and_return(false)
+ context 'user is not allowed to push to protected branches' do
+ before do
+ allow(user_access)
+ .to receive(:can_push_to_branch?)
+ .and_return(false)
+ end
+
+ it 'raises an error' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /Ask a project Owner or Maintainer to create a default branch/)
+ end
+ end
+
+ context 'user is allowed to push to protected branches' do
+ before do
+ allow(user_access)
+ .to receive(:can_push_to_branch?)
+ .and_return(true)
+ end
+
+ it 'allows branch creation' do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+ end
+
+ context 'branch creation' do
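+ # A zeroed oldrev means the ref does not exist yet, i.e. this push creates the branch.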
+ let(:oldrev) { '0000000000000000000000000000000000000000' }
+ let(:ref) { 'refs/heads/feature' }
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /Ask a project Owner or Maintainer to create a default branch/)
+ context 'protected branch creation feature is disabled' do
+ before do
+ stub_feature_flags(protected_branch_creation: false)
+ end
+
+ context 'user is not allowed to push to protected branch' do
+ before do
+ allow(user_access)
+ .to receive(:can_push_to_branch?)
+ .and_return(false)
+ end
+
+ it 'raises an error' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to push code to protected branches on this project.')
+ end
+ end
+
+ context 'user is allowed to push to protected branch' do
+ before do
+ allow(user_access)
+ .to receive(:can_push_to_branch?)
+ .and_return(true)
+ end
+
+ it 'does not raise an error' do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+ end
+
+ context 'protected branch creation feature is enabled' do
+ context 'user is not allowed to create protected branches' do
+ before do
+ allow(user_access)
+ .to receive(:can_merge_to_branch?)
+ .with('feature')
+ .and_return(false)
+ end
+
+ it 'raises an error' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to create protected branches on this project.')
+ end
+ end
+
+ context 'user is allowed to create protected branches' do
+ before do
+ allow(user_access)
+ .to receive(:can_merge_to_branch?)
+ .with('feature')
+ .and_return(true)
+
+ allow(project.repository)
+ .to receive(:branch_names_contains_sha)
+ .with(newrev)
+ .and_return(['branch'])
+ end
+
+ context "newrev isn't in any protected branches" do
+ before do
+ allow(ProtectedBranch)
+ .to receive(:any_protected?)
+ .with(project, ['branch'])
+ .and_return(false)
+ end
+
+ it 'raises an error' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You can only use an existing protected branch ref as the basis of a new protected branch.')
+ end
+ end
+
+ context 'newrev is included in a protected branch' do
+ before do
+ allow(ProtectedBranch)
+ .to receive(:any_protected?)
+ .with(project, ['branch'])
+ .and_return(true)
+ end
+
+ context 'via web interface' do
+ let(:protocol) { 'web' }
+
+ it 'allows branch creation' do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+
+ context 'via SSH' do
+ it 'raises an error' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You can only create protected branches using the web interface and API.')
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/build/policy/changes_spec.rb b/spec/lib/gitlab/ci/build/policy/changes_spec.rb
index dc3329061d1..92cf0376c02 100644
--- a/spec/lib/gitlab/ci/build/policy/changes_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/changes_spec.rb
@@ -133,7 +133,7 @@ describe Gitlab::Ci::Build::Policy::Changes do
let(:seed) { double('build seed', to_resource: ci_build) }
context 'when source is merge request' do
- let(:source) { :merge_request }
+ let(:source) { :merge_request_event }
let(:merge_request) do
create(:merge_request,
diff --git a/spec/lib/gitlab/ci/build/policy/refs_spec.rb b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
index 553fc0fb9bf..b4ddbf89b70 100644
--- a/spec/lib/gitlab/ci/build/policy/refs_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
@@ -68,6 +68,20 @@ describe Gitlab::Ci::Build::Policy::Refs do
expect(described_class.new(%w[triggers]))
.not_to be_satisfied_by(pipeline)
end
+
+ context 'when source is merge_request_event' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, source: :merge_request_event) }
+
+ it 'is satisfied with only: merge_requests' do
+ expect(described_class.new(%w[merge_requests]))
+ .to be_satisfied_by(pipeline)
+ end
+
+ it 'is not satisfied with only: merge_request_event' do
+ expect(described_class.new(%w[merge_request_events]))
+ .not_to be_satisfied_by(pipeline)
+ end
+ end
end
context 'when matching a ref by a regular expression' do
diff --git a/spec/lib/gitlab/ci/build/policy/variables_spec.rb b/spec/lib/gitlab/ci/build/policy/variables_spec.rb
index c2c0742efc3..9b016901a20 100644
--- a/spec/lib/gitlab/ci/build/policy/variables_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/variables_spec.rb
@@ -15,6 +15,7 @@ describe Gitlab::Ci::Build::Policy::Variables do
before do
pipeline.variables.build(key: 'CI_PROJECT_NAME', value: '')
+ pipeline.variables.build(key: 'MY_VARIABLE', value: 'my-var')
end
describe '#satisfied_by?' do
@@ -24,6 +25,12 @@ describe Gitlab::Ci::Build::Policy::Variables do
expect(policy).to be_satisfied_by(pipeline, seed)
end
+ it 'is satisfied by a matching pipeline variable' do
+ policy = described_class.new(['$MY_VARIABLE'])
+
+ expect(policy).to be_satisfied_by(pipeline, seed)
+ end
+
it 'is not satisfied by an overridden empty variable' do
policy = described_class.new(['$CI_PROJECT_NAME'])
@@ -68,5 +75,19 @@ describe Gitlab::Ci::Build::Policy::Variables do
expect(pipeline).not_to be_persisted
expect(seed.to_resource).not_to be_persisted
end
+
+ context 'when a bridge job is used' do
+ let(:bridge) do
+ build(:ci_bridge, pipeline: pipeline, project: project, ref: 'master')
+ end
+
+ let(:seed) { double('bridge seed', to_resource: bridge) }
+
+ it 'is satisfied by a matching expression for a bridge job' do
+ policy = described_class.new(['$MY_VARIABLE'])
+
+ expect(policy).to be_satisfied_by(pipeline, seed)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/base_spec.rb b/spec/lib/gitlab/ci/config/external/file/base_spec.rb
index 1a6b3587599..fa39b32d7ab 100644
--- a/spec/lib/gitlab/ci/config/external/file/base_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/base_spec.rb
@@ -3,7 +3,7 @@
require 'fast_spec_helper'
describe Gitlab::Ci::Config::External::File::Base do
- let(:context) { described_class::Context.new(nil, 'HEAD', nil) }
+ let(:context) { described_class::Context.new(nil, 'HEAD', nil, Set.new) }
let(:test_class) do
Class.new(described_class) do
@@ -79,4 +79,20 @@ describe Gitlab::Ci::Config::External::File::Base do
end
end
end
+
+ describe '#to_hash' do
+ context 'with includes' do
+ let(:location) { 'some/file/config.yml' }
+ let(:content) { 'include: { template: Bash.gitlab-ci.yml }'}
+
+ before do
+ allow_any_instance_of(test_class)
+ .to receive(:content).and_return(content)
+ end
+
+ it 'does expand hash to include the template' do
+ expect(subject.to_hash).to include(:before_script)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
index ff67a765da0..dc14b07287e 100644
--- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
@@ -4,8 +4,10 @@ require 'spec_helper'
describe Gitlab::Ci::Config::External::File::Local do
set(:project) { create(:project, :repository) }
+ set(:user) { create(:user) }
- let(:context) { described_class::Context.new(project, '12345', nil) }
+ let(:sha) { '12345' }
+ let(:context) { described_class::Context.new(project, sha, user, Set.new) }
let(:params) { { local: location } }
let(:local_file) { described_class.new(params, context) }
@@ -103,4 +105,36 @@ describe Gitlab::Ci::Config::External::File::Local do
expect(local_file.error_message).to eq("Local file `#{location}` does not exist!")
end
end
+
+ describe '#expand_context' do
+ let(:location) { 'location.yml' }
+
+ subject { local_file.send(:expand_context) }
+
+ it 'inherits project, user and sha' do
+ is_expected.to include(user: user, project: project, sha: sha)
+ end
+ end
+
+ describe '#to_hash' do
+ context 'properly includes another local file in the same repository' do
+ let(:location) { 'some/file/config.yml' }
+ let(:content) { 'include: { local: another-config.yml }'}
+
+ let(:another_location) { 'another-config.yml' }
+ let(:another_content) { 'rspec: JOB' }
+
+ before do
+ allow(project.repository).to receive(:blob_data_at).with(sha, location)
+ .and_return(content)
+
+ allow(project.repository).to receive(:blob_data_at).with(sha, another_location)
+ .and_return(another_content)
+ end
+
+ it 'does expand the hash to include jobs from the included file' do
+ expect(local_file.to_hash).to include(:rspec)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/project_spec.rb b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
index 11809adcaf6..6e89bb1b30f 100644
--- a/spec/lib/gitlab/ci/config/external/file/project_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
@@ -3,12 +3,13 @@
require 'spec_helper'
describe Gitlab::Ci::Config::External::File::Project do
+ set(:context_project) { create(:project) }
set(:project) { create(:project, :repository) }
set(:user) { create(:user) }
let(:context_user) { user }
- let(:context) { described_class::Context.new(nil, '12345', context_user) }
- let(:subject) { described_class.new(params, context) }
+ let(:context) { described_class::Context.new(context_project, '12345', context_user, Set.new) }
+ let(:project_file) { described_class.new(params, context) }
before do
project.add_developer(user)
@@ -19,7 +20,7 @@ describe Gitlab::Ci::Config::External::File::Project do
let(:params) { { file: 'file.yml', project: 'project' } }
it 'should return true' do
- expect(subject).to be_matching
+ expect(project_file).to be_matching
end
end
@@ -27,7 +28,7 @@ describe Gitlab::Ci::Config::External::File::Project do
let(:params) { { file: 'file.yml' } }
it 'should return false' do
- expect(subject).not_to be_matching
+ expect(project_file).not_to be_matching
end
end
@@ -35,7 +36,7 @@ describe Gitlab::Ci::Config::External::File::Project do
let(:params) { { project: 'project' } }
it 'should return false' do
- expect(subject).not_to be_matching
+ expect(project_file).not_to be_matching
end
end
@@ -43,7 +44,7 @@ describe Gitlab::Ci::Config::External::File::Project do
let(:params) { {} }
it 'should return false' do
- expect(subject).not_to be_matching
+ expect(project_file).not_to be_matching
end
end
end
@@ -61,15 +62,15 @@ describe Gitlab::Ci::Config::External::File::Project do
end
it 'should return true' do
- expect(subject).to be_valid
+ expect(project_file).to be_valid
end
context 'when user does not have permission to access file' do
let(:context_user) { create(:user) }
it 'should return false' do
- expect(subject).not_to be_valid
- expect(subject.error_message).to include("Project `#{project.full_path}` not found or access denied!")
+ expect(project_file).not_to be_valid
+ expect(project_file.error_message).to include("Project `#{project.full_path}` not found or access denied!")
end
end
end
@@ -86,7 +87,7 @@ describe Gitlab::Ci::Config::External::File::Project do
end
it 'should return true' do
- expect(subject).to be_valid
+ expect(project_file).to be_valid
end
end
@@ -102,8 +103,8 @@ describe Gitlab::Ci::Config::External::File::Project do
end
it 'should return false' do
- expect(subject).not_to be_valid
- expect(subject.error_message).to include("Project `#{project.full_path}` file `/file.yml` is empty!")
+ expect(project_file).not_to be_valid
+ expect(project_file.error_message).to include("Project `#{project.full_path}` file `/file.yml` is empty!")
end
end
@@ -113,8 +114,8 @@ describe Gitlab::Ci::Config::External::File::Project do
end
it 'should return false' do
- expect(subject).not_to be_valid
- expect(subject.error_message).to include("Project `#{project.full_path}` reference `I-Do-Not-Exist` does not exist!")
+ expect(project_file).not_to be_valid
+ expect(project_file.error_message).to include("Project `#{project.full_path}` reference `I-Do-Not-Exist` does not exist!")
end
end
@@ -124,8 +125,8 @@ describe Gitlab::Ci::Config::External::File::Project do
end
it 'should return false' do
- expect(subject).not_to be_valid
- expect(subject.error_message).to include("Project `#{project.full_path}` file `/invalid-file.yml` does not exist!")
+ expect(project_file).not_to be_valid
+ expect(project_file.error_message).to include("Project `#{project.full_path}` file `/invalid-file.yml` does not exist!")
end
end
@@ -135,12 +136,22 @@ describe Gitlab::Ci::Config::External::File::Project do
end
it 'should return false' do
- expect(subject).not_to be_valid
- expect(subject.error_message).to include('Included file `/invalid-file` does not have YAML extension!')
+ expect(project_file).not_to be_valid
+ expect(project_file.error_message).to include('Included file `/invalid-file` does not have YAML extension!')
end
end
end
+ describe '#expand_context' do
+ let(:params) { { file: 'file.yml', project: project.full_path, ref: 'master' } }
+
+ subject { project_file.send(:expand_context) }
+
+ it 'inherits user, target project and sha' do
+ is_expected.to include(user: user, project: project, sha: project.commit('master').id)
+ end
+ end
+
private
def stub_project_blob(ref, path)
diff --git a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
index 3e0fda9c308..c5b32c29759 100644
--- a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Ci::Config::External::File::Remote do
- let(:context) { described_class::Context.new(nil, '12345', nil) }
+ let(:context) { described_class::Context.new(nil, '12345', nil, Set.new) }
let(:params) { { remote: location } }
let(:remote_file) { described_class.new(params, context) }
let(:location) { 'https://gitlab.com/gitlab-org/gitlab-ce/blob/1234/.gitlab-ci-1.yml' }
@@ -181,4 +181,14 @@ describe Gitlab::Ci::Config::External::File::Remote do
end
end
end
+
+ describe '#expand_context' do
+ let(:params) { { remote: 'http://remote' } }
+
+ subject { remote_file.send(:expand_context) }
+
+ it 'drops all parameters' do
+ is_expected.to include(user: nil, project: nil, sha: nil)
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/template_spec.rb b/spec/lib/gitlab/ci/config/external/file/template_spec.rb
index 1fb5655309a..8ecaf4800f8 100644
--- a/spec/lib/gitlab/ci/config/external/file/template_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/template_spec.rb
@@ -3,18 +3,21 @@
require 'spec_helper'
describe Gitlab::Ci::Config::External::File::Template do
- let(:context) { described_class::Context.new(nil, '12345') }
+ set(:project) { create(:project) }
+ set(:user) { create(:user) }
+
+ let(:context) { described_class::Context.new(project, '12345', user, Set.new) }
let(:template) { 'Auto-DevOps.gitlab-ci.yml' }
let(:params) { { template: template } }
- subject { described_class.new(params, context) }
+ let(:template_file) { described_class.new(params, context) }
describe '#matching?' do
context 'when a template is specified' do
let(:params) { { template: 'some-template' } }
it 'should return true' do
- expect(subject).to be_matching
+ expect(template_file).to be_matching
end
end
@@ -22,7 +25,7 @@ describe Gitlab::Ci::Config::External::File::Template do
let(:params) { { template: nil } }
it 'should return false' do
- expect(subject).not_to be_matching
+ expect(template_file).not_to be_matching
end
end
@@ -30,7 +33,7 @@ describe Gitlab::Ci::Config::External::File::Template do
let(:params) { {} }
it 'should return false' do
- expect(subject).not_to be_matching
+ expect(template_file).not_to be_matching
end
end
end
@@ -40,7 +43,7 @@ describe Gitlab::Ci::Config::External::File::Template do
let(:template) { 'Auto-DevOps.gitlab-ci.yml' }
it 'should return true' do
- expect(subject).to be_valid
+ expect(template_file).to be_valid
end
end
@@ -48,8 +51,8 @@ describe Gitlab::Ci::Config::External::File::Template do
let(:template) { 'Template.yml' }
it 'should return false' do
- expect(subject).not_to be_valid
- expect(subject.error_message).to include('Template file `Template.yml` is not a valid location!')
+ expect(template_file).not_to be_valid
+ expect(template_file.error_message).to include('Template file `Template.yml` is not a valid location!')
end
end
@@ -57,14 +60,14 @@ describe Gitlab::Ci::Config::External::File::Template do
let(:template) { 'I-Do-Not-Have-This-Template.gitlab-ci.yml' }
it 'should return false' do
- expect(subject).not_to be_valid
- expect(subject.error_message).to include('Included file `I-Do-Not-Have-This-Template.gitlab-ci.yml` is empty or does not exist!')
+ expect(template_file).not_to be_valid
+ expect(template_file.error_message).to include('Included file `I-Do-Not-Have-This-Template.gitlab-ci.yml` is empty or does not exist!')
end
end
end
describe '#template_name' do
- let(:template_name) { subject.send(:template_name) }
+ let(:template_name) { template_file.send(:template_name) }
context 'when template does end with .gitlab-ci.yml' do
let(:template) { 'my-template.gitlab-ci.yml' }
@@ -90,4 +93,14 @@ describe Gitlab::Ci::Config::External::File::Template do
end
end
end
+
+ describe '#expand_context' do
+ let(:location) { 'location.yml' }
+
+ subject { template_file.send(:expand_context) }
+
+ it 'drops all parameters' do
+ is_expected.to include(user: nil, project: nil, sha: nil)
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index 4cab4961b0f..136974569de 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -9,6 +9,7 @@ describe Gitlab::Ci::Config::External::Mapper do
let(:local_file) { '/lib/gitlab/ci/templates/non-existent-file.yml' }
let(:remote_url) { 'https://gitlab.com/gitlab-org/gitlab-ce/blob/1234/.gitlab-ci-1.yml' }
let(:template_file) { 'Auto-DevOps.gitlab-ci.yml' }
+ let(:expandset) { Set.new }
let(:file_content) do
<<~HEREDOC
@@ -21,7 +22,7 @@ describe Gitlab::Ci::Config::External::Mapper do
end
describe '#process' do
- subject { described_class.new(values, project: project, sha: '123456', user: user).process }
+ subject { described_class.new(values, project: project, sha: '123456', user: user, expandset: expandset).process }
context "when single 'include' keyword is defined" do
context 'when the string is a local file' do
@@ -141,5 +142,37 @@ describe Gitlab::Ci::Config::External::Mapper do
expect(subject).to be_empty
end
end
+
+ context "when duplicate 'include' is defined" do
+ let(:values) do
+ { include: [
+ { 'local' => local_file },
+ { 'local' => local_file }
+ ],
+ image: 'ruby:2.2' }
+ end
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(described_class::DuplicateIncludesError)
+ end
+ end
+
+ context "when too many 'includes' are defined" do
+ let(:values) do
+ { include: [
+ { 'local' => local_file },
+ { 'remote' => remote_url }
+ ],
+ image: 'ruby:2.2' }
+ end
+
+ before do
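+ # Lower the limit so that the two includes defined above exceed it.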
+ stub_const("#{described_class}::MAX_INCLUDES", 1)
+ end
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(described_class::TooManyIncludesError)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index 1ac58139b25..3f6f6d7c5d9 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -4,15 +4,20 @@ require 'spec_helper'
describe Gitlab::Ci::Config::External::Processor do
set(:project) { create(:project, :repository) }
+ set(:another_project) { create(:project, :repository) }
set(:user) { create(:user) }
- let(:processor) { described_class.new(values, project: project, sha: '12345', user: user) }
+ let(:expandset) { Set.new }
+ let(:sha) { '12345' }
+ let(:processor) { described_class.new(values, project: project, sha: '12345', user: user, expandset: expandset) }
before do
project.add_developer(user)
end
describe "#perform" do
+ subject { processor.perform }
+
context 'when no external files defined' do
let(:values) { { image: 'ruby:2.2' } }
@@ -190,5 +195,80 @@ describe Gitlab::Ci::Config::External::Processor do
expect(processor.perform[:image]).to eq('ruby:2.2')
end
end
+
+ context "when a nested includes are defined" do
+ let(:values) do
+ {
+ include: [
+ { local: '/local/file.yml' }
+ ],
+ image: 'ruby:2.2'
+ }
+ end
+
+ before do
+ allow(project.repository).to receive(:blob_data_at).with('12345', '/local/file.yml') do
+ <<~HEREDOC
+ include:
+ - template: Ruby.gitlab-ci.yml
+ - remote: http://my.domain.com/config.yml
+ - project: #{another_project.full_path}
+ file: /templates/my-workflow.yml
+ HEREDOC
+ end
+
+ allow_any_instance_of(Repository).to receive(:blob_data_at).with(another_project.commit.id, '/templates/my-workflow.yml') do
+ <<~HEREDOC
+ include:
+ - local: /templates/my-build.yml
+ HEREDOC
+ end
+
+ allow_any_instance_of(Repository).to receive(:blob_data_at).with(another_project.commit.id, '/templates/my-build.yml') do
+ <<~HEREDOC
+ my_build:
+ script: echo Hello World
+ HEREDOC
+ end
+
+ WebMock.stub_request(:get, 'http://my.domain.com/config.yml').to_return(body: 'remote_build: { script: echo Hello World }')
+ end
+
+ context 'when project is public' do
+ before do
+ another_project.update!(visibility: 'public')
+ end
+
+ it 'properly expands all includes' do
+ is_expected.to include(:my_build, :remote_build, :rspec)
+ end
+ end
+
+ context 'when user is reporter of another project' do
+ before do
+ another_project.add_reporter(user)
+ end
+
+ it 'properly expands all includes' do
+ is_expected.to include(:my_build, :remote_build, :rspec)
+ end
+ end
+
+ context 'when user is not allowed' do
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Ci::Config::External::Processor::IncludeError, /not found or access denied/)
+ end
+ end
+
+ context 'when too many nested includes are defined' do
+ before do
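+ # Lower the nested include limit so the include chain stubbed above exceeds it.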
+ stub_const('Gitlab::Ci::Config::External::Mapper::MAX_INCLUDES', 1)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Ci::Config::External::Processor::IncludeError, /Maximum of 1 nested/)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
index c9d1d09a938..3debd42ac65 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
@@ -96,7 +96,7 @@ describe Gitlab::Ci::Pipeline::Chain::Build do
context 'when pipeline is running for a merge request' do
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
- source: :merge_request,
+ source: :merge_request_event,
origin_ref: 'feature',
checkout_sha: project.commit.id,
after_sha: nil,
@@ -117,7 +117,7 @@ describe Gitlab::Ci::Pipeline::Chain::Build do
end
it 'correctly indicated that this is a merge request pipeline' do
- expect(pipeline).to be_merge_request
+ expect(pipeline).to be_merge_request_event
expect(pipeline.merge_request).to eq(merge_request)
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb
index 053bc421649..e6c6a82b463 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb
@@ -115,7 +115,7 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Config do
let(:pipeline) { build_stubbed(:ci_pipeline, project: project) }
let(:merge_request_pipeline) do
- build(:ci_pipeline, source: :merge_request, project: project)
+ build(:ci_pipeline, source: :merge_request_event, project: project)
end
let(:chain) { described_class.new(merge_request_pipeline, command).tap(&:perform!) }
diff --git a/spec/lib/gitlab/ci/templates/templates_spec.rb b/spec/lib/gitlab/ci/templates/templates_spec.rb
index 0dd74399a47..fbbd58280a9 100644
--- a/spec/lib/gitlab/ci/templates/templates_spec.rb
+++ b/spec/lib/gitlab/ci/templates/templates_spec.rb
@@ -3,9 +3,40 @@
require 'spec_helper'
describe "CI YML Templates" do
- Gitlab::Template::GitlabCiYmlTemplate.all.each do |template|
- it "#{template.name} should be valid" do
- expect { Gitlab::Ci::YamlProcessor.new(template.content) }.not_to raise_error
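+ # Abstract templates do not define runnable jobs on their own; they only become valid once a concrete job is added.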
+ ABSTRACT_TEMPLATES = %w[Serverless].freeze
+
+ def self.concrete_templates
+ Gitlab::Template::GitlabCiYmlTemplate.all.reject do |template|
+ ABSTRACT_TEMPLATES.include?(template.name)
+ end
+ end
+
+ def self.abstract_templates
+ Gitlab::Template::GitlabCiYmlTemplate.all.select do |template|
+ ABSTRACT_TEMPLATES.include?(template.name)
+ end
+ end
+
+ describe 'concrete templates with CI/CD jobs' do
+ concrete_templates.each do |template|
+ it "#{template.name} template should be valid" do
+ expect { Gitlab::Ci::YamlProcessor.new(template.content) }
+ .not_to raise_error
+ end
+ end
+ end
+
+ describe 'abstract templates without concrete jobs defined' do
+ abstract_templates.each do |template|
+ it "#{template.name} template should be valid after being implemented" do
+ content = template.content + <<~EOS
+ concrete_build_implemented_by_a_user:
+ stage: build
+ script: do something
+ EOS
+
+ expect { Gitlab::Ci::YamlProcessor.new(content) }.not_to raise_error
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/variables/collection/item_spec.rb b/spec/lib/gitlab/ci/variables/collection/item_spec.rb
index 8bf44acb228..3ff2fe18c15 100644
--- a/spec/lib/gitlab/ci/variables/collection/item_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection/item_spec.rb
@@ -6,7 +6,7 @@ describe Gitlab::Ci::Variables::Collection::Item do
let(:expected_value) { variable_value }
let(:variable) do
- { key: variable_key, value: variable_value, public: true }
+ { key: variable_key, value: variable_value, public: true, masked: false }
end
describe '.new' do
@@ -88,7 +88,7 @@ describe Gitlab::Ci::Variables::Collection::Item do
resource = described_class.fabricate(variable)
expect(resource).to be_a(described_class)
- expect(resource).to eq(key: 'CI_VAR', value: '123', public: false)
+ expect(resource).to eq(key: 'CI_VAR', value: '123', public: false, masked: false)
end
it 'supports using another collection item' do
@@ -134,7 +134,21 @@ describe Gitlab::Ci::Variables::Collection::Item do
.to_runner_variable
expect(runner_variable)
- .to eq(key: 'VAR', value: 'value', public: true, file: true)
+ .to eq(key: 'VAR', value: 'value', public: true, file: true, masked: false)
+ end
+ end
+
+ context 'when variable masking is disabled' do
+ before do
+ stub_feature_flags(variable_masking: false)
+ end
+
+ it 'does not expose the masked field to the runner' do
+ runner_variable = described_class
+ .new(key: 'VAR', value: 'value', masked: true)
+ .to_runner_variable
+
+ expect(runner_variable).to eq(key: 'VAR', value: 'value', public: true)
end
end
end
diff --git a/spec/lib/gitlab/ci/variables/collection_spec.rb b/spec/lib/gitlab/ci/variables/collection_spec.rb
index 5c91816a586..8e732d44d5d 100644
--- a/spec/lib/gitlab/ci/variables/collection_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe Gitlab::Ci::Variables::Collection do
describe '.new' do
it 'can be initialized with an array' do
- variable = { key: 'VAR', value: 'value', public: true }
+ variable = { key: 'VAR', value: 'value', public: true, masked: false }
collection = described_class.new([variable])
@@ -66,6 +66,14 @@ describe Gitlab::Ci::Variables::Collection do
expect(collection).to include(key: 'VAR_3', value: '3', public: true)
end
+ it 'does not concatenate resource if it is undefined' do
+ collection = described_class.new([{ key: 'VAR_1', value: '1' }])
+
+ collection.concat(nil)
+
+ expect(collection).to be_one
+ end
+
it 'returns self' do
expect(subject.concat([key: 'VAR', value: 'test']))
.to eq subject
@@ -93,7 +101,7 @@ describe Gitlab::Ci::Variables::Collection do
collection = described_class.new([{ key: 'TEST', value: '1' }])
expect(collection.to_runner_variables)
- .to eq [{ key: 'TEST', value: '1', public: true }]
+ .to eq [{ key: 'TEST', value: '1', public: true, masked: false }]
end
end
diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb
index caf9fc5442c..17d5eae24f5 100644
--- a/spec/lib/gitlab/current_settings_spec.rb
+++ b/spec/lib/gitlab/current_settings_spec.rb
@@ -143,7 +143,7 @@ describe Gitlab::CurrentSettings do
it_behaves_like 'a non-persisted ApplicationSetting object'
- it 'uses the value from the DB attribute if present and not overriden by an accessor' do
+ it 'uses the value from the DB attribute if present and not overridden by an accessor' do
expect(current_settings.home_page_url).to eq(db_settings.home_page_url)
end
end
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index 00cb1e6446a..66cd8171c12 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -265,6 +265,7 @@ describe Gitlab::Danger::Helper do
'changelogs/foo' | :none
'ee/changelogs/foo' | :none
+ 'locale/gitlab.pot' | :none
'FOO' | :unknown
'foo' | :unknown
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
index 248cca25a2c..81419e51635 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
@@ -19,7 +19,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :delete
Project.find(project.id)
end
- describe "#remove_last_ocurrence" do
+ describe "#remove_last_occurrence" do
it "removes only the last occurrence of a string" do
input = "this/is/a-word-to-replace/namespace/with/a-word-to-replace"
diff --git a/spec/lib/gitlab/dependency_linker/composer_json_linker_spec.rb b/spec/lib/gitlab/dependency_linker/composer_json_linker_spec.rb
index 4d222564fd0..0ebd8994636 100644
--- a/spec/lib/gitlab/dependency_linker/composer_json_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/composer_json_linker_spec.rb
@@ -50,8 +50,8 @@ describe Gitlab::DependencyLinker::ComposerJsonLinker do
%{<a href="#{url}" rel="nofollow noreferrer noopener" target="_blank">#{name}</a>}
end
- it 'links the module name' do
- expect(subject).to include(link('laravel/laravel', 'https://packagist.org/packages/laravel/laravel'))
+ it 'does not link the module name' do
+ expect(subject).not_to include(link('laravel/laravel', 'https://packagist.org/packages/laravel/laravel'))
end
it 'links the homepage' do
diff --git a/spec/lib/gitlab/dependency_linker/gemfile_linker_spec.rb b/spec/lib/gitlab/dependency_linker/gemfile_linker_spec.rb
index a97803b119e..f00f6b47b94 100644
--- a/spec/lib/gitlab/dependency_linker/gemfile_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/gemfile_linker_spec.rb
@@ -41,13 +41,16 @@ describe Gitlab::DependencyLinker::GemfileLinker do
end
it 'links dependencies' do
- expect(subject).to include(link('rails', 'https://rubygems.org/gems/rails'))
expect(subject).to include(link('rails-deprecated_sanitizer', 'https://rubygems.org/gems/rails-deprecated_sanitizer'))
- expect(subject).to include(link('responders', 'https://rubygems.org/gems/responders'))
- expect(subject).to include(link('sprockets', 'https://rubygems.org/gems/sprockets'))
expect(subject).to include(link('default_value_for', 'https://rubygems.org/gems/default_value_for'))
end
+ it 'links to external dependencies' do
+ expect(subject).to include(link('rails', 'https://github.com/rails/rails'))
+ expect(subject).to include(link('responders', 'https://github.com/rails/responders'))
+ expect(subject).to include(link('sprockets', 'https://gitlab.example.com/gems/sprockets'))
+ end
+
it 'links GitHub repos' do
expect(subject).to include(link('rails/rails', 'https://github.com/rails/rails'))
expect(subject).to include(link('rails/responders', 'https://github.com/rails/responders'))
diff --git a/spec/lib/gitlab/dependency_linker/gemspec_linker_spec.rb b/spec/lib/gitlab/dependency_linker/gemspec_linker_spec.rb
index 24ad7d12f4c..6c6a5d70576 100644
--- a/spec/lib/gitlab/dependency_linker/gemspec_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/gemspec_linker_spec.rb
@@ -43,8 +43,8 @@ describe Gitlab::DependencyLinker::GemspecLinker do
%{<a href="#{url}" rel="nofollow noreferrer noopener" target="_blank">#{name}</a>}
end
- it 'links the gem name' do
- expect(subject).to include(link('gitlab_git', 'https://rubygems.org/gems/gitlab_git'))
+ it 'does not link the gem name' do
+ expect(subject).not_to include(link('gitlab_git', 'https://rubygems.org/gems/gitlab_git'))
end
it 'links the license' do
diff --git a/spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb b/spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb
index 1e8b72afb7b..9050127af7f 100644
--- a/spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/package_json_linker_spec.rb
@@ -33,7 +33,8 @@ describe Gitlab::DependencyLinker::PackageJsonLinker do
"express": "4.2.x",
"bigpipe": "bigpipe/pagelet",
"plates": "https://github.com/flatiron/plates/tarball/master",
- "karma": "^1.4.1"
+ "karma": "^1.4.1",
+ "random": "git+https://EdOverflow@github.com/example/example.git"
},
"devDependencies": {
"vows": "^0.7.0",
@@ -51,8 +52,8 @@ describe Gitlab::DependencyLinker::PackageJsonLinker do
%{<a href="#{url}" rel="nofollow noreferrer noopener" target="_blank">#{name}</a>}
end
- it 'links the module name' do
- expect(subject).to include(link('module-name', 'https://npmjs.com/package/module-name'))
+ it 'does not link the module name' do
+ expect(subject).not_to include(link('module-name', 'https://npmjs.com/package/module-name'))
end
it 'links the homepage' do
@@ -71,14 +72,21 @@ describe Gitlab::DependencyLinker::PackageJsonLinker do
expect(subject).to include(link('primus', 'https://npmjs.com/package/primus'))
expect(subject).to include(link('async', 'https://npmjs.com/package/async'))
expect(subject).to include(link('express', 'https://npmjs.com/package/express'))
- expect(subject).to include(link('bigpipe', 'https://npmjs.com/package/bigpipe'))
- expect(subject).to include(link('plates', 'https://npmjs.com/package/plates'))
expect(subject).to include(link('karma', 'https://npmjs.com/package/karma'))
expect(subject).to include(link('vows', 'https://npmjs.com/package/vows'))
expect(subject).to include(link('assume', 'https://npmjs.com/package/assume'))
expect(subject).to include(link('pre-commit', 'https://npmjs.com/package/pre-commit'))
end
+ it 'links dependencies to the URL detected in the value' do
+ expect(subject).to include(link('bigpipe', 'https://github.com/bigpipe/pagelet'))
+ expect(subject).to include(link('plates', 'https://github.com/flatiron/plates/tarball/master'))
+ end
+
+ it 'does not link to NPM when the git URL is invalid' do
+ expect(subject).not_to include(link('random', 'https://npmjs.com/package/random'))
+ end
+
it 'links GitHub repos' do
expect(subject).to include(link('bigpipe/pagelet', 'https://github.com/bigpipe/pagelet'))
end
diff --git a/spec/lib/gitlab/dependency_linker/parser/gemfile_spec.rb b/spec/lib/gitlab/dependency_linker/parser/gemfile_spec.rb
new file mode 100644
index 00000000000..9bfb1b13a2b
--- /dev/null
+++ b/spec/lib/gitlab/dependency_linker/parser/gemfile_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe Gitlab::DependencyLinker::Parser::Gemfile do
+ describe '#parse' do
+ let(:file_content) do
+ <<-CONTENT.strip_heredoc
+ source 'https://rubygems.org'
+
+ gem "rails", '4.2.6', github: "rails/rails"
+ gem 'rails-deprecated_sanitizer', '~> 1.0.3'
+ gem 'responders', '~> 2.0', :github => 'rails/responders'
+ gem 'sprockets', '~> 3.6.0', git: 'https://gitlab.example.com/gems/sprockets'
+ gem 'default_value_for', '~> 3.0.0'
+ CONTENT
+ end
+
+ subject { described_class.new(file_content).parse(keyword: 'gem') }
+
+ def fetch_package(name)
+ subject.find { |package| package.name == name }
+ end
+
+ it 'returns parsed packages' do
+ expect(subject.size).to eq(5)
+ expect(subject).to all(be_a(Gitlab::DependencyLinker::Package))
+ end
+
+ it 'returns packages with the expected name, github_ref and git_ref' do
+ expect(fetch_package('rails')).to have_attributes(name: 'rails',
+ github_ref: 'rails/rails',
+ git_ref: nil)
+
+ expect(fetch_package('sprockets')).to have_attributes(name: 'sprockets',
+ github_ref: nil,
+ git_ref: 'https://gitlab.example.com/gems/sprockets')
+
+ expect(fetch_package('default_value_for')).to have_attributes(name: 'default_value_for',
+ github_ref: nil,
+ git_ref: nil)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/dependency_linker/podfile_linker_spec.rb b/spec/lib/gitlab/dependency_linker/podfile_linker_spec.rb
index cdfd7ad9826..8f1b523653e 100644
--- a/spec/lib/gitlab/dependency_linker/podfile_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/podfile_linker_spec.rb
@@ -43,7 +43,10 @@ describe Gitlab::DependencyLinker::PodfileLinker do
it 'links packages' do
expect(subject).to include(link('AFNetworking', 'https://cocoapods.org/pods/AFNetworking'))
- expect(subject).to include(link('Interstellar/Core', 'https://cocoapods.org/pods/Interstellar'))
+ end
+
+ it 'links external packages' do
+ expect(subject).to include(link('Interstellar/Core', 'https://github.com/ashfurrow/Interstellar.git'))
end
it 'links Git repos' do
diff --git a/spec/lib/gitlab/dependency_linker/podspec_linker_spec.rb b/spec/lib/gitlab/dependency_linker/podspec_linker_spec.rb
index ed60ab45955..bacec830103 100644
--- a/spec/lib/gitlab/dependency_linker/podspec_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker/podspec_linker_spec.rb
@@ -42,8 +42,8 @@ describe Gitlab::DependencyLinker::PodspecLinker do
%{<a href="#{url}" rel="nofollow noreferrer noopener" target="_blank">#{name}</a>}
end
- it 'links the gem name' do
- expect(subject).to include(link('Reachability', 'https://cocoapods.org/pods/Reachability'))
+ it 'does not link the pod name' do
+ expect(subject).not_to include(link('Reachability', 'https://cocoapods.org/pods/Reachability'))
end
it 'links the license' do
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index 862590268ca..611c3e946ed 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -8,6 +8,47 @@ describe Gitlab::Diff::File do
let(:diff) { commit.raw_diffs.first }
let(:diff_file) { described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository) }
+ def create_file(file_name, content)
+ Files::CreateService.new(
+ project,
+ project.owner,
+ commit_message: 'Update',
+ start_branch: branch_name,
+ branch_name: branch_name,
+ file_path: file_name,
+ file_content: content
+ ).execute
+
+ project.commit(branch_name).diffs.diff_files.first
+ end
+
+ def update_file(file_name, content)
+ Files::UpdateService.new(
+ project,
+ project.owner,
+ commit_message: 'Update',
+ start_branch: branch_name,
+ branch_name: branch_name,
+ file_path: file_name,
+ file_content: content
+ ).execute
+
+ project.commit(branch_name).diffs.diff_files.first
+ end
+
+ def delete_file(file_name)
+ Files::DeleteService.new(
+ project,
+ project.owner,
+ commit_message: 'Update',
+ start_branch: branch_name,
+ branch_name: branch_name,
+ file_path: file_name
+ ).execute
+
+ project.commit(branch_name).diffs.diff_files.first
+ end
+
describe '#diff_lines' do
let(:diff_lines) { diff_file.diff_lines }
@@ -675,47 +716,6 @@ describe Gitlab::Diff::File do
end
let(:branch_name) { 'master' }
- def create_file(file_name, content)
- Files::CreateService.new(
- project,
- project.owner,
- commit_message: 'Update',
- start_branch: branch_name,
- branch_name: branch_name,
- file_path: file_name,
- file_content: content
- ).execute
-
- project.commit(branch_name).diffs.diff_files.first
- end
-
- def update_file(file_name, content)
- Files::UpdateService.new(
- project,
- project.owner,
- commit_message: 'Update',
- start_branch: branch_name,
- branch_name: branch_name,
- file_path: file_name,
- file_content: content
- ).execute
-
- project.commit(branch_name).diffs.diff_files.first
- end
-
- def delete_file(file_name)
- Files::DeleteService.new(
- project,
- project.owner,
- commit_message: 'Update',
- start_branch: branch_name,
- branch_name: branch_name,
- file_path: file_name
- ).execute
-
- project.commit(branch_name).diffs.diff_files.first
- end
-
context 'when empty file is created' do
it 'returns true' do
diff_file = create_file('empty.md', '')
@@ -751,4 +751,123 @@ describe Gitlab::Diff::File do
end
end
end
+
+ describe '#fully_expanded?' do
+ let(:project) do
+ create(:project, :custom_repo, files: {})
+ end
+ let(:branch_name) { 'master' }
+
+ context 'when empty file is created' do
+ it 'returns true' do
+ diff_file = create_file('empty.md', '')
+
+ expect(diff_file.fully_expanded?).to be_truthy
+ end
+ end
+
+ context 'when empty file is deleted' do
+ it 'returns true' do
+ create_file('empty.md', '')
+ diff_file = delete_file('empty.md')
+
+ expect(diff_file.fully_expanded?).to be_truthy
+ end
+ end
+
+ context 'when a short file has its last line removed' do
+ it 'returns true' do
+ create_file('with-content.md', (1..3).to_a.join("\n"))
+ diff_file = update_file('with-content.md', (1..2).to_a.join("\n"))
+
+ expect(diff_file.fully_expanded?).to be_truthy
+ end
+ end
+
+ context 'when a single line is added to empty file' do
+ it 'returns true' do
+ create_file('empty.md', '')
+ diff_file = update_file('empty.md', 'new content')
+
+ expect(diff_file.fully_expanded?).to be_truthy
+ end
+ end
+
+ context 'when single line file is changed' do
+ it 'returns true' do
+ create_file('file.md', 'foo')
+ diff_file = update_file('file.md', 'bar')
+
+ expect(diff_file.fully_expanded?).to be_truthy
+ end
+ end
+
+ context 'when long file is changed' do
+ before do
+ create_file('file.md', (1..999).to_a.join("\n"))
+ end
+
+ context 'when first line is removed' do
+ it 'returns false' do
+ diff_file = update_file('file.md', (2..999).to_a.join("\n"))
+
+ expect(diff_file.fully_expanded?).to be_falsey
+ end
+ end
+
+ context 'when last line is removed' do
+ it 'returns false' do
+ diff_file = update_file('file.md', (1..998).to_a.join("\n"))
+
+ expect(diff_file.fully_expanded?).to be_falsey
+ end
+ end
+
+ context 'when first and last lines are removed' do
+ it 'returns false' do
+ diff_file = update_file('file.md', (2..998).to_a.join("\n"))
+
+ expect(diff_file.fully_expanded?).to be_falsey
+ end
+ end
+
+ context 'when first and last lines are changed' do
+ it 'returns false' do
+ content = (2..998).to_a
+ content.prepend('a')
+ content.append('z')
+ content = content.join("\n")
+
+ diff_file = update_file('file.md', content)
+
+ expect(diff_file.fully_expanded?).to be_falsey
+ end
+ end
+
+ context 'when every line is changed' do
+ it 'returns true' do
+ diff_file = update_file('file.md', "hi\n" * 999)
+
+ expect(diff_file.fully_expanded?).to be_truthy
+ end
+ end
+
+ context 'when all contents are cleared' do
+ it 'returns true' do
+ diff_file = update_file('file.md', "")
+
+ expect(diff_file.fully_expanded?).to be_truthy
+ end
+ end
+
+ context 'when file is binary' do
+ it 'returns true' do
+ diff_file = update_file('file.md', (1..998).to_a.join("\n"))
+ allow(diff_file).to receive(:binary?).and_return(true)
+
+ expect(diff_file.fully_expanded?).to be_truthy
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/blob_spec.rb b/spec/lib/gitlab/git/blob_spec.rb
index a1b5cea88c0..10bc82e24d1 100644
--- a/spec/lib/gitlab/git/blob_spec.rb
+++ b/spec/lib/gitlab/git/blob_spec.rb
@@ -18,7 +18,7 @@ describe Gitlab::Git::Blob, :seed_helper do
end
end
- describe '.find' do
+ shared_examples '.find' do
context 'nil path' do
let(:blob) { Gitlab::Git::Blob.find(repository, SeedRepo::Commit::ID, nil) }
@@ -128,6 +128,20 @@ describe Gitlab::Git::Blob, :seed_helper do
end
end
+ describe '.find with Gitaly enabled' do
+ it_behaves_like '.find'
+ end
+
+ describe '.find with Rugged enabled', :enable_rugged do
+ it 'calls out to the Rugged implementation' do
+ allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
+
+ described_class.find(repository, SeedRepo::Commit::ID, 'files/images/6049019_460s.jpg')
+ end
+
+ it_behaves_like '.find'
+ end
+
describe '.raw' do
let(:raw_blob) { Gitlab::Git::Blob.raw(repository, SeedRepo::RubyBlob::ID) }
let(:bad_blob) { Gitlab::Git::Blob.raw(repository, SeedRepo::BigCommit::ID) }
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 2611ebed25b..3fb41a626b2 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -112,7 +112,7 @@ describe Gitlab::Git::Commit, :seed_helper do
end
context 'Class methods' do
- describe '.find' do
+ shared_examples '.find' do
it "should return first head commit if without params" do
expect(described_class.last(repository).id).to eq(
rugged_repo.head.target.oid
@@ -154,6 +154,20 @@ describe Gitlab::Git::Commit, :seed_helper do
end
end
+ describe '.find with Gitaly enabled' do
+ it_should_behave_like '.find'
+ end
+
+ describe '.find with Rugged enabled', :enable_rugged do
+ it 'calls out to the Rugged implementation' do
+ allow_any_instance_of(Rugged).to receive(:rev_parse).with(SeedRepo::Commit::ID).and_call_original
+
+ described_class.find(repository, SeedRepo::Commit::ID)
+ end
+
+ it_should_behave_like '.find'
+ end
+
describe '.last_for_path' do
context 'no path' do
subject { described_class.last_for_path(repository, 'master') }
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index e3dd02f1478..7e6dfa30e37 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -619,16 +619,6 @@ describe Gitlab::Git::Repository, :seed_helper do
repository.search_files_by_content('search-files-by-content', 'search-files-by-content-branch')
end
end
-
- it_should_behave_like 'search files by content' do
- let(:search_results) do
- repository.gitaly_repository_client.search_files_by_content(
- 'search-files-by-content-branch',
- 'search-files-by-content',
- chunked_response: false
- )
- end
- end
end
describe '#find_remote_root_ref' do
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index 4a4d69490a3..60060c41616 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -3,7 +3,7 @@ require "spec_helper"
describe Gitlab::Git::Tree, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
- context :repo do
+ shared_examples :repo do
let(:tree) { Gitlab::Git::Tree.where(repository, SeedRepo::Commit::ID) }
it { expect(tree).to be_kind_of Array }
@@ -12,6 +12,17 @@ describe Gitlab::Git::Tree, :seed_helper do
it { expect(tree.select(&:file?).size).to eq(10) }
it { expect(tree.select(&:submodule?).size).to eq(2) }
+ it 'returns an empty array when called with an invalid ref' do
+ expect(described_class.where(repository, 'foobar-does-not-exist')).to eq([])
+ end
+
+ it 'returns a list of tree objects' do
+ entries = described_class.where(repository, SeedRepo::Commit::ID, 'files', true)
+
+ expect(entries.count).to be > 10
+ expect(entries).to all(be_a(Gitlab::Git::Tree))
+ end
+
describe '#dir?' do
let(:dir) { tree.select(&:dir?).first }
@@ -20,8 +31,8 @@ describe Gitlab::Git::Tree, :seed_helper do
it { expect(dir.commit_id).to eq(SeedRepo::Commit::ID) }
it { expect(dir.name).to eq('encoding') }
it { expect(dir.path).to eq('encoding') }
- it { expect(dir.flat_path).to eq('encoding') }
it { expect(dir.mode).to eq('40000') }
+ it { expect(dir.flat_path).to eq('encoding') }
context :subdir do
let(:subdir) { Gitlab::Git::Tree.where(repository, SeedRepo::Commit::ID, 'files').first }
@@ -44,6 +55,51 @@ describe Gitlab::Git::Tree, :seed_helper do
it { expect(subdir_file.path).to eq('files/ruby/popen.rb') }
it { expect(subdir_file.flat_path).to eq('files/ruby/popen.rb') }
end
+
+ context :flat_path do
+ let(:filename) { 'files/flat/path/correct/content.txt' }
+ let(:oid) { create_file(filename) }
+ let(:subdir_file) { Gitlab::Git::Tree.where(repository, oid, 'files/flat').first }
+ let(:repository_rugged) { Rugged::Repository.new(File.join(SEED_STORAGE_PATH, TEST_REPO_PATH)) }
+
+ it { expect(subdir_file.flat_path).to eq('files/flat/path/correct') }
+ end
+
+ def create_file(path)
+ oid = repository_rugged.write('test', :blob)
+ index = repository_rugged.index
+ index.add(path: filename, oid: oid, mode: 0100644)
+
+ options = commit_options(
+ repository_rugged,
+ index,
+ repository_rugged.head.target,
+ 'HEAD',
+ 'Add new file')
+
+ Rugged::Commit.create(repository_rugged, options)
+ end
+
+ # Build the options hash that's passed to Rugged::Commit#create
+ def commit_options(repo, index, target, ref, message)
+ options = {}
+ options[:tree] = index.write_tree(repo)
+ options[:author] = {
+ email: "test@example.com",
+ name: "Test Author",
+ time: Time.gm(2014, "mar", 3, 20, 15, 1)
+ }
+ options[:committer] = {
+ email: "test@example.com",
+ name: "Test Author",
+ time: Time.gm(2014, "mar", 3, 20, 15, 1)
+ }
+ options[:message] ||= message
+ options[:parents] = repo.empty? ? [] : [target].compact
+ options[:update_ref] = ref
+
+ options
+ end
end
describe '#file?' do
@@ -79,9 +135,17 @@ describe Gitlab::Git::Tree, :seed_helper do
end
end
- describe '#where' do
- it 'returns an empty array when called with an invalid ref' do
- expect(described_class.where(repository, 'foobar-does-not-exist')).to eq([])
+ describe '.where with Gitaly enabled' do
+ it_behaves_like :repo
+ end
+
+ describe '.where with Rugged enabled', :enable_rugged do
+ it 'calls out to the Rugged implementation' do
+ allow_any_instance_of(Rugged).to receive(:lookup).with(SeedRepo::Commit::ID)
+
+ described_class.where(repository, SeedRepo::Commit::ID, 'files', false)
end
+
+ it_behaves_like :repo
end
end
diff --git a/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb b/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb
index c89913ec8e9..bb10be2a4dc 100644
--- a/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/storage_settings_spec.rb
@@ -26,4 +26,14 @@ describe Gitlab::GitalyClient::StorageSettings do
end
end
end
+
+ describe '.disk_access_denied?' do
+ it 'return false when Rugged is enabled', :enable_rugged do
+ expect(described_class.disk_access_denied?).to be_falsey
+ end
+
+ it 'returns true when Rugged is not enabled' do
+ expect(described_class.disk_access_denied?).to be_truthy
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
index b1cac3b6e46..120a07ff2b3 100644
--- a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
@@ -4,6 +4,7 @@ describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis
let(:project) { create(:project, import_source: 'foo/bar') }
let(:client) { double(:client) }
let(:importer) { described_class.new(project, client) }
+ let(:due_on) { Time.new(2017, 2, 1, 12, 00) }
let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
@@ -14,6 +15,20 @@ describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis
title: '1.0',
description: 'The first release',
state: 'open',
+ due_on: due_on,
+ created_at: created_at,
+ updated_at: updated_at
+ )
+ end
+
+ let(:milestone2) do
+ double(
+ :milestone,
+ number: 1,
+ title: '1.0',
+ description: 'The first release',
+ state: 'open',
+ due_on: nil,
created_at: created_at,
updated_at: updated_at
)
@@ -72,6 +87,7 @@ describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis
describe '#build' do
let(:milestone_hash) { importer.build(milestone) }
+ let(:milestone_hash2) { importer.build(milestone2) }
it 'returns the attributes of the milestone as a Hash' do
expect(milestone_hash).to be_an_instance_of(Hash)
@@ -98,6 +114,14 @@ describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis
expect(milestone_hash[:state]).to eq(:active)
end
+ it 'includes the due date' do
+ expect(milestone_hash[:due_date]).to eq(due_on.to_date)
+ end
+
+ it 'returns a nil due date when due_on is nil' do
+ expect(milestone_hash2[:due_date]).to be nil
+ end
+
it 'includes the created timestamp' do
expect(milestone_hash[:created_at]).to eq(created_at)
end
diff --git a/spec/lib/gitlab/hashed_storage/migrator_spec.rb b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
index 3942f168ceb..6154b3e2f76 100644
--- a/spec/lib/gitlab/hashed_storage/migrator_spec.rb
+++ b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
@@ -1,21 +1,29 @@
require 'spec_helper'
describe Gitlab::HashedStorage::Migrator do
- describe '#bulk_schedule' do
- it 'schedules job to StorageMigratorWorker' do
+ describe '#bulk_schedule_migration' do
+ it 'schedules job to HashedStorage::MigratorWorker' do
Sidekiq::Testing.fake! do
- expect { subject.bulk_schedule(start: 1, finish: 5) }.to change(HashedStorage::MigratorWorker.jobs, :size).by(1)
+ expect { subject.bulk_schedule_migration(start: 1, finish: 5) }.to change(HashedStorage::MigratorWorker.jobs, :size).by(1)
+ end
+ end
+ end
+
+ describe '#bulk_schedule_rollback' do
+ it 'schedules job to HashedStorage::RollbackerWorker' do
+ Sidekiq::Testing.fake! do
+ expect { subject.bulk_schedule_rollback(start: 1, finish: 5) }.to change(HashedStorage::RollbackerWorker.jobs, :size).by(1)
end
end
end
describe '#bulk_migrate' do
- let(:projects) { create_list(:project, 2, :legacy_storage) }
+ let(:projects) { create_list(:project, 2, :legacy_storage, :empty_repo) }
let(:ids) { projects.map(&:id) }
- it 'enqueue jobs to ProjectMigrateHashedStorageWorker' do
+ it 'enqueues jobs to HashedStorage::ProjectMigrateWorker' do
Sidekiq::Testing.fake! do
- expect { subject.bulk_migrate(start: ids.min, finish: ids.max) }.to change(ProjectMigrateHashedStorageWorker.jobs, :size).by(2)
+ expect { subject.bulk_migrate(start: ids.min, finish: ids.max) }.to change(HashedStorage::ProjectMigrateWorker.jobs, :size).by(2)
end
end
@@ -32,13 +40,53 @@ describe Gitlab::HashedStorage::Migrator do
subject.bulk_migrate(start: ids.min, finish: ids.max)
end
- it 'has migrated projects set as writable' do
+ it 'has all projects migrated and set as writable' do
perform_enqueued_jobs do
subject.bulk_migrate(start: ids.min, finish: ids.max)
end
projects.each do |project|
- expect(project.reload.repository_read_only?).to be_falsey
+ project.reload
+
+ expect(project.hashed_storage?(:repository)).to be_truthy
+ expect(project.repository_read_only?).to be_falsey
+ end
+ end
+ end
+
+ describe '#bulk_rollback' do
+ let(:projects) { create_list(:project, 2, :empty_repo) }
+ let(:ids) { projects.map(&:id) }
+
+ it 'enqueues jobs to HashedStorage::ProjectRollbackWorker' do
+ Sidekiq::Testing.fake! do
+ expect { subject.bulk_rollback(start: ids.min, finish: ids.max) }.to change(HashedStorage::ProjectRollbackWorker.jobs, :size).by(2)
+ end
+ end
+
+ it 'rescues and logs exceptions' do
+ allow_any_instance_of(Project).to receive(:rollback_to_legacy_storage!).and_raise(StandardError)
+ expect { subject.bulk_rollback(start: ids.min, finish: ids.max) }.not_to raise_error
+ end
+
+ it 'delegates each project in the specified range to #rollback' do
+ projects.each do |project|
+ expect(subject).to receive(:rollback).with(project)
+ end
+
+ subject.bulk_rollback(start: ids.min, finish: ids.max)
+ end
+
+ it 'has all projects rolled back and set as writable' do
+ perform_enqueued_jobs do
+ subject.bulk_rollback(start: ids.min, finish: ids.max)
+ end
+
+ projects.each do |project|
+ project.reload
+
+ expect(project.legacy_storage?).to be_truthy
+ expect(project.repository_read_only?).to be_falsey
end
end
end
@@ -48,7 +96,7 @@ describe Gitlab::HashedStorage::Migrator do
it 'enqueues project migration job' do
Sidekiq::Testing.fake! do
- expect { subject.migrate(project) }.to change(ProjectMigrateHashedStorageWorker.jobs, :size).by(1)
+ expect { subject.migrate(project) }.to change(HashedStorage::ProjectMigrateWorker.jobs, :size).by(1)
end
end
@@ -79,7 +127,7 @@ describe Gitlab::HashedStorage::Migrator do
it 'doesnt enqueue any migration job' do
Sidekiq::Testing.fake! do
- expect { subject.migrate(project) }.not_to change(ProjectMigrateHashedStorageWorker.jobs, :size)
+ expect { subject.migrate(project) }.not_to change(HashedStorage::ProjectMigrateWorker.jobs, :size)
end
end
@@ -88,4 +136,50 @@ describe Gitlab::HashedStorage::Migrator do
end
end
end
+
+ describe '#rollback' do
+ let(:project) { create(:project, :empty_repo) }
+
+ it 'enqueues project rollback job' do
+ Sidekiq::Testing.fake! do
+ expect { subject.rollback(project) }.to change(HashedStorage::ProjectRollbackWorker.jobs, :size).by(1)
+ end
+ end
+
+ it 'rescues and logs exceptions' do
+ allow(project).to receive(:rollback_to_hashed_storage!).and_raise(StandardError)
+
+ expect { subject.rollback(project) }.not_to raise_error
+ end
+
+ it 'rolls back project storage' do
+ perform_enqueued_jobs do
+ subject.rollback(project)
+ end
+
+ expect(project.reload.legacy_storage?).to be_truthy
+ end
+
+ it 'has rolled-back project set as writable' do
+ perform_enqueued_jobs do
+ subject.rollback(project)
+ end
+
+ expect(project.reload.repository_read_only?).to be_falsey
+ end
+
+ context 'when project is already on legacy storage' do
+ let(:project) { create(:project, :legacy_storage, :empty_repo) }
+
+ it 'doesnt enqueue any rollback job' do
+ Sidekiq::Testing.fake! do
+ expect { subject.rollback(project) }.not_to change(HashedStorage::ProjectRollbackWorker.jobs, :size)
+ end
+ end
+
+ it 'returns false' do
+ expect(subject.rollback(project)).to be_falsey
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 018a5d3dd3d..01da3ea7081 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -127,7 +127,7 @@ ci_pipelines:
- scheduled_actions
- artifacts
- pipeline_schedule
-- merge_requests
+- merge_requests_as_head_pipeline
- merge_request
- deployments
- environments
diff --git a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
index 68eaa70e6b6..4b234411a44 100644
--- a/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
+++ b/spec/lib/gitlab/import_export/merge_request_parser_spec.rb
@@ -41,4 +41,20 @@ describe Gitlab::ImportExport::MergeRequestParser do
expect(parsed_merge_request).to eq(merge_request)
end
+
+ context 'when the merge request has diffs' do
+ let(:merge_request) do
+ build(:merge_request, source_project: forked_project, target_project: project)
+ end
+
+ context 'when the diff is invalid' do
+ let(:merge_request_diff) { build(:merge_request_diff, merge_request: merge_request, base_commit_sha: 'foobar') }
+
+ it 'sets the diff to nil' do
+ expect(merge_request_diff).to be_invalid
+ expect(merge_request_diff.merge_request).to eq merge_request
+ expect(parsed_merge_request.merge_request_diff).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/json_cache_spec.rb b/spec/lib/gitlab/json_cache_spec.rb
index b52078e8556..2cae8ec031a 100644
--- a/spec/lib/gitlab/json_cache_spec.rb
+++ b/spec/lib/gitlab/json_cache_spec.rb
@@ -297,13 +297,39 @@ describe Gitlab::JsonCache do
expect(result).to eq(broadcast_message)
end
+ context 'when the cached value is an instance of ActiveRecord::Base' do
+ it 'returns a persisted record when id is set' do
+ backend.write(expanded_key, broadcast_message.to_json)
+
+ result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+
+ expect(result).to be_persisted
+ end
+
+ it 'returns a new record when id is nil' do
+ backend.write(expanded_key, build(:broadcast_message).to_json)
+
+ result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+
+ expect(result).to be_new_record
+ end
+
+ it 'returns a new record when id is missing' do
+ backend.write(expanded_key, build(:broadcast_message).attributes.except('id').to_json)
+
+ result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+
+ expect(result).to be_new_record
+ end
+ end
+
it "returns the result of the block when 'as' option is nil" do
result = cache.fetch(key, as: nil) { 'block result' }
expect(result).to eq('block result')
end
- it "returns the result of the block when 'as' option is not informed" do
+ it "returns the result of the block when 'as' option is missing" do
result = cache.fetch(key) { 'block result' }
expect(result).to eq('block result')
diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
index 02364e92149..978e64c4407 100644
--- a/spec/lib/gitlab/kubernetes/kube_client_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
@@ -50,6 +50,36 @@ describe Gitlab::Kubernetes::KubeClient do
end
end
+ describe '#initialize' do
+ shared_examples 'local address' do
+ it 'blocks local addresses' do
+ expect { client }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError)
+ end
+
+ context 'when local requests are allowed' do
+ before do
+ stub_application_setting(allow_local_requests_from_hooks_and_services: true)
+ end
+
+ it 'allows local addresses' do
+ expect { client }.not_to raise_error
+ end
+ end
+ end
+
+ context 'localhost address' do
+ let(:api_url) { 'http://localhost:22' }
+
+ it_behaves_like 'local address'
+ end
+
+ context 'private network address' do
+ let(:api_url) { 'http://192.168.1.2:3003' }
+
+ it_behaves_like 'local address'
+ end
+ end
+
describe '#core_client' do
subject { client.core_client }
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index e90e0aba0a4..312e5e55af8 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -107,7 +107,7 @@ describe Gitlab::PathRegex do
git = Gitlab.config.git.bin_path
tracked = `cd #{Rails.root} && #{git} ls-files public`
.split("\n")
- .map { |entry| entry.gsub('public/', '') }
+ .map { |entry| entry.start_with?('public/-/') ? '-' : entry.gsub('public/', '') }
.uniq
tracked + %w(assets uploads)
end
diff --git a/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb b/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb
index 420218a695a..936447b8474 100644
--- a/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb
+++ b/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb
@@ -15,7 +15,7 @@ describe Gitlab::Prometheus::Queries::MatchedMetricQuery do
[{ '__name__' => 'metric_a' },
{ '__name__' => 'metric_b' }]
end
- let(:partialy_empty_series_info) { [{ '__name__' => 'metric_a', 'environment' => '' }] }
+ let(:partially_empty_series_info) { [{ '__name__' => 'metric_a', 'environment' => '' }] }
let(:empty_series_info) { [] }
let(:client) { double('prometheus_client') }
@@ -60,7 +60,7 @@ describe Gitlab::Prometheus::Queries::MatchedMetricQuery do
context 'one of the series info was not found' do
before do
- allow(client).to receive(:series).and_return(partialy_empty_series_info)
+ allow(client).to receive(:series).and_return(partially_empty_series_info)
end
it 'responds with one active and one missing metric' do
expect(subject.query).to eq([{ group: 'name', priority: 1, active_metrics: 1, metrics_missing_requirements: 1 }])
diff --git a/spec/lib/gitlab/quick_actions/command_definition_spec.rb b/spec/lib/gitlab/quick_actions/command_definition_spec.rb
index 5dae82a63b4..136cfb5bcc5 100644
--- a/spec/lib/gitlab/quick_actions/command_definition_spec.rb
+++ b/spec/lib/gitlab/quick_actions/command_definition_spec.rb
@@ -72,7 +72,7 @@ describe Gitlab::QuickActions::CommandDefinition do
end
describe "#execute" do
- let(:context) { OpenStruct.new(run: false) }
+ let(:context) { OpenStruct.new(run: false, commands_executed_count: nil) }
context "when the command is a noop" do
it "doesn't execute the command" do
@@ -80,6 +80,7 @@ describe Gitlab::QuickActions::CommandDefinition do
subject.execute(context, nil)
+ expect(context.commands_executed_count).to be_nil
expect(context.run).to be false
end
end
@@ -97,6 +98,7 @@ describe Gitlab::QuickActions::CommandDefinition do
it "doesn't execute the command" do
subject.execute(context, nil)
+ expect(context.commands_executed_count).to be_nil
expect(context.run).to be false
end
end
@@ -112,6 +114,7 @@ describe Gitlab::QuickActions::CommandDefinition do
subject.execute(context, true)
expect(context.run).to be true
+ expect(context.commands_executed_count).to eq(1)
end
end
@@ -120,6 +123,7 @@ describe Gitlab::QuickActions::CommandDefinition do
subject.execute(context, nil)
expect(context.run).to be true
+ expect(context.commands_executed_count).to eq(1)
end
end
end
@@ -134,6 +138,7 @@ describe Gitlab::QuickActions::CommandDefinition do
subject.execute(context, true)
expect(context.run).to be true
+ expect(context.commands_executed_count).to eq(1)
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb b/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb
new file mode 100644
index 00000000000..ff8c0825ee4
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb
@@ -0,0 +1,74 @@
+require 'spec_helper'
+
+describe Gitlab::SidekiqMiddleware::MemoryKiller do
+ subject { described_class.new }
+ let(:pid) { 999 }
+
+ let(:worker) { double(:worker, class: 'TestWorker') }
+ let(:job) { { 'jid' => 123 } }
+ let(:queue) { 'test_queue' }
+
+ def run
+ thread = subject.call(worker, job, queue) { nil }
+ thread&.join
+ end
+
+ before do
+ allow(subject).to receive(:get_rss).and_return(10.kilobytes)
+ allow(subject).to receive(:pid).and_return(pid)
+ end
+
+ context 'when MAX_RSS is set to 0' do
+ before do
+ stub_const("#{described_class}::MAX_RSS", 0)
+ end
+
+ it 'does nothing' do
+ expect(subject).not_to receive(:sleep)
+
+ run
+ end
+ end
+
+ context 'when MAX_RSS is exceeded' do
+ before do
+ stub_const("#{described_class}::MAX_RSS", 5.kilobytes)
+ end
+
+ it 'sends the TSTP, TERM and KILL signals at expected times' do
+ expect(subject).to receive(:sleep).with(15 * 60).ordered
+ expect(Process).to receive(:kill).with('SIGTSTP', pid).ordered
+
+ expect(subject).to receive(:sleep).with(30).ordered
+ expect(Process).to receive(:kill).with('SIGTERM', pid).ordered
+
+ expect(subject).to receive(:sleep).with(10).ordered
+ expect(Process).to receive(:kill).with('SIGKILL', pid).ordered
+
+ run
+ end
+
+ it 'sends TSTP and TERM to the pid, but KILL to the pgroup, when running as process leader' do
+ allow(Process).to receive(:getpgrp) { pid }
+ allow(subject).to receive(:sleep)
+
+ expect(Process).to receive(:kill).with('SIGTSTP', pid).ordered
+ expect(Process).to receive(:kill).with('SIGTERM', pid).ordered
+ expect(Process).to receive(:kill).with('SIGKILL', "-#{pid}").ordered
+
+ run
+ end
+ end
+
+ context 'when MAX_RSS is not exceeded' do
+ before do
+ stub_const("#{described_class}::MAX_RSS", 15.kilobytes)
+ end
+
+ it 'does nothing' do
+ expect(subject).not_to receive(:sleep)
+
+ run
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/shutdown_spec.rb b/spec/lib/gitlab/sidekiq_middleware/shutdown_spec.rb
deleted file mode 100644
index 0001795c3f0..00000000000
--- a/spec/lib/gitlab/sidekiq_middleware/shutdown_spec.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-require 'spec_helper'
-
-describe Gitlab::SidekiqMiddleware::Shutdown do
- subject { described_class.new }
-
- let(:pid) { Process.pid }
- let(:worker) { double(:worker, class: 'TestWorker') }
- let(:job) { { 'jid' => 123 } }
- let(:queue) { 'test_queue' }
- let(:block) { proc { nil } }
-
- def run
- subject.call(worker, job, queue) { block.call }
- described_class.shutdown_thread&.join
- end
-
- def pop_trace
- subject.trace.pop(true)
- end
-
- before do
- allow(subject).to receive(:get_rss).and_return(10.kilobytes)
- described_class.clear_shutdown_thread
- end
-
- context 'when MAX_RSS is set to 0' do
- before do
- stub_const("#{described_class}::MAX_RSS", 0)
- end
-
- it 'does nothing' do
- expect(subject).not_to receive(:sleep)
-
- run
- end
- end
-
- def expect_shutdown_sequence
- expect(pop_trace).to eq([:sleep, 15 * 60])
- expect(pop_trace).to eq([:kill, 'SIGTSTP', pid])
-
- expect(pop_trace).to eq([:sleep, 30])
- expect(pop_trace).to eq([:kill, 'SIGTERM', pid])
-
- expect(pop_trace).to eq([:sleep, 10])
- expect(pop_trace).to eq([:kill, 'SIGKILL', pid])
- end
-
- context 'when MAX_RSS is exceeded' do
- before do
- stub_const("#{described_class}::MAX_RSS", 5.kilobytes)
- end
-
- it 'sends the TSTP, TERM and KILL signals at expected times' do
- run
-
- expect_shutdown_sequence
- end
- end
-
- context 'when MAX_RSS is not exceeded' do
- before do
- stub_const("#{described_class}::MAX_RSS", 15.kilobytes)
- end
-
- it 'does nothing' do
- expect(subject).not_to receive(:sleep)
-
- run
- end
- end
-
- context 'when WantShutdown is raised' do
- let(:block) { proc { raise described_class::WantShutdown } }
-
- it 'starts the shutdown sequence and re-raises the exception' do
- expect { run }.to raise_exception(described_class::WantShutdown)
-
- # We can't expect 'run' to have joined on the shutdown thread, because
- # it hit an exception.
- shutdown_thread = described_class.shutdown_thread
- expect(shutdown_thread).not_to be_nil
- shutdown_thread.join
-
- expect_shutdown_sequence
- end
- end
-end
diff --git a/spec/lib/gitlab/sidekiq_signals_spec.rb b/spec/lib/gitlab/sidekiq_signals_spec.rb
new file mode 100644
index 00000000000..77ecd1840d2
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_signals_spec.rb
@@ -0,0 +1,69 @@
+require 'spec_helper'
+
+describe Gitlab::SidekiqSignals do
+ describe '.install' do
+ let(:result) { Hash.new { |h, k| h[k] = 0 } }
+ let(:int_handler) { -> (_) { result['INT'] += 1 } }
+ let(:term_handler) { -> (_) { result['TERM'] += 1 } }
+ let(:other_handler) { -> (_) { result['OTHER'] += 1 } }
+ let(:signals) { { 'INT' => int_handler, 'TERM' => term_handler, 'OTHER' => other_handler } }
+
+ context 'not a process group leader' do
+ before do
+ allow(Process).to receive(:getpgrp) { Process.pid * 2 }
+ end
+
+ it 'does nothing' do
+ expect { described_class.install!(signals) }
+ .not_to change { signals }
+ end
+ end
+
+ context 'as a process group leader' do
+ before do
+ allow(Process).to receive(:getpgrp) { Process.pid }
+ end
+
+ it 'installs its own signal handlers for TERM and INT only' do
+ described_class.install!(signals)
+
+ expect(signals['INT']).not_to eq(int_handler)
+ expect(signals['TERM']).not_to eq(term_handler)
+ expect(signals['OTHER']).to eq(other_handler)
+ end
+
+ %w[INT TERM].each do |signal|
+ it "installs a forwarding signal handler for #{signal}" do
+ described_class.install!(signals)
+
+ expect(described_class)
+ .to receive(:trap)
+ .with(signal, 'IGNORE')
+ .and_return(:original_trap)
+ .ordered
+
+ expect(Process)
+ .to receive(:kill)
+ .with(signal, 0)
+ .ordered
+
+ expect(described_class)
+ .to receive(:trap)
+ .with(signal, :original_trap)
+ .ordered
+
+ signals[signal].call(:cli)
+
+ expect(result[signal]).to eq(1)
+ end
+
+ it "raises if sidekiq no longer traps SIG#{signal}" do
+ signals.delete(signal)
+
+ expect { described_class.install!(signals) }
+ .to raise_error(/Sidekiq should have registered/)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab_spec.rb b/spec/lib/gitlab_spec.rb
index 5f7a0cca351..8232715d00e 100644
--- a/spec/lib/gitlab_spec.rb
+++ b/spec/lib/gitlab_spec.rb
@@ -95,4 +95,18 @@ describe Gitlab do
expect(described_class.com?).to eq false
end
end
+
+ describe '.ee?' do
+ it 'returns true when using Enterprise Edition' do
+ stub_const('License', Class.new)
+
+ expect(described_class.ee?).to eq(true)
+ end
+
+ it 'returns false when using Community Edition' do
+ hide_const('License')
+
+ expect(described_class.ee?).to eq(false)
+ end
+ end
end
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
index 1024e1a25ea..8ccbd90ddb8 100644
--- a/spec/lib/object_storage/direct_upload_spec.rb
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -121,7 +121,7 @@ describe ObjectStorage::DirectUpload do
expect(subject[:MultipartUpload][:PartURLs].length).to eq(2)
end
- it 'part size is mimimum, 5MB' do
+ it 'part size is minimum, 5MB' do
expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
end
end
diff --git a/spec/mailers/abuse_report_mailer_spec.rb b/spec/mailers/abuse_report_mailer_spec.rb
index bda892083b3..f96870cc112 100644
--- a/spec/mailers/abuse_report_mailer_spec.rb
+++ b/spec/mailers/abuse_report_mailer_spec.rb
@@ -4,25 +4,24 @@ describe AbuseReportMailer do
include EmailSpec::Matchers
describe '.notify' do
- context 'with admin_notification_email set' do
- before do
- stub_application_setting(admin_notification_email: 'admin@example.com')
- end
+ before do
+ stub_application_setting(admin_notification_email: 'admin@example.com')
+ end
- it 'sends to the admin_notification_email' do
- report = create(:abuse_report)
+ let(:report) { create(:abuse_report) }
+
+ subject { described_class.notify(report.id) }
- mail = described_class.notify(report.id)
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
- expect(mail).to deliver_to 'admin@example.com'
+ context 'with admin_notification_email set' do
+ it 'sends to the admin_notification_email' do
+ is_expected.to deliver_to 'admin@example.com'
end
it 'includes the user in the subject' do
- report = create(:abuse_report)
-
- mail = described_class.notify(report.id)
-
- expect(mail).to have_subject "#{report.user.name} (#{report.user.username}) was reported for abuse"
+ is_expected.to have_subject "#{report.user.name} (#{report.user.username}) was reported for abuse"
end
end
diff --git a/spec/mailers/email_rejection_mailer_spec.rb b/spec/mailers/email_rejection_mailer_spec.rb
new file mode 100644
index 00000000000..bbe0a50ae8e
--- /dev/null
+++ b/spec/mailers/email_rejection_mailer_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe EmailRejectionMailer do
+ include EmailSpec::Matchers
+
+ describe '#rejection' do
+ let(:raw_email) { 'From: someone@example.com\nraw email here' }
+
+ subject { described_class.rejection('some rejection reason', raw_email) }
+
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+ end
+end
diff --git a/spec/mailers/emails/auto_devops_spec.rb b/spec/mailers/emails/auto_devops_spec.rb
index 839caf3f50e..dd7c12c3143 100644
--- a/spec/mailers/emails/auto_devops_spec.rb
+++ b/spec/mailers/emails/auto_devops_spec.rb
@@ -13,6 +13,9 @@ describe Emails::AutoDevops do
subject { Notify.autodevops_disabled_email(pipeline, owner.email) }
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
it 'sents email with correct subject' do
is_expected.to have_subject("#{project.name} | Auto DevOps pipeline was disabled for #{project.name}")
end
diff --git a/spec/mailers/emails/issues_spec.rb b/spec/mailers/emails/issues_spec.rb
index 09253cf8003..5b5bd6f4308 100644
--- a/spec/mailers/emails/issues_spec.rb
+++ b/spec/mailers/emails/issues_spec.rb
@@ -29,5 +29,14 @@ describe Emails::Issues do
expect(subject).to have_body_text "23, 34, 58"
end
+
+ context 'with header and footer' do
+ let(:results) { { success: 165, error_lines: [], parse_error: false } }
+
+ subject { Notify.import_issues_csv_email(user.id, project.id, results) }
+
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+ end
end
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 4f578c48d5b..3c8897ed37c 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -53,6 +53,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Issue link'
it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'has the correct subject and body' do
aggregate_failures do
@@ -72,6 +74,9 @@ describe Notify do
context 'when sent with a reason' do
subject { described_class.new_issue_email(issue.assignees.first.id, issue.id, NotificationReason::ASSIGNED) }
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
it 'includes the reason in a header' do
is_expected.to have_header('X-GitLab-NotificationReason', NotificationReason::ASSIGNED)
end
@@ -99,6 +104,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Issue link'
it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
sender = subject.header[:from].addrs[0]
@@ -118,6 +125,9 @@ describe Notify do
context 'when sent with a reason' do
subject { described_class.reassigned_issue_email(recipient.id, issue.id, [previous_assignee.id], current_user.id, NotificationReason::ASSIGNED) }
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
it 'includes the reason in a header' do
is_expected.to have_header('X-GitLab-NotificationReason', NotificationReason::ASSIGNED)
end
@@ -134,6 +144,8 @@ describe Notify do
it_behaves_like 'it should show Gmail Actions View Issue link'
it_behaves_like 'a user cannot unsubscribe through footer link'
it_behaves_like 'an email with a labels subscriptions link in its footer'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
sender = subject.header[:from].addrs[0]
@@ -173,6 +185,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Issue link'
it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
sender = subject.header[:from].addrs[0]
@@ -194,23 +208,53 @@ describe Notify do
let(:new_issue) { create(:issue) }
subject { described_class.issue_moved_email(recipient, issue, new_issue, current_user) }
- it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
- let(:model) { issue }
- end
- it_behaves_like 'it should show Gmail Actions View Issue link'
- it_behaves_like 'an unsubscribeable thread'
+ context 'when a user has permissions to access the new issue' do
+ before do
+ new_issue.project.add_developer(recipient)
+ end
+
+ it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
+ let(:model) { issue }
+ end
+ it_behaves_like 'it should show Gmail Actions View Issue link'
+ it_behaves_like 'an unsubscribeable thread'
+
+ it 'contains description about action taken' do
+ is_expected.to have_body_text 'Issue was moved to another project'
+ end
+
+ it 'has the correct subject and body' do
+ new_issue_url = project_issue_path(new_issue.project, new_issue)
+
+ aggregate_failures do
+ is_expected.to have_referable_subject(issue, reply: true)
+ is_expected.to have_body_text(new_issue_url)
+ is_expected.to have_body_text(project_issue_path(project, issue))
+ end
+ end
- it 'contains description about action taken' do
- is_expected.to have_body_text 'Issue was moved to another project'
+ it 'contains the issue title' do
+ is_expected.to have_body_text new_issue.title
+ end
end
- it 'has the correct subject and body' do
- new_issue_url = project_issue_path(new_issue.project, new_issue)
+ context 'when a user does not have permissions to access the new issue' do
+ it 'has the correct subject and body' do
+ new_issue_url = project_issue_path(new_issue.project, new_issue)
- aggregate_failures do
- is_expected.to have_referable_subject(issue, reply: true)
- is_expected.to have_body_text(new_issue_url)
- is_expected.to have_body_text(project_issue_path(project, issue))
+ aggregate_failures do
+ is_expected.to have_referable_subject(issue, reply: true)
+ is_expected.not_to have_body_text(new_issue_url)
+ is_expected.to have_body_text(project_issue_path(project, issue))
+ end
+ end
+
+ it 'does not contain the issue title' do
+ is_expected.not_to have_body_text new_issue.title
+ end
+
+ it 'contains information about missing permissions' do
+ is_expected.to have_body_text "You don't have access to the project."
end
end
end
@@ -226,6 +270,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Merge request link'
it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'has the correct subject and body' do
aggregate_failures do
@@ -243,6 +289,9 @@ describe Notify do
context 'when sent with a reason' do
subject { described_class.new_merge_request_email(merge_request.assignee_id, merge_request.id, NotificationReason::ASSIGNED) }
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
it 'includes the reason in a header' do
is_expected.to have_header('X-GitLab-NotificationReason', NotificationReason::ASSIGNED)
end
@@ -270,6 +319,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Merge request link'
it_behaves_like "an unsubscribeable thread"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
sender = subject.header[:from].addrs[0]
@@ -289,6 +340,9 @@ describe Notify do
context 'when sent with a reason' do
subject { described_class.reassigned_merge_request_email(recipient.id, merge_request.id, previous_assignee.id, current_user.id, NotificationReason::ASSIGNED) }
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
it 'includes the reason in a header' do
is_expected.to have_header('X-GitLab-NotificationReason', NotificationReason::ASSIGNED)
end
@@ -313,6 +367,8 @@ describe Notify do
it_behaves_like 'it should show Gmail Actions View Merge request link'
it_behaves_like "an unsubscribeable thread"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'contains the description' do
is_expected.to have_body_text(merge_request.description)
@@ -329,6 +385,8 @@ describe Notify do
it_behaves_like 'it should show Gmail Actions View Merge request link'
it_behaves_like 'a user cannot unsubscribe through footer link'
it_behaves_like 'an email with a labels subscriptions link in its footer'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
sender = subject.header[:from].addrs[0]
@@ -352,6 +410,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Merge request link'
it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
sender = subject.header[:from].addrs[0]
@@ -379,6 +439,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Merge request link'
it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the merge author' do
sender = subject.header[:from].addrs[0]
@@ -413,6 +475,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Merge request link'
it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the merge request author' do
sender = subject.header[:from].addrs[0]
@@ -442,6 +506,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Merge request link'
it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the push user' do
sender = subject.header[:from].addrs[0]
@@ -482,6 +548,9 @@ describe Notify do
subject { described_class.note_issue_email(recipient.id, third_note.id) }
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
it 'has In-Reply-To header pointing to previous note in discussion' do
expect(subject.header['In-Reply-To'].message_ids).to eq(["note_#{second_note.id}@#{host}"])
end
@@ -502,6 +571,9 @@ describe Notify do
subject { described_class.note_issue_email(recipient.id, note.id) }
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
it 'has In-Reply-To header pointing to the issue' do
expect(subject.header['In-Reply-To'].message_ids).to eq(["issue_#{note.noteable.id}@#{host}"])
end
@@ -518,6 +590,9 @@ describe Notify do
subject { described_class.note_project_snippet_email(project_snippet_note.author_id, project_snippet_note.id) }
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
it_behaves_like 'an answer to an existing thread with reply-by-email enabled' do
let(:model) { project_snippet }
end
@@ -535,6 +610,8 @@ describe Notify do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like "a user cannot unsubscribe through footer link"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'has the correct subject and body' do
is_expected.to have_subject("#{project.name} | Project was moved")
@@ -559,6 +636,8 @@ describe Notify do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like "a user cannot unsubscribe through footer link"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'contains all the useful information' do
to_emails = subject.header[:to].addrs.map(&:address)
@@ -582,6 +661,8 @@ describe Notify do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like "a user cannot unsubscribe through footer link"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'contains all the useful information' do
is_expected.to have_subject "Access to the #{project.full_name} project was denied"
@@ -599,6 +680,8 @@ describe Notify do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like "a user cannot unsubscribe through footer link"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'contains all the useful information' do
is_expected.to have_subject "Access to the #{project.full_name} project was granted"
@@ -629,6 +712,8 @@ describe Notify do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like "a user cannot unsubscribe through footer link"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'contains all the useful information' do
is_expected.to have_subject "Invitation to join the #{project.full_name} project"
@@ -653,6 +738,8 @@ describe Notify do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like "a user cannot unsubscribe through footer link"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'contains all the useful information' do
is_expected.to have_subject 'Invitation accepted'
@@ -676,6 +763,8 @@ describe Notify do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like "a user cannot unsubscribe through footer link"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'contains all the useful information' do
is_expected.to have_subject 'Invitation declined'
@@ -708,6 +797,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Commit link'
it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'has the correct subject and body' do
aggregate_failures do
@@ -732,6 +823,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Merge request link'
it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'has the correct subject and body' do
aggregate_failures do
@@ -756,6 +849,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Issue link'
it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'has the correct subject and body' do
aggregate_failures do
@@ -819,6 +914,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Commit link'
it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'has the correct subject' do
is_expected.to have_subject "Re: #{project.name} | #{commit.title} (#{commit.short_id})"
@@ -845,6 +942,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Merge request link'
it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'has the correct subject' do
is_expected.to have_referable_subject(merge_request, reply: true)
@@ -871,6 +970,8 @@ describe Notify do
end
it_behaves_like 'it should show Gmail Actions View Issue link'
it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'has the correct subject' do
is_expected.to have_referable_subject(issue, reply: true)
@@ -948,6 +1049,8 @@ describe Notify do
it_behaves_like 'an email for a note on a diff discussion', :diff_note_on_commit
it_behaves_like 'it should show Gmail Actions View Commit link'
it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
end
describe 'on a merge request' do
@@ -958,6 +1061,8 @@ describe Notify do
it_behaves_like 'an email for a note on a diff discussion', :diff_note_on_merge_request
it_behaves_like 'it should show Gmail Actions View Merge request link'
it_behaves_like 'an unsubscribeable thread'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
end
end
end
@@ -976,6 +1081,8 @@ describe Notify do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like "a user cannot unsubscribe through footer link"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'contains all the useful information' do
to_emails = subject.header[:to].addrs.map(&:address)
@@ -998,6 +1105,8 @@ describe Notify do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like "a user cannot unsubscribe through footer link"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'contains all the useful information' do
is_expected.to have_subject "Access to the #{group.name} group was denied"
@@ -1014,6 +1123,8 @@ describe Notify do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like "a user cannot unsubscribe through footer link"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'contains all the useful information' do
is_expected.to have_subject "Access to the #{group.name} group was granted"
@@ -1044,6 +1155,8 @@ describe Notify do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like "a user cannot unsubscribe through footer link"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'contains all the useful information' do
is_expected.to have_subject "Invitation to join the #{group.name} group"
@@ -1068,6 +1181,8 @@ describe Notify do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like "a user cannot unsubscribe through footer link"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'contains all the useful information' do
is_expected.to have_subject 'Invitation accepted'
@@ -1091,6 +1206,8 @@ describe Notify do
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like "a user cannot unsubscribe through footer link"
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'contains all the useful information' do
is_expected.to have_subject 'Invitation declined'
@@ -1140,6 +1257,8 @@ describe Notify do
it_behaves_like 'a user cannot unsubscribe through footer link'
it_behaves_like 'an email with X-GitLab headers containing project details'
it_behaves_like 'an email that contains a header with author username'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
sender = subject.header[:from].addrs[0]
@@ -1165,6 +1284,8 @@ describe Notify do
it_behaves_like "a user cannot unsubscribe through footer link"
it_behaves_like 'an email with X-GitLab headers containing project details'
it_behaves_like 'an email that contains a header with author username'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
sender = subject.header[:from].addrs[0]
@@ -1189,6 +1310,8 @@ describe Notify do
it_behaves_like 'a user cannot unsubscribe through footer link'
it_behaves_like 'an email with X-GitLab headers containing project details'
it_behaves_like 'an email that contains a header with author username'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
sender = subject.header[:from].addrs[0]
@@ -1210,6 +1333,8 @@ describe Notify do
it_behaves_like 'a user cannot unsubscribe through footer link'
it_behaves_like 'an email with X-GitLab headers containing project details'
it_behaves_like 'an email that contains a header with author username'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
sender = subject.header[:from].addrs[0]
@@ -1237,6 +1362,8 @@ describe Notify do
it_behaves_like 'a user cannot unsubscribe through footer link'
it_behaves_like 'an email with X-GitLab headers containing project details'
it_behaves_like 'an email that contains a header with author username'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
sender = subject.header[:from].addrs[0]
@@ -1328,6 +1455,8 @@ describe Notify do
it_behaves_like 'a user cannot unsubscribe through footer link'
it_behaves_like 'an email with X-GitLab headers containing project details'
it_behaves_like 'an email that contains a header with author username'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
sender = subject.header[:from].addrs[0]
@@ -1348,6 +1477,11 @@ describe Notify do
describe 'HTML emails setting' do
let(:multipart_mail) { described_class.project_was_moved_email(project.id, user.id, "gitlab/gitlab") }
+ subject { multipart_mail }
+
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
context 'when disabled' do
it 'only sends the text template' do
stub_application_setting(html_emails_enabled: false)
@@ -1386,6 +1520,8 @@ describe Notify do
subject { described_class.note_personal_snippet_email(personal_snippet_note.author_id, personal_snippet_note.id) }
it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
it 'has the correct subject and body' do
is_expected.to have_referable_subject(personal_snippet, reply: true)
diff --git a/spec/mailers/repository_check_mailer_spec.rb b/spec/mailers/repository_check_mailer_spec.rb
index 00613c7b671..384660f7221 100644
--- a/spec/mailers/repository_check_mailer_spec.rb
+++ b/spec/mailers/repository_check_mailer_spec.rb
@@ -17,5 +17,12 @@ describe RepositoryCheckMailer do
expect(mail).to have_subject 'GitLab Admin | 3 projects failed their last repository check'
end
+
+ context 'with footer and header' do
+ subject { described_class.notify(1) }
+
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+ end
end
end
diff --git a/spec/models/active_session_spec.rb b/spec/models/active_session_spec.rb
index 129b2f92683..e128fe8a4b7 100644
--- a/spec/models/active_session_spec.rb
+++ b/spec/models/active_session_spec.rb
@@ -7,7 +7,10 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_shared_state do
end
end
- let(:session) { double(:session, id: '6919a6f1bb119dd7396fadc38fd18d0d') }
+ let(:session) do
+ double(:session, { id: '6919a6f1bb119dd7396fadc38fd18d0d',
+ '[]': {} })
+ end
let(:request) do
double(:request, {
diff --git a/spec/models/appearance_spec.rb b/spec/models/appearance_spec.rb
index 28d482adebf..3e95aa2b5dd 100644
--- a/spec/models/appearance_spec.rb
+++ b/spec/models/appearance_spec.rb
@@ -78,4 +78,22 @@ describe Appearance do
it { is_expected.to allow_value(hex).for(:message_font_color) }
it { is_expected.not_to allow_value('000').for(:message_font_color) }
end
+
+ describe 'email_header_and_footer_enabled' do
+ context 'default email_header_and_footer_enabled flag value' do
+ it 'returns email_header_and_footer_enabled as false' do
+ appearance = build(:appearance)
+
+ expect(appearance.email_header_and_footer_enabled?).to eq(false)
+ end
+ end
+
+ context 'when setting email_header_and_footer_enabled flag value' do
+ it 'returns email_header_and_footer_enabled as true' do
+ appearance = build(:appearance, email_header_and_footer_enabled: true)
+
+ expect(appearance.email_header_and_footer_enabled?).to eq(true)
+ end
+ end
+ end
end
diff --git a/spec/models/board_group_recent_visit_spec.rb b/spec/models/board_group_recent_visit_spec.rb
index 59ad4e5417e..558be61824f 100644
--- a/spec/models/board_group_recent_visit_spec.rb
+++ b/spec/models/board_group_recent_visit_spec.rb
@@ -50,15 +50,25 @@ describe BoardGroupRecentVisit do
end
describe '#latest' do
- it 'returns the most recent visited' do
- board2 = create(:board, group: group)
- board3 = create(:board, group: group)
+ def create_visit(time)
+ create :board_group_recent_visit, group: group, user: user, updated_at: time
+ end
- create :board_group_recent_visit, group: board.group, board: board, user: user, updated_at: 7.days.ago
- create :board_group_recent_visit, group: board2.group, board: board2, user: user, updated_at: 5.days.ago
- recent = create :board_group_recent_visit, group: board3.group, board: board3, user: user, updated_at: 1.day.ago
+ it 'returns the most recent visit' do
+ create_visit(7.days.ago)
+ create_visit(5.days.ago)
+ recent = create_visit(1.day.ago)
expect(described_class.latest(user, group)).to eq recent
end
+
+ it 'returns the 3 most recent visits' do
+ create_visit(7.days.ago)
+ visit1 = create_visit(3.days.ago)
+ visit2 = create_visit(2.days.ago)
+ visit3 = create_visit(5.days.ago)
+
+ expect(described_class.latest(user, group, count: 3)).to eq([visit2, visit1, visit3])
+ end
end
end
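
The new `count:` expectation above implies a `latest` method that orders visits by recency and optionally limits them. A minimal sketch consistent with those expectations follows; the association and column names are assumptions, not the shipped GitLab implementation:

# Illustrative sketch only; the real model may differ in detail.
class BoardGroupRecentVisit < ActiveRecord::Base
  belongs_to :user
  belongs_to :group
  belongs_to :board

  # Newest visit by default; the newest `count` visits (as an array) when a count is given.
  def self.latest(user, group, count: nil)
    visits = where(user: user, group: group).order(updated_at: :desc)
    count ? visits.first(count) : visits.first
  end
end
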
diff --git a/spec/models/board_project_recent_visit_spec.rb b/spec/models/board_project_recent_visit_spec.rb
index 275581945fa..e404fb3bbdb 100644
--- a/spec/models/board_project_recent_visit_spec.rb
+++ b/spec/models/board_project_recent_visit_spec.rb
@@ -50,15 +50,25 @@ describe BoardProjectRecentVisit do
end
describe '#latest' do
- it 'returns the most recent visited' do
- board2 = create(:board, project: project)
- board3 = create(:board, project: project)
+ def create_visit(time)
+ create :board_project_recent_visit, project: project, user: user, updated_at: time
+ end
- create :board_project_recent_visit, project: board.project, board: board, user: user, updated_at: 7.days.ago
- create :board_project_recent_visit, project: board2.project, board: board2, user: user, updated_at: 5.days.ago
- recent = create :board_project_recent_visit, project: board3.project, board: board3, user: user, updated_at: 1.day.ago
+ it 'returns the most recent visit' do
+ create_visit(7.days.ago)
+ create_visit(5.days.ago)
+ recent = create_visit(1.day.ago)
expect(described_class.latest(user, project)).to eq recent
end
+
+ it 'returns the 3 most recent visits' do
+ create_visit(7.days.ago)
+ visit1 = create_visit(3.days.ago)
+ visit2 = create_visit(2.days.ago)
+ visit3 = create_visit(5.days.ago)
+
+ expect(described_class.latest(user, project, count: 3)).to eq([visit2, visit1, visit3])
+ end
end
end
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 741cdfef1a5..b5ec8991720 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -22,4 +22,19 @@ describe Ci::Bridge do
expect(status).to be_a Gitlab::Ci::Status::Success
end
end
+
+ describe '#scoped_variables_hash' do
+ it 'returns a hash representing variables' do
+ variables = %w[
+ CI_JOB_NAME CI_JOB_STAGE CI_COMMIT_SHA CI_COMMIT_SHORT_SHA
+ CI_COMMIT_BEFORE_SHA CI_COMMIT_REF_NAME CI_COMMIT_REF_SLUG
+ CI_PROJECT_ID CI_PROJECT_NAME CI_PROJECT_PATH
+ CI_PROJECT_PATH_SLUG CI_PROJECT_NAMESPACE CI_PIPELINE_IID
+ CI_CONFIG_PATH CI_PIPELINE_SOURCE CI_COMMIT_MESSAGE
+ CI_COMMIT_TITLE CI_COMMIT_DESCRIPTION
+ ]
+
+ expect(bridge.scoped_variables_hash.keys).to include(*variables)
+ end
+ end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 17540443688..fc75d3e23fb 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -23,7 +23,7 @@ describe Ci::Build do
it { is_expected.to validate_presence_of(:ref) }
it { is_expected.to respond_to(:has_trace?) }
it { is_expected.to respond_to(:trace) }
- it { is_expected.to delegate_method(:merge_request?).to(:pipeline) }
+ it { is_expected.to delegate_method(:merge_request_event?).to(:pipeline) }
it { is_expected.to be_a(ArtifactMigratable) }
@@ -2114,55 +2114,55 @@ describe Ci::Build do
context 'returns variables' do
let(:predefined_variables) do
[
- { key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true },
- { key: 'CI_PIPELINE_URL', value: project.web_url + "/pipelines/#{pipeline.id}", public: true },
- { key: 'CI_JOB_ID', value: build.id.to_s, public: true },
- { key: 'CI_JOB_URL', value: project.web_url + "/-/jobs/#{build.id}", public: true },
- { key: 'CI_JOB_TOKEN', value: 'my-token', public: false },
- { key: 'CI_BUILD_ID', value: build.id.to_s, public: true },
- { key: 'CI_BUILD_TOKEN', value: 'my-token', public: false },
- { key: 'CI_REGISTRY_USER', value: 'gitlab-ci-token', public: true },
- { key: 'CI_REGISTRY_PASSWORD', value: 'my-token', public: false },
- { key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false },
- { key: 'CI', value: 'true', public: true },
- { key: 'GITLAB_CI', value: 'true', public: true },
- { key: 'GITLAB_FEATURES', value: project.licensed_features.join(','), public: true },
- { key: 'CI_SERVER_NAME', value: 'GitLab', public: true },
- { key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true },
- { key: 'CI_SERVER_VERSION_MAJOR', value: Gitlab.version_info.major.to_s, public: true },
- { key: 'CI_SERVER_VERSION_MINOR', value: Gitlab.version_info.minor.to_s, public: true },
- { key: 'CI_SERVER_VERSION_PATCH', value: Gitlab.version_info.patch.to_s, public: true },
- { key: 'CI_SERVER_REVISION', value: Gitlab.revision, public: true },
- { key: 'CI_JOB_NAME', value: 'test', public: true },
- { key: 'CI_JOB_STAGE', value: 'test', public: true },
- { key: 'CI_COMMIT_SHA', value: build.sha, public: true },
- { key: 'CI_COMMIT_SHORT_SHA', value: build.short_sha, public: true },
- { key: 'CI_COMMIT_BEFORE_SHA', value: build.before_sha, public: true },
- { key: 'CI_COMMIT_REF_NAME', value: build.ref, public: true },
- { key: 'CI_COMMIT_REF_SLUG', value: build.ref_slug, public: true },
- { key: 'CI_NODE_TOTAL', value: '1', public: true },
- { key: 'CI_BUILD_REF', value: build.sha, public: true },
- { key: 'CI_BUILD_BEFORE_SHA', value: build.before_sha, public: true },
- { key: 'CI_BUILD_REF_NAME', value: build.ref, public: true },
- { key: 'CI_BUILD_REF_SLUG', value: build.ref_slug, public: true },
- { key: 'CI_BUILD_NAME', value: 'test', public: true },
- { key: 'CI_BUILD_STAGE', value: 'test', public: true },
- { key: 'CI_PROJECT_ID', value: project.id.to_s, public: true },
- { key: 'CI_PROJECT_NAME', value: project.path, public: true },
- { key: 'CI_PROJECT_PATH', value: project.full_path, public: true },
- { key: 'CI_PROJECT_PATH_SLUG', value: project.full_path_slug, public: true },
- { key: 'CI_PROJECT_NAMESPACE', value: project.namespace.full_path, public: true },
- { key: 'CI_PROJECT_URL', value: project.web_url, public: true },
- { key: 'CI_PROJECT_VISIBILITY', value: 'private', public: true },
- { key: 'CI_PAGES_DOMAIN', value: Gitlab.config.pages.host, public: true },
- { key: 'CI_PAGES_URL', value: project.pages_url, public: true },
- { key: 'CI_API_V4_URL', value: 'http://localhost/api/v4', public: true },
- { key: 'CI_PIPELINE_IID', value: pipeline.iid.to_s, public: true },
- { key: 'CI_CONFIG_PATH', value: pipeline.ci_yaml_file_path, public: true },
- { key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true },
- { key: 'CI_COMMIT_MESSAGE', value: pipeline.git_commit_message, public: true },
- { key: 'CI_COMMIT_TITLE', value: pipeline.git_commit_title, public: true },
- { key: 'CI_COMMIT_DESCRIPTION', value: pipeline.git_commit_description, public: true }
+ { key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true, masked: false },
+ { key: 'CI_PIPELINE_URL', value: project.web_url + "/pipelines/#{pipeline.id}", public: true, masked: false },
+ { key: 'CI_JOB_ID', value: build.id.to_s, public: true, masked: false },
+ { key: 'CI_JOB_URL', value: project.web_url + "/-/jobs/#{build.id}", public: true, masked: false },
+ { key: 'CI_JOB_TOKEN', value: 'my-token', public: false, masked: false },
+ { key: 'CI_BUILD_ID', value: build.id.to_s, public: true, masked: false },
+ { key: 'CI_BUILD_TOKEN', value: 'my-token', public: false, masked: false },
+ { key: 'CI_REGISTRY_USER', value: 'gitlab-ci-token', public: true, masked: false },
+ { key: 'CI_REGISTRY_PASSWORD', value: 'my-token', public: false, masked: false },
+ { key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false, masked: false },
+ { key: 'CI', value: 'true', public: true, masked: false },
+ { key: 'GITLAB_CI', value: 'true', public: true, masked: false },
+ { key: 'GITLAB_FEATURES', value: project.licensed_features.join(','), public: true, masked: false },
+ { key: 'CI_SERVER_NAME', value: 'GitLab', public: true, masked: false },
+ { key: 'CI_SERVER_VERSION', value: Gitlab::VERSION, public: true, masked: false },
+ { key: 'CI_SERVER_VERSION_MAJOR', value: Gitlab.version_info.major.to_s, public: true, masked: false },
+ { key: 'CI_SERVER_VERSION_MINOR', value: Gitlab.version_info.minor.to_s, public: true, masked: false },
+ { key: 'CI_SERVER_VERSION_PATCH', value: Gitlab.version_info.patch.to_s, public: true, masked: false },
+ { key: 'CI_SERVER_REVISION', value: Gitlab.revision, public: true, masked: false },
+ { key: 'CI_JOB_NAME', value: 'test', public: true, masked: false },
+ { key: 'CI_JOB_STAGE', value: 'test', public: true, masked: false },
+ { key: 'CI_COMMIT_SHA', value: build.sha, public: true, masked: false },
+ { key: 'CI_COMMIT_SHORT_SHA', value: build.short_sha, public: true, masked: false },
+ { key: 'CI_COMMIT_BEFORE_SHA', value: build.before_sha, public: true, masked: false },
+ { key: 'CI_COMMIT_REF_NAME', value: build.ref, public: true, masked: false },
+ { key: 'CI_COMMIT_REF_SLUG', value: build.ref_slug, public: true, masked: false },
+ { key: 'CI_NODE_TOTAL', value: '1', public: true, masked: false },
+ { key: 'CI_BUILD_REF', value: build.sha, public: true, masked: false },
+ { key: 'CI_BUILD_BEFORE_SHA', value: build.before_sha, public: true, masked: false },
+ { key: 'CI_BUILD_REF_NAME', value: build.ref, public: true, masked: false },
+ { key: 'CI_BUILD_REF_SLUG', value: build.ref_slug, public: true, masked: false },
+ { key: 'CI_BUILD_NAME', value: 'test', public: true, masked: false },
+ { key: 'CI_BUILD_STAGE', value: 'test', public: true, masked: false },
+ { key: 'CI_PROJECT_ID', value: project.id.to_s, public: true, masked: false },
+ { key: 'CI_PROJECT_NAME', value: project.path, public: true, masked: false },
+ { key: 'CI_PROJECT_PATH', value: project.full_path, public: true, masked: false },
+ { key: 'CI_PROJECT_PATH_SLUG', value: project.full_path_slug, public: true, masked: false },
+ { key: 'CI_PROJECT_NAMESPACE', value: project.namespace.full_path, public: true, masked: false },
+ { key: 'CI_PROJECT_URL', value: project.web_url, public: true, masked: false },
+ { key: 'CI_PROJECT_VISIBILITY', value: 'private', public: true, masked: false },
+ { key: 'CI_PAGES_DOMAIN', value: Gitlab.config.pages.host, public: true, masked: false },
+ { key: 'CI_PAGES_URL', value: project.pages_url, public: true, masked: false },
+ { key: 'CI_API_V4_URL', value: 'http://localhost/api/v4', public: true, masked: false },
+ { key: 'CI_PIPELINE_IID', value: pipeline.iid.to_s, public: true, masked: false },
+ { key: 'CI_CONFIG_PATH', value: pipeline.ci_yaml_file_path, public: true, masked: false },
+ { key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true, masked: false },
+ { key: 'CI_COMMIT_MESSAGE', value: pipeline.git_commit_message, public: true, masked: false },
+ { key: 'CI_COMMIT_TITLE', value: pipeline.git_commit_title, public: true, masked: false },
+ { key: 'CI_COMMIT_DESCRIPTION', value: pipeline.git_commit_description, public: true, masked: false }
]
end
@@ -2175,10 +2175,10 @@ describe Ci::Build do
describe 'variables ordering' do
context 'when variables hierarchy is stubbed' do
- let(:build_pre_var) { { key: 'build', value: 'value', public: true } }
- let(:project_pre_var) { { key: 'project', value: 'value', public: true } }
- let(:pipeline_pre_var) { { key: 'pipeline', value: 'value', public: true } }
- let(:build_yaml_var) { { key: 'yaml', value: 'value', public: true } }
+ let(:build_pre_var) { { key: 'build', value: 'value', public: true, masked: false } }
+ let(:project_pre_var) { { key: 'project', value: 'value', public: true, masked: false } }
+ let(:pipeline_pre_var) { { key: 'pipeline', value: 'value', public: true, masked: false } }
+ let(:build_yaml_var) { { key: 'yaml', value: 'value', public: true, masked: false } }
before do
allow(build).to receive(:predefined_variables) { [build_pre_var] }
@@ -2200,7 +2200,7 @@ describe Ci::Build do
project_pre_var,
pipeline_pre_var,
build_yaml_var,
- { key: 'secret', value: 'value', public: false }])
+ { key: 'secret', value: 'value', public: false, masked: false }])
end
end
@@ -2233,10 +2233,10 @@ describe Ci::Build do
context 'when build has user' do
let(:user_variables) do
[
- { key: 'GITLAB_USER_ID', value: user.id.to_s, public: true },
- { key: 'GITLAB_USER_EMAIL', value: user.email, public: true },
- { key: 'GITLAB_USER_LOGIN', value: user.username, public: true },
- { key: 'GITLAB_USER_NAME', value: user.name, public: true }
+ { key: 'GITLAB_USER_ID', value: user.id.to_s, public: true, masked: false },
+ { key: 'GITLAB_USER_EMAIL', value: user.email, public: true, masked: false },
+ { key: 'GITLAB_USER_LOGIN', value: user.username, public: true, masked: false },
+ { key: 'GITLAB_USER_NAME', value: user.name, public: true, masked: false }
]
end
@@ -2250,8 +2250,8 @@ describe Ci::Build do
context 'when build has an environment' do
let(:environment_variables) do
[
- { key: 'CI_ENVIRONMENT_NAME', value: 'production', public: true },
- { key: 'CI_ENVIRONMENT_SLUG', value: 'prod-slug', public: true }
+ { key: 'CI_ENVIRONMENT_NAME', value: 'production', public: true, masked: false },
+ { key: 'CI_ENVIRONMENT_SLUG', value: 'prod-slug', public: true, masked: false }
]
end
@@ -2286,7 +2286,7 @@ describe Ci::Build do
before do
environment_variables <<
- { key: 'CI_ENVIRONMENT_URL', value: url, public: true }
+ { key: 'CI_ENVIRONMENT_URL', value: url, public: true, masked: false }
end
context 'when the URL was set from the job' do
@@ -2323,7 +2323,7 @@ describe Ci::Build do
end
let(:manual_variable) do
- { key: 'CI_JOB_MANUAL', value: 'true', public: true }
+ { key: 'CI_JOB_MANUAL', value: 'true', public: true, masked: false }
end
it { is_expected.to include(manual_variable) }
@@ -2331,7 +2331,7 @@ describe Ci::Build do
context 'when build is for tag' do
let(:tag_variable) do
- { key: 'CI_COMMIT_TAG', value: 'master', public: true }
+ { key: 'CI_COMMIT_TAG', value: 'master', public: true, masked: false }
end
before do
@@ -2343,7 +2343,7 @@ describe Ci::Build do
context 'when CI variable is defined' do
let(:ci_variable) do
- { key: 'SECRET_KEY', value: 'secret_value', public: false }
+ { key: 'SECRET_KEY', value: 'secret_value', public: false, masked: false }
end
before do
@@ -2358,7 +2358,7 @@ describe Ci::Build do
let(:ref) { Gitlab::Git::BRANCH_REF_PREFIX + build.ref }
let(:protected_variable) do
- { key: 'PROTECTED_KEY', value: 'protected_value', public: false }
+ { key: 'PROTECTED_KEY', value: 'protected_value', public: false, masked: false }
end
before do
@@ -2390,7 +2390,7 @@ describe Ci::Build do
context 'when group CI variable is defined' do
let(:ci_variable) do
- { key: 'SECRET_KEY', value: 'secret_value', public: false }
+ { key: 'SECRET_KEY', value: 'secret_value', public: false, masked: false }
end
before do
@@ -2405,7 +2405,7 @@ describe Ci::Build do
let(:ref) { Gitlab::Git::BRANCH_REF_PREFIX + build.ref }
let(:protected_variable) do
- { key: 'PROTECTED_KEY', value: 'protected_value', public: false }
+ { key: 'PROTECTED_KEY', value: 'protected_value', public: false, masked: false }
end
before do
@@ -2444,11 +2444,11 @@ describe Ci::Build do
let(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, trigger: trigger) }
let(:user_trigger_variable) do
- { key: 'TRIGGER_KEY_1', value: 'TRIGGER_VALUE_1', public: false }
+ { key: 'TRIGGER_KEY_1', value: 'TRIGGER_VALUE_1', public: false, masked: false }
end
let(:predefined_trigger_variable) do
- { key: 'CI_PIPELINE_TRIGGERED', value: 'true', public: true }
+ { key: 'CI_PIPELINE_TRIGGERED', value: 'true', public: true, masked: false }
end
before do
@@ -2480,7 +2480,7 @@ describe Ci::Build do
context 'when pipeline has a variable' do
let!(:pipeline_variable) { create(:ci_pipeline_variable, pipeline: pipeline) }
- it { is_expected.to include(pipeline_variable.to_runner_variable) }
+ it { is_expected.to include(key: pipeline_variable.key, value: pipeline_variable.value, public: false, masked: false) }
end
context 'when a job was triggered by a pipeline schedule' do
@@ -2497,16 +2497,16 @@ describe Ci::Build do
pipeline_schedule.reload
end
- it { is_expected.to include(pipeline_schedule_variable.to_runner_variable) }
+ it { is_expected.to include(key: pipeline_schedule_variable.key, value: pipeline_schedule_variable.value, public: false, masked: false) }
end
context 'when container registry is enabled' do
let(:container_registry_enabled) { true }
let(:ci_registry) do
- { key: 'CI_REGISTRY', value: 'registry.example.com', public: true }
+ { key: 'CI_REGISTRY', value: 'registry.example.com', public: true, masked: false }
end
let(:ci_registry_image) do
- { key: 'CI_REGISTRY_IMAGE', value: project.container_registry_url, public: true }
+ { key: 'CI_REGISTRY_IMAGE', value: project.container_registry_url, public: true, masked: false }
end
context 'and is disabled for project' do
@@ -2535,13 +2535,13 @@ describe Ci::Build do
build.update(runner: runner)
end
- it { is_expected.to include({ key: 'CI_RUNNER_ID', value: runner.id.to_s, public: true }) }
- it { is_expected.to include({ key: 'CI_RUNNER_DESCRIPTION', value: 'description', public: true }) }
- it { is_expected.to include({ key: 'CI_RUNNER_TAGS', value: 'docker, linux', public: true }) }
+ it { is_expected.to include({ key: 'CI_RUNNER_ID', value: runner.id.to_s, public: true, masked: false }) }
+ it { is_expected.to include({ key: 'CI_RUNNER_DESCRIPTION', value: 'description', public: true, masked: false }) }
+ it { is_expected.to include({ key: 'CI_RUNNER_TAGS', value: 'docker, linux', public: true, masked: false }) }
end
context 'when build is for a deployment' do
- let(:deployment_variable) { { key: 'KUBERNETES_TOKEN', value: 'TOKEN', public: false } }
+ let(:deployment_variable) { { key: 'KUBERNETES_TOKEN', value: 'TOKEN', public: false, masked: false } }
before do
build.environment = 'production'
@@ -2555,7 +2555,7 @@ describe Ci::Build do
end
context 'when project has custom CI config path' do
- let(:ci_config_path) { { key: 'CI_CONFIG_PATH', value: 'custom', public: true } }
+ let(:ci_config_path) { { key: 'CI_CONFIG_PATH', value: 'custom', public: true, masked: false } }
before do
project.update(ci_config_path: 'custom')
@@ -2572,7 +2572,7 @@ describe Ci::Build do
it "includes AUTO_DEVOPS_DOMAIN" do
is_expected.to include(
- { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true })
+ { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true, masked: false })
end
end
@@ -2583,7 +2583,7 @@ describe Ci::Build do
it "includes AUTO_DEVOPS_DOMAIN" do
is_expected.not_to include(
- { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true })
+ { key: 'AUTO_DEVOPS_DOMAIN', value: 'example.com', public: true, masked: false })
end
end
end
@@ -2598,9 +2598,9 @@ describe Ci::Build do
variables = subject.reverse.uniq { |variable| variable[:key] }.reverse
expect(variables)
- .not_to include(key: 'MYVAR', value: 'myvar', public: true)
+ .not_to include(key: 'MYVAR', value: 'myvar', public: true, masked: false)
expect(variables)
- .to include(key: 'MYVAR', value: 'pipeline value', public: false)
+ .to include(key: 'MYVAR', value: 'pipeline value', public: false, masked: false)
end
end
@@ -2616,13 +2616,13 @@ describe Ci::Build do
it 'includes CI_NODE_INDEX' do
is_expected.to include(
- { key: 'CI_NODE_INDEX', value: index.to_s, public: true }
+ { key: 'CI_NODE_INDEX', value: index.to_s, public: true, masked: false }
)
end
it 'includes correct CI_NODE_TOTAL' do
is_expected.to include(
- { key: 'CI_NODE_TOTAL', value: total.to_s, public: true }
+ { key: 'CI_NODE_TOTAL', value: total.to_s, public: true, masked: false }
)
end
end
@@ -2641,7 +2641,7 @@ describe Ci::Build do
it 'returns static predefined variables' do
expect(build.variables.size).to be >= 28
expect(build.variables)
- .to include(key: 'CI_COMMIT_REF_NAME', value: 'feature', public: true)
+ .to include(key: 'CI_COMMIT_REF_NAME', value: 'feature', public: true, masked: false)
expect(build).not_to be_persisted
end
end
@@ -2651,8 +2651,8 @@ describe Ci::Build do
let(:deploy_token_variables) do
[
- { key: 'CI_DEPLOY_USER', value: deploy_token.username, public: true },
- { key: 'CI_DEPLOY_PASSWORD', value: deploy_token.token, public: false }
+ { key: 'CI_DEPLOY_USER', value: deploy_token.username, public: true, masked: false },
+ { key: 'CI_DEPLOY_PASSWORD', value: deploy_token.token, public: false, masked: false }
]
end
@@ -2711,7 +2711,7 @@ describe Ci::Build do
end
expect(variables)
- .to include(key: 'CI_COMMIT_REF_NAME', value: 'feature', public: true)
+ .to include(key: 'CI_COMMIT_REF_NAME', value: 'feature', public: true, masked: false)
end
it 'does not return prohibited variables' do
@@ -2734,6 +2734,122 @@ describe Ci::Build do
end
end
+ describe '#secret_group_variables' do
+ subject { build.secret_group_variables }
+
+ let!(:variable) { create(:ci_group_variable, protected: true, group: group) }
+
+ context 'when ref is branch' do
+ let(:build) { create(:ci_build, ref: 'master', tag: false, project: project) }
+
+ context 'when ref is protected' do
+ before do
+ create(:protected_branch, :developers_can_merge, name: 'master', project: project)
+ end
+
+ it { is_expected.to include(variable) }
+ end
+
+ context 'when ref is not protected' do
+ it { is_expected.not_to include(variable) }
+ end
+ end
+
+ context 'when ref is tag' do
+ let(:build) { create(:ci_build, ref: 'v1.1.0', tag: true, project: project) }
+
+ context 'when ref is protected' do
+ before do
+ create(:protected_tag, project: project, name: 'v*')
+ end
+
+ it { is_expected.to include(variable) }
+ end
+
+ context 'when ref is not protected' do
+ it { is_expected.not_to include(variable) }
+ end
+ end
+
+ context 'when ref is merge request' do
+ let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ let(:pipeline) { merge_request.merge_request_pipelines.first }
+ let(:build) { create(:ci_build, ref: merge_request.source_branch, tag: false, pipeline: pipeline, project: project) }
+
+ context 'when ref is protected' do
+ before do
+ create(:protected_branch, :developers_can_merge, name: merge_request.source_branch, project: project)
+ end
+
+ it 'does not return protected variables as it is not supported for merge request pipelines' do
+ is_expected.not_to include(variable)
+ end
+ end
+
+ context 'when ref is not protected' do
+ it { is_expected.not_to include(variable) }
+ end
+ end
+ end
+
+ describe '#secret_project_variables' do
+ subject { build.secret_project_variables }
+
+ let!(:variable) { create(:ci_variable, protected: true, project: project) }
+
+ context 'when ref is branch' do
+ let(:build) { create(:ci_build, ref: 'master', tag: false, project: project) }
+
+ context 'when ref is protected' do
+ before do
+ create(:protected_branch, :developers_can_merge, name: 'master', project: project)
+ end
+
+ it { is_expected.to include(variable) }
+ end
+
+ context 'when ref is not protected' do
+ it { is_expected.not_to include(variable) }
+ end
+ end
+
+ context 'when ref is tag' do
+ let(:build) { create(:ci_build, ref: 'v1.1.0', tag: true, project: project) }
+
+ context 'when ref is protected' do
+ before do
+ create(:protected_tag, project: project, name: 'v*')
+ end
+
+ it { is_expected.to include(variable) }
+ end
+
+ context 'when ref is not protected' do
+ it { is_expected.not_to include(variable) }
+ end
+ end
+
+ context 'when ref is merge request' do
+ let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ let(:pipeline) { merge_request.merge_request_pipelines.first }
+ let(:build) { create(:ci_build, ref: merge_request.source_branch, tag: false, pipeline: pipeline, project: project) }
+
+ context 'when ref is protected' do
+ before do
+ create(:protected_branch, :developers_can_merge, name: merge_request.source_branch, project: project)
+ end
+
+ it 'does not return protected variables as it is not supported for merge request pipelines' do
+ is_expected.not_to include(variable)
+ end
+ end
+
+ context 'when ref is not protected' do
+ it { is_expected.not_to include(variable) }
+ end
+ end
+ end
+
describe '#scoped_variables_hash' do
context 'when overriding CI variables' do
before do
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index d214fdf369a..59db347582b 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -171,7 +171,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
shared_examples_for 'Scheduling sidekiq worker to flush data to persist store' do
- context 'when new data fullfilled chunk size' do
+ context 'when new data fulfilled chunk size' do
let(:new_data) { 'a' * described_class::CHUNK_SIZE }
it 'schedules trace chunk flush worker' do
@@ -193,7 +193,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end
shared_examples_for 'Scheduling no sidekiq worker' do
- context 'when new data fullfilled chunk size' do
+ context 'when new data fulfilled chunk size' do
let(:new_data) { 'a' * described_class::CHUNK_SIZE }
it 'does not schedule trace chunk flush worker' do
diff --git a/spec/models/ci/group_variable_spec.rb b/spec/models/ci/group_variable_spec.rb
index 1b10501701c..21d96bf3454 100644
--- a/spec/models/ci/group_variable_spec.rb
+++ b/spec/models/ci/group_variable_spec.rb
@@ -5,6 +5,7 @@ describe Ci::GroupVariable do
it { is_expected.to include_module(HasVariable) }
it { is_expected.to include_module(Presentable) }
+ it { is_expected.to include_module(Maskable) }
it { is_expected.to validate_uniqueness_of(:key).scoped_to(:group_id).with_message(/\(\w+\) has already been taken/) }
describe '.unprotected' do
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index ee400bec04b..d0b42d103a5 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -80,11 +80,11 @@ describe Ci::Pipeline, :mailer do
context 'when merge request pipelines exist' do
let!(:merge_request_pipeline_1) do
- create(:ci_pipeline, source: :merge_request, merge_request: merge_request)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request)
end
let!(:merge_request_pipeline_2) do
- create(:ci_pipeline, source: :merge_request, merge_request: merge_request)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request)
end
let(:merge_request) do
@@ -106,11 +106,11 @@ describe Ci::Pipeline, :mailer do
let!(:branch_pipeline_2) { create(:ci_pipeline, source: :push) }
let!(:merge_request_pipeline_1) do
- create(:ci_pipeline, source: :merge_request, merge_request: merge_request)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request)
end
let!(:merge_request_pipeline_2) do
- create(:ci_pipeline, source: :merge_request, merge_request: merge_request)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request)
end
let(:merge_request) do
@@ -134,7 +134,7 @@ describe Ci::Pipeline, :mailer do
subject { described_class.detached_merge_request_pipelines(merge_request) }
let!(:pipeline) do
- create(:ci_pipeline, source: :merge_request, merge_request: merge_request, target_sha: target_sha)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, target_sha: target_sha)
end
let(:merge_request) { create(:merge_request) }
@@ -157,7 +157,7 @@ describe Ci::Pipeline, :mailer do
subject { pipeline.detached_merge_request_pipeline? }
let!(:pipeline) do
- create(:ci_pipeline, source: :merge_request, merge_request: merge_request, target_sha: target_sha)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, target_sha: target_sha)
end
let(:merge_request) { create(:merge_request) }
@@ -176,7 +176,7 @@ describe Ci::Pipeline, :mailer do
subject { described_class.merge_request_pipelines(merge_request) }
let!(:pipeline) do
- create(:ci_pipeline, source: :merge_request, merge_request: merge_request, target_sha: target_sha)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, target_sha: target_sha)
end
let(:merge_request) { create(:merge_request) }
@@ -199,7 +199,7 @@ describe Ci::Pipeline, :mailer do
subject { pipeline.merge_request_pipeline? }
let!(:pipeline) do
- create(:ci_pipeline, source: :merge_request, merge_request: merge_request, target_sha: target_sha)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, target_sha: target_sha)
end
let(:merge_request) { create(:merge_request) }
@@ -218,7 +218,7 @@ describe Ci::Pipeline, :mailer do
subject { described_class.mergeable_merge_request_pipelines(merge_request) }
let!(:pipeline) do
- create(:ci_pipeline, source: :merge_request, merge_request: merge_request, target_sha: target_sha)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, target_sha: target_sha)
end
let(:merge_request) { create(:merge_request) }
@@ -241,7 +241,7 @@ describe Ci::Pipeline, :mailer do
subject { pipeline.mergeable_merge_request_pipeline? }
let!(:pipeline) do
- create(:ci_pipeline, source: :merge_request, merge_request: merge_request, target_sha: target_sha)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, target_sha: target_sha)
end
let(:merge_request) { create(:merge_request) }
@@ -256,11 +256,11 @@ describe Ci::Pipeline, :mailer do
end
end
- describe '.merge_request' do
- subject { described_class.merge_request }
+ describe '.merge_request_event' do
+ subject { described_class.merge_request_event }
context 'when there is a merge request pipeline' do
- let!(:pipeline) { create(:ci_pipeline, source: :merge_request, merge_request: merge_request) }
+ let!(:pipeline) { create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request) }
let(:merge_request) { create(:merge_request) }
it 'returns merge request pipeline first' do
@@ -281,7 +281,7 @@ describe Ci::Pipeline, :mailer do
let(:pipeline) { build(:ci_pipeline, source: source, merge_request: merge_request) }
context 'when source is merge request' do
- let(:source) { :merge_request }
+ let(:source) { :merge_request_event }
context 'when merge request is specified' do
let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_project: project, target_branch: 'master') }
@@ -505,7 +505,7 @@ describe Ci::Pipeline, :mailer do
context 'when source is merge request' do
let(:pipeline) do
- create(:ci_pipeline, source: :merge_request, merge_request: merge_request)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request)
end
let(:merge_request) do
@@ -513,9 +513,16 @@ describe Ci::Pipeline, :mailer do
source_project: project,
source_branch: 'feature',
target_project: project,
- target_branch: 'master')
+ target_branch: 'master',
+ assignee: assignee,
+ milestone: milestone,
+ labels: labels)
end
+ let(:assignee) { create(:user) }
+ let(:milestone) { create(:milestone, project: project) }
+ let(:labels) { create_list(:label, 2) }
+
it 'exposes merge request pipeline variables' do
expect(subject.to_hash)
.to include(
@@ -531,7 +538,11 @@ describe Ci::Pipeline, :mailer do
'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path,
'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url,
'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s,
- 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => pipeline.source_sha.to_s)
+ 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => pipeline.source_sha.to_s,
+ 'CI_MERGE_REQUEST_TITLE' => merge_request.title,
+ 'CI_MERGE_REQUEST_ASSIGNEES' => assignee.username,
+ 'CI_MERGE_REQUEST_MILESTONE' => milestone.title,
+ 'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).join(','))
end
context 'when source project does not exist' do
@@ -547,6 +558,30 @@ describe Ci::Pipeline, :mailer do
CI_MERGE_REQUEST_SOURCE_BRANCH_NAME])
end
end
+
+ context 'without assignee' do
+ let(:assignee) { nil }
+
+ it 'does not expose assignee variable' do
+ expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_ASSIGNEES')
+ end
+ end
+
+ context 'without milestone' do
+ let(:milestone) { nil }
+
+ it 'does not expose milestone variable' do
+ expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_MILESTONE')
+ end
+ end
+
+ context 'without labels' do
+ let(:labels) { [] }
+
+ it 'does not expose labels variable' do
+ expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_LABELS')
+ end
+ end
end
end
@@ -1097,7 +1132,7 @@ describe Ci::Pipeline, :mailer do
context 'when source is merge request' do
let(:pipeline) do
- create(:ci_pipeline, source: :merge_request, merge_request: merge_request)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request)
end
let(:merge_request) do
@@ -1147,7 +1182,7 @@ describe Ci::Pipeline, :mailer do
context 'when ref is merge request' do
let(:pipeline) do
create(:ci_pipeline,
- source: :merge_request,
+ source: :merge_request_event,
merge_request: merge_request)
end
@@ -1310,7 +1345,7 @@ describe Ci::Pipeline, :mailer do
context 'when source is merge request' do
let(:pipeline) do
- create(:ci_pipeline, source: :merge_request, merge_request: merge_request)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request)
end
let(:merge_request) do
@@ -2217,7 +2252,7 @@ describe Ci::Pipeline, :mailer do
end
end
- describe "#merge_requests" do
+ describe "#merge_requests_as_head_pipeline" do
let(:project) { create(:project) }
let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: 'master', sha: 'a288a022a53a5a944fae87bcec6efc87b7061808') }
@@ -2225,20 +2260,20 @@ describe Ci::Pipeline, :mailer do
allow_any_instance_of(MergeRequest).to receive(:diff_head_sha) { 'a288a022a53a5a944fae87bcec6efc87b7061808' }
merge_request = create(:merge_request, source_project: project, head_pipeline: pipeline, source_branch: pipeline.ref)
- expect(pipeline.merge_requests).to eq([merge_request])
+ expect(pipeline.merge_requests_as_head_pipeline).to eq([merge_request])
end
it "doesn't return merge requests whose source branch doesn't match the pipeline's ref" do
create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master')
- expect(pipeline.merge_requests).to be_empty
+ expect(pipeline.merge_requests_as_head_pipeline).to be_empty
end
it "doesn't return merge requests whose `diff_head_sha` doesn't match the pipeline's SHA" do
create(:merge_request, source_project: project, source_branch: pipeline.ref)
allow_any_instance_of(MergeRequest).to receive(:diff_head_sha) { '97de212e80737a608d939f648d959671fb0a0142b' }
- expect(pipeline.merge_requests).to be_empty
+ expect(pipeline.merge_requests_as_head_pipeline).to be_empty
end
end
@@ -2266,7 +2301,7 @@ describe Ci::Pipeline, :mailer do
let!(:pipeline) do
create(:ci_pipeline,
- source: :merge_request,
+ source: :merge_request_event,
project: pipeline_project,
ref: source_branch,
merge_request: merge_request)
@@ -2289,7 +2324,7 @@ describe Ci::Pipeline, :mailer do
let!(:pipeline_2) do
create(:ci_pipeline,
- source: :merge_request,
+ source: :merge_request_event,
project: pipeline_project,
ref: source_branch,
merge_request: merge_request_2)
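
The new CI_MERGE_REQUEST_TITLE/ASSIGNEES/MILESTONE/LABELS expectations, together with the 'without assignee/milestone/labels' contexts above, describe variables that are only exposed when the corresponding merge request attribute is present. A simplified illustration of that behaviour (a hypothetical helper for clarity, not the actual GitLab code):

# Hypothetical helper, shown only to illustrate the conditional exposure.
def merge_request_variables(merge_request)
  variables = { 'CI_MERGE_REQUEST_TITLE' => merge_request.title }

  if merge_request.assignee
    variables['CI_MERGE_REQUEST_ASSIGNEES'] = merge_request.assignee.username
  end

  if merge_request.milestone
    variables['CI_MERGE_REQUEST_MILESTONE'] = merge_request.milestone.title
  end

  if merge_request.labels.any?
    variables['CI_MERGE_REQUEST_LABELS'] = merge_request.labels.map(&:title).join(',')
  end

  variables
end
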
diff --git a/spec/models/ci/variable_spec.rb b/spec/models/ci/variable_spec.rb
index 875e8b2b682..02c07a2bd83 100644
--- a/spec/models/ci/variable_spec.rb
+++ b/spec/models/ci/variable_spec.rb
@@ -6,6 +6,7 @@ describe Ci::Variable do
describe 'validations' do
it { is_expected.to include_module(HasVariable) }
it { is_expected.to include_module(Presentable) }
+ it { is_expected.to include_module(Maskable) }
it { is_expected.to validate_uniqueness_of(:key).scoped_to(:project_id, :environment_scope).with_message(/\(\w+\) has already been taken/) }
end
diff --git a/spec/models/clusters/applications/jupyter_spec.rb b/spec/models/clusters/applications/jupyter_spec.rb
index 2c22c24c498..6e58f3ad699 100644
--- a/spec/models/clusters/applications/jupyter_spec.rb
+++ b/spec/models/clusters/applications/jupyter_spec.rb
@@ -39,7 +39,7 @@ describe Clusters::Applications::Jupyter do
it 'should be initialized with 4 arguments' do
expect(subject.name).to eq('jupyter')
expect(subject.chart).to eq('jupyter/jupyterhub')
- expect(subject.version).to eq('v0.6')
+ expect(subject.version).to eq('0.9-174bbd5')
expect(subject).to be_rbac
expect(subject.repository).to eq('https://jupyterhub.github.io/helm-chart/')
expect(subject.files).to eq(jupyter.files)
@@ -57,7 +57,7 @@ describe Clusters::Applications::Jupyter do
let(:jupyter) { create(:clusters_applications_jupyter, :errored, version: '0.0.1') }
it 'should be initialized with the locked version' do
- expect(subject.version).to eq('v0.6')
+ expect(subject.version).to eq('0.9-174bbd5')
end
end
end
@@ -77,6 +77,7 @@ describe Clusters::Applications::Jupyter do
expect(values).to include('singleuser')
expect(values).to match(/clientId: '?#{application.oauth_application.uid}/)
expect(values).to match(/callbackUrl: '?#{application.callback_url}/)
+ expect(values).to include("gitlabProjectIdWhitelist:\n - #{application.cluster.project.id}")
end
context 'when cluster belongs to a project' do
diff --git a/spec/models/clusters/applications/knative_spec.rb b/spec/models/clusters/applications/knative_spec.rb
index 006b922ab27..4884a5927fb 100644
--- a/spec/models/clusters/applications/knative_spec.rb
+++ b/spec/models/clusters/applications/knative_spec.rb
@@ -66,9 +66,7 @@ describe Clusters::Applications::Knative do
end
end
- describe '#install_command' do
- subject { knative.install_command }
-
+ shared_examples 'a command' do
it 'should be an instance of Helm::InstallCommand' do
expect(subject).to be_an_instance_of(Gitlab::Kubernetes::Helm::InstallCommand)
end
@@ -76,7 +74,6 @@ describe Clusters::Applications::Knative do
it 'should be initialized with knative arguments' do
expect(subject.name).to eq('knative')
expect(subject.chart).to eq('knative/knative')
- expect(subject.version).to eq('0.2.2')
expect(subject.files).to eq(knative.files)
end
@@ -98,6 +95,27 @@ describe Clusters::Applications::Knative do
end
end
+ describe '#install_command' do
+ subject { knative.install_command }
+
+ it 'should be initialized with the latest version' do
+ expect(subject.version).to eq('0.2.2')
+ end
+
+ it_behaves_like 'a command'
+ end
+
+ describe '#update_command' do
+ let!(:current_installed_version) { knative.version = '0.1.0' }
+ subject { knative.update_command }
+
+ it 'should be initialized with the current version' do
+ expect(subject.version).to eq(current_installed_version)
+ end
+
+ it_behaves_like 'a command'
+ end
+
describe '#files' do
let(:application) { knative }
let(:values) { subject[:'values.yaml'] }
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index 4068d98d8f7..3b32ca8df05 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -98,6 +98,22 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching
it { expect(kubernetes.save).to be_truthy }
end
+
+ context 'when api_url is localhost' do
+ let(:api_url) { 'http://localhost:22' }
+
+ it { expect(kubernetes.save).to be_falsey }
+
+ context 'when application settings allow local requests' do
+ before do
+ allow(ApplicationSetting)
+ .to receive(:current)
+ .and_return(ApplicationSetting.build_from_defaults(allow_local_requests_from_hooks_and_services: true))
+ end
+
+ it { expect(kubernetes.save).to be_truthy }
+ end
+ end
end
context 'when validates token' do
diff --git a/spec/models/concerns/has_ref_spec.rb b/spec/models/concerns/has_ref_spec.rb
index 8aed72d77a4..8aa2fecb18c 100644
--- a/spec/models/concerns/has_ref_spec.rb
+++ b/spec/models/concerns/has_ref_spec.rb
@@ -16,6 +16,16 @@ describe HasRef do
it 'return true when tag is set to false' do
is_expected.to be_truthy
end
+
+ context 'when it was triggered by merge request' do
+ let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ let(:pipeline) { merge_request.merge_request_pipelines.first }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
+ it 'returns false' do
+ is_expected.to be_falsy
+ end
+ end
end
context 'is not a tag' do
@@ -55,5 +65,15 @@ describe HasRef do
is_expected.to start_with(Gitlab::Git::BRANCH_REF_PREFIX)
end
end
+
+ context 'when it is triggered by a merge request' do
+ let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ let(:pipeline) { merge_request.merge_request_pipelines.first }
+ let(:build) { create(:ci_build, tag: false, pipeline: pipeline) }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
end
end
diff --git a/spec/models/concerns/has_variable_spec.rb b/spec/models/concerns/has_variable_spec.rb
index 3fbe86c5b56..bff96e12ffa 100644
--- a/spec/models/concerns/has_variable_spec.rb
+++ b/spec/models/concerns/has_variable_spec.rb
@@ -57,7 +57,7 @@ describe HasVariable do
describe '#to_runner_variable' do
it 'returns a hash for the runner' do
expect(subject.to_runner_variable)
- .to eq(key: subject.key, value: subject.value, public: false)
+ .to include(key: subject.key, value: subject.value, public: false)
end
end
end
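
The switch from `eq` to `include` reflects that `#to_runner_variable` now carries more than the three original keys (the Maskable change below adds `:masked`), so an exact-hash match would break each time another attribute is exposed. Roughly, and for illustration only:

# Illustrative hash shapes:
before_hash = { key: 'VAR', value: 'secret', public: false }
after_hash  = { key: 'VAR', value: 'secret', public: false, masked: false }
# `include` asserts only on the keys this spec cares about, so it holds for both shapes.
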
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 41159348e04..72c6161424b 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -32,17 +32,56 @@ describe Issuable do
end
describe "Validation" do
- subject { build(:issue) }
+ context 'general validations' do
+ subject { build(:issue) }
- before do
- allow(InternalId).to receive(:generate_next).and_return(nil)
+ before do
+ allow(InternalId).to receive(:generate_next).and_return(nil)
+ end
+
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:iid) }
+ it { is_expected.to validate_presence_of(:author) }
+ it { is_expected.to validate_presence_of(:title) }
+ it { is_expected.to validate_length_of(:title).is_at_most(255) }
end
- it { is_expected.to validate_presence_of(:project) }
- it { is_expected.to validate_presence_of(:iid) }
- it { is_expected.to validate_presence_of(:author) }
- it { is_expected.to validate_presence_of(:title) }
- it { is_expected.to validate_length_of(:title).is_at_most(255) }
+ describe 'milestone' do
+ let(:project) { create(:project) }
+ let(:milestone_id) { create(:milestone, project: project).id }
+ let(:params) do
+ {
+ title: 'something',
+ project: project,
+ author: build(:user),
+ milestone_id: milestone_id
+ }
+ end
+
+ subject { issuable_class.new(params) }
+
+ context 'with correct params' do
+ it { is_expected.to be_valid }
+ end
+
+ context 'with empty string milestone' do
+ let(:milestone_id) { '' }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'with nil milestone id' do
+ let(:milestone_id) { nil }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'with a milestone id from another project' do
+ let(:milestone_id) { create(:milestone).id }
+
+ it { is_expected.to be_invalid }
+ end
+ end
end
describe "Scope" do
@@ -66,6 +105,48 @@ describe Issuable do
end
end
+ describe '#milestone_available?' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+ let(:issue) { create(:issue, project: project) }
+
+ def build_issuable(milestone_id)
+ issuable_class.new(project: project, milestone_id: milestone_id)
+ end
+
+ it 'returns true with a milestone from the issue project' do
+ milestone = create(:milestone, project: project)
+
+ expect(build_issuable(milestone.id).milestone_available?).to be_truthy
+ end
+
+ it 'returns true with a milestone from the issue project group' do
+ milestone = create(:milestone, group: group)
+
+ expect(build_issuable(milestone.id).milestone_available?).to be_truthy
+ end
+
+ it 'returns true with a milestone from the parent of the issue project group', :nested_groups do
+ parent = create(:group)
+ group.update(parent: parent)
+ milestone = create(:milestone, group: parent)
+
+ expect(build_issuable(milestone.id).milestone_available?).to be_truthy
+ end
+
+ it 'returns false with a milestone from another project' do
+ milestone = create(:milestone)
+
+ expect(build_issuable(milestone.id).milestone_available?).to be_falsey
+ end
+
+ it 'returns false with a milestone from another group' do
+ milestone = create(:milestone, group: create(:group))
+
+ expect(build_issuable(milestone.id).milestone_available?).to be_falsey
+ end
+ end
+
describe ".search" do
let!(:searchable_issue) { create(:issue, title: "Searchable awesome issue") }
let!(:searchable_issue2) { create(:issue, title: 'Aw') }
diff --git a/spec/models/concerns/maskable_spec.rb b/spec/models/concerns/maskable_spec.rb
new file mode 100644
index 00000000000..aeba7ad862f
--- /dev/null
+++ b/spec/models/concerns/maskable_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Maskable do
+ let(:variable) { build(:ci_variable) }
+
+ describe 'masked value validations' do
+ subject { variable }
+
+ context 'when variable is masked' do
+ before do
+ subject.masked = true
+ end
+
+ it { is_expected.not_to allow_value('hello').for(:value) }
+ it { is_expected.not_to allow_value('hello world').for(:value) }
+ it { is_expected.not_to allow_value('hello$VARIABLEworld').for(:value) }
+ it { is_expected.not_to allow_value('hello\rworld').for(:value) }
+ it { is_expected.to allow_value('helloworld').for(:value) }
+ end
+
+ context 'when variable is not masked' do
+ before do
+ subject.masked = false
+ end
+
+ it { is_expected.to allow_value('hello').for(:value) }
+ it { is_expected.to allow_value('hello world').for(:value) }
+ it { is_expected.to allow_value('hello$VARIABLEworld').for(:value) }
+ it { is_expected.to allow_value('hello\rworld').for(:value) }
+ it { is_expected.to allow_value('helloworld').for(:value) }
+ end
+ end
+
+ describe 'REGEX' do
+ subject { Maskable::REGEX }
+
+ it 'does not match strings shorter than 8 letters' do
+ expect(subject.match?('hello')).to eq(false)
+ end
+
+ it 'does not match strings with spaces' do
+ expect(subject.match?('hello world')).to eq(false)
+ end
+
+ it 'does not match strings with shell variables' do
+ expect(subject.match?('hello$VARIABLEworld')).to eq(false)
+ end
+
+ it 'does not match strings with escape characters' do
+ expect(subject.match?('hello\rworld')).to eq(false)
+ end
+
+ it 'does not match strings that span more than one line' do
+ string = <<~EOS
+ hello
+ world
+ EOS
+
+ expect(subject.match?(string)).to eq(false)
+ end
+
+ it 'matches valid strings' do
+ expect(subject.match?('helloworld')).to eq(true)
+ end
+ end
+
+ describe '#to_runner_variable' do
+ subject { variable.to_runner_variable }
+
+ it 'exposes the masked attribute' do
+ expect(subject).to include(:masked)
+ end
+ end
+end
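For reference, a standalone pattern with the properties exercised above (at least eight word characters; no spaces, '$', backslashes, or newlines) can be sketched as follows; MASK_REGEX is a hypothetical name and the pattern is inferred from the expectations, not taken from Maskable::REGEX itself.

# Hypothetical sketch: a masking pattern consistent with the spec above.
MASK_REGEX = /\A\w{8,}\z/.freeze

{
  'hello'               => false, # shorter than 8 characters
  'hello world'         => false, # contains a space
  'hello$VARIABLEworld' => false, # contains a shell variable character
  'hello\rworld'        => false, # contains a literal backslash
  "hello\nworld"        => false, # spans more than one line
  'helloworld'          => true
}.each do |value, expected|
  raise "unexpected result for #{value.inspect}" unless MASK_REGEX.match?(value) == expected
end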
diff --git a/spec/models/concerns/milestoneish_spec.rb b/spec/models/concerns/milestoneish_spec.rb
index 87bf731340f..81ca5b638fe 100644
--- a/spec/models/concerns/milestoneish_spec.rb
+++ b/spec/models/concerns/milestoneish_spec.rb
@@ -9,8 +9,10 @@ describe Milestone, 'Milestoneish' do
let(:admin) { create(:admin) }
let(:project) { create(:project, :public) }
let(:milestone) { create(:milestone, project: project) }
- let!(:issue) { create(:issue, project: project, milestone: milestone) }
- let!(:security_issue_1) { create(:issue, :confidential, project: project, author: author, milestone: milestone) }
+ let(:label1) { create(:label, project: project) }
+ let(:label2) { create(:label, project: project) }
+ let!(:issue) { create(:issue, project: project, milestone: milestone, assignees: [member], labels: [label1]) }
+ let!(:security_issue_1) { create(:issue, :confidential, project: project, author: author, milestone: milestone, labels: [label2]) }
let!(:security_issue_2) { create(:issue, :confidential, project: project, assignees: [assignee], milestone: milestone) }
let!(:closed_issue_1) { create(:issue, :closed, project: project, milestone: milestone) }
let!(:closed_issue_2) { create(:issue, :closed, project: project, milestone: milestone) }
@@ -42,13 +44,102 @@ describe Milestone, 'Milestoneish' do
end
end
+ context 'attributes visibility' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:users) do
+ {
+ anonymous: nil,
+ non_member: non_member,
+ guest: guest,
+ member: member,
+ assignee: assignee
+ }
+ end
+
+ let(:project_visibility_levels) do
+ {
+ public: Gitlab::VisibilityLevel::PUBLIC,
+ internal: Gitlab::VisibilityLevel::INTERNAL,
+ private: Gitlab::VisibilityLevel::PRIVATE
+ }
+ end
+
+ describe '#issue_participants_visible_by_user' do
+ where(:visibility, :user_role, :result) do
+ :public | nil | [:member]
+ :public | :non_member | [:member]
+ :public | :guest | [:member]
+ :public | :member | [:member, :assignee]
+ :internal | nil | []
+ :internal | :non_member | [:member]
+ :internal | :guest | [:member]
+ :internal | :member | [:member, :assignee]
+ :private | nil | []
+ :private | :non_member | []
+ :private | :guest | [:member]
+ :private | :member | [:member, :assignee]
+ end
+
+ with_them do
+ before do
+ project.update(visibility_level: project_visibility_levels[visibility])
+ end
+
+ it 'returns the proper participants' do
+ user = users[user_role]
+ participants = result.map { |role| users[role] }
+
+ expect(milestone.issue_participants_visible_by_user(user)).to match_array(participants)
+ end
+ end
+ end
+
+ describe '#issue_labels_visible_by_user' do
+ let(:labels) do
+ {
+ label1: label1,
+ label2: label2
+ }
+ end
+
+ where(:visibility, :user_role, :result) do
+ :public | nil | [:label1]
+ :public | :non_member | [:label1]
+ :public | :guest | [:label1]
+ :public | :member | [:label1, :label2]
+ :internal | nil | []
+ :internal | :non_member | [:label1]
+ :internal | :guest | [:label1]
+ :internal | :member | [:label1, :label2]
+ :private | nil | []
+ :private | :non_member | []
+ :private | :guest | [:label1]
+ :private | :member | [:label1, :label2]
+ end
+
+ with_them do
+ before do
+ project.update(visibility_level: project_visibility_levels[visibility])
+ end
+
+ it 'returns the proper labels' do
+ user = users[user_role]
+ expected_labels = result.map { |label| labels[label] }
+
+ expect(milestone.issue_labels_visible_by_user(user)).to match_array(expected_labels)
+ end
+ end
+ end
+ end
+
describe '#sorted_merge_requests' do
it 'sorts merge requests by label priority' do
merge_request_1 = create(:labeled_merge_request, labels: [label_2], source_project: project, source_branch: 'branch_1', milestone: milestone)
merge_request_2 = create(:labeled_merge_request, labels: [label_1], source_project: project, source_branch: 'branch_2', milestone: milestone)
merge_request_3 = create(:labeled_merge_request, labels: [label_3], source_project: project, source_branch: 'branch_3', milestone: milestone)
- merge_requests = milestone.sorted_merge_requests
+ merge_requests = milestone.sorted_merge_requests(member)
expect(merge_requests.first).to eq(merge_request_2)
expect(merge_requests.second).to eq(merge_request_1)
@@ -56,6 +147,51 @@ describe Milestone, 'Milestoneish' do
end
end
+ describe '#merge_requests_visible_to_user' do
+ let(:merge_request) { create(:merge_request, source_project: project, milestone: milestone) }
+
+ context 'when project is private' do
+ before do
+ project.update(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ it 'does not return any merge request for a non member' do
+ merge_requests = milestone.merge_requests_visible_to_user(non_member)
+ expect(merge_requests).to be_empty
+ end
+
+ it 'returns milestone merge requests for a member' do
+ merge_requests = milestone.merge_requests_visible_to_user(member)
+ expect(merge_requests).to contain_exactly(merge_request)
+ end
+ end
+
+ context 'when project is public' do
+ context 'when merge requests are available to anyone' do
+ it 'returns milestone merge requests for a non member' do
+ merge_requests = milestone.merge_requests_visible_to_user(non_member)
+ expect(merge_requests).to contain_exactly(merge_request)
+ end
+ end
+
+ context 'when merge requests are available to project members' do
+ before do
+ project.project_feature.update(merge_requests_access_level: ProjectFeature::PRIVATE)
+ end
+
+ it 'does not return any merge request for a non member' do
+ merge_requests = milestone.merge_requests_visible_to_user(non_member)
+ expect(merge_requests).to be_empty
+ end
+
+ it 'returns milestone merge requests for a member' do
+ merge_requests = milestone.merge_requests_visible_to_user(member)
+ expect(merge_requests).to contain_exactly(merge_request)
+ end
+ end
+ end
+ end
+
describe '#closed_items_count' do
it 'does not count confidential issues for non project members' do
expect(milestone.closed_items_count(non_member)).to eq 2
diff --git a/spec/models/concerns/sortable_spec.rb b/spec/models/concerns/sortable_spec.rb
index 39c16ae60af..0a9d2021a19 100644
--- a/spec/models/concerns/sortable_spec.rb
+++ b/spec/models/concerns/sortable_spec.rb
@@ -99,7 +99,7 @@ describe Sortable do
expect(ordered_group_names('id_desc')).to eq(%w(bbb BB AAA aa))
end
- it 'sorts groups by name via case-insentitive comparision' do
+ it 'sorts groups by name via case-insensitive comparison' do
expect(ordered_group_names('name_asc')).to eq(%w(aa AAA BB bbb))
expect(ordered_group_names('name_desc')).to eq(%w(bbb BB AAA aa))
end
diff --git a/spec/models/concerns/token_authenticatable_strategies/base_spec.rb b/spec/models/concerns/token_authenticatable_strategies/base_spec.rb
index 6605f1f5a5f..2a0182b4294 100644
--- a/spec/models/concerns/token_authenticatable_strategies/base_spec.rb
+++ b/spec/models/concerns/token_authenticatable_strategies/base_spec.rb
@@ -15,7 +15,7 @@ describe TokenAuthenticatableStrategies::Base do
context 'when encrypted strategy is specified' do
it 'fabricates encrypted strategy object' do
- strategy = described_class.fabricate(instance, field, encrypted: true)
+ strategy = described_class.fabricate(instance, field, encrypted: :required)
expect(strategy).to be_a TokenAuthenticatableStrategies::Encrypted
end
@@ -23,7 +23,7 @@ describe TokenAuthenticatableStrategies::Base do
context 'when no strategy is specified' do
it 'fabricates insecure strategy object' do
- strategy = described_class.fabricate(instance, field, something: true)
+ strategy = described_class.fabricate(instance, field, something: :required)
expect(strategy).to be_a TokenAuthenticatableStrategies::Insecure
end
@@ -31,35 +31,9 @@ describe TokenAuthenticatableStrategies::Base do
context 'when incompatible options are provided' do
it 'raises an error' do
- expect { described_class.fabricate(instance, field, digest: true, encrypted: true) }
+ expect { described_class.fabricate(instance, field, digest: true, encrypted: :required) }
.to raise_error ArgumentError
end
end
end
-
- describe '#fallback?' do
- context 'when fallback is set' do
- it 'recognizes fallback setting' do
- strategy = described_class.new(instance, field, fallback: true)
-
- expect(strategy.fallback?).to be true
- end
- end
-
- context 'when fallback is not a valid value' do
- it 'raises an error' do
- strategy = described_class.new(instance, field, fallback: 'something')
-
- expect { strategy.fallback? }.to raise_error ArgumentError
- end
- end
-
- context 'when fallback is not set' do
- it 'raises an error' do
- strategy = described_class.new(instance, field, {})
-
- expect(strategy.fallback?).to eq false
- end
- end
- end
end
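A minimal, hypothetical sketch of option-driven strategy selection consistent with the expectations above; the module, class, and method names here are placeholders, not GitLab's actual TokenAuthenticatableStrategies implementation.

# Illustrative only: pick a strategy from the presence of an :encrypted option.
module TokenStrategySketch
  Insecure  = Struct.new(:instance, :field, :options)
  Encrypted = Struct.new(:instance, :field, :options)

  def self.fabricate(instance, field, options)
    raise ArgumentError, 'incompatible options' if options[:digest] && options[:encrypted]

    klass = options[:encrypted] ? Encrypted : Insecure
    klass.new(instance, field, options)
  end
end

TokenStrategySketch.fabricate(Object.new, :token, encrypted: :required) # encrypted strategy
TokenStrategySketch.fabricate(Object.new, :token, something: :required) # insecure strategy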
diff --git a/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb b/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
index 93cab80cb1f..ca38f86c5ab 100644
--- a/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
+++ b/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
@@ -12,19 +12,9 @@ describe TokenAuthenticatableStrategies::Encrypted do
described_class.new(model, 'some_field', options)
end
- describe '.new' do
- context 'when fallback and migration strategies are set' do
- let(:options) { { fallback: true, migrating: true } }
-
- it 'raises an error' do
- expect { subject }.to raise_error ArgumentError, /not compatible/
- end
- end
- end
-
describe '#find_token_authenticatable' do
- context 'when using fallback strategy' do
- let(:options) { { fallback: true } }
+ context 'when using optional strategy' do
+ let(:options) { { encrypted: :optional } }
it 'finds the encrypted resource by cleartext' do
allow(model).to receive(:find_by)
@@ -50,7 +40,7 @@ describe TokenAuthenticatableStrategies::Encrypted do
end
context 'when using migration strategy' do
- let(:options) { { migrating: true } }
+ let(:options) { { encrypted: :migrating } }
it 'finds the cleartext resource by cleartext' do
allow(model).to receive(:find_by)
@@ -73,8 +63,8 @@ describe TokenAuthenticatableStrategies::Encrypted do
end
describe '#get_token' do
- context 'when using fallback strategy' do
- let(:options) { { fallback: true } }
+ context 'when using optional strategy' do
+ let(:options) { { encrypted: :optional } }
it 'returns decrypted token when an encrypted token is present' do
allow(instance).to receive(:read_attribute)
@@ -98,7 +88,7 @@ describe TokenAuthenticatableStrategies::Encrypted do
end
context 'when using migration strategy' do
- let(:options) { { migrating: true } }
+ let(:options) { { encrypted: :migrating } }
it 'returns cleartext token when an encrypted token is present' do
allow(instance).to receive(:read_attribute)
@@ -127,8 +117,8 @@ describe TokenAuthenticatableStrategies::Encrypted do
end
describe '#set_token' do
- context 'when using fallback strategy' do
- let(:options) { { fallback: true } }
+ context 'when using optional strategy' do
+ let(:options) { { encrypted: :optional } }
it 'writes encrypted token and removes plaintext token and returns it' do
expect(instance).to receive(:[]=)
@@ -141,7 +131,7 @@ describe TokenAuthenticatableStrategies::Encrypted do
end
context 'when using migration strategy' do
- let(:options) { { migrating: true } }
+ let(:options) { { encrypted: :migrating } }
it 'writes encrypted token and writes plaintext token' do
expect(instance).to receive(:[]=)
diff --git a/spec/models/error_tracking/project_error_tracking_setting_spec.rb b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
index 076ccc96041..cbde13a2c7a 100644
--- a/spec/models/error_tracking/project_error_tracking_setting_spec.rb
+++ b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
@@ -62,11 +62,32 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
end
context 'URL path' do
- it 'fails validation with wrong path' do
+ it 'fails validation without api/0/projects' do
subject.api_url = 'http://gitlab.com/project1/something'
expect(subject).not_to be_valid
- expect(subject.errors.messages[:api_url]).to include('path needs to start with /api/0/projects')
+ expect(subject.errors.messages[:api_url]).to include('is invalid')
+ end
+
+ it 'fails validation without org and project slugs' do
+ subject.api_url = 'http://gitlab.com/api/0/projects/'
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.messages[:project]).to include('is a required field')
+ end
+
+ it 'fails validation when api_url has extra parts' do
+ subject.api_url = 'http://gitlab.com/api/0/projects/org/proj/something'
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.messages[:api_url]).to include("is invalid")
+ end
+
+ it 'fails validation when api_url has fewer parts' do
+ subject.api_url = 'http://gitlab.com/api/0/projects/org'
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.messages[:api_url]).to include("is invalid")
end
it 'passes validation with correct path' do
@@ -275,6 +296,16 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
expect(api_url).to eq(':::')
end
+
+ it 'returns nil when api_host is blank' do
+ api_url = described_class.build_api_url_from(
+ api_host: '',
+ organization_slug: 'org-slug',
+ project_slug: 'proj-slug'
+ )
+
+ expect(api_url).to be_nil
+ end
end
describe '#api_host' do
diff --git a/spec/models/issue/metrics_spec.rb b/spec/models/issue/metrics_spec.rb
index 1bf0ecb98ad..b7291eebe64 100644
--- a/spec/models/issue/metrics_spec.rb
+++ b/spec/models/issue/metrics_spec.rb
@@ -9,7 +9,7 @@ describe Issue::Metrics do
context "milestones" do
it "records the first time an issue is associated with a milestone" do
time = Time.now
- Timecop.freeze(time) { subject.update(milestone: create(:milestone)) }
+ Timecop.freeze(time) { subject.update(milestone: create(:milestone, project: project)) }
metrics = subject.metrics
expect(metrics).to be_present
@@ -18,9 +18,9 @@ describe Issue::Metrics do
it "does not record the second time an issue is associated with a milestone" do
time = Time.now
- Timecop.freeze(time) { subject.update(milestone: create(:milestone)) }
+ Timecop.freeze(time) { subject.update(milestone: create(:milestone, project: project)) }
Timecop.freeze(time + 2.hours) { subject.update(milestone: nil) }
- Timecop.freeze(time + 6.hours) { subject.update(milestone: create(:milestone)) }
+ Timecop.freeze(time + 6.hours) { subject.update(milestone: create(:milestone, project: project)) }
metrics = subject.metrics
expect(metrics).to be_present
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 1849d3bac12..e530e0302f5 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -3,6 +3,18 @@ require 'spec_helper'
describe MergeRequestDiff do
let(:diff_with_commits) { create(:merge_request).merge_request_diff }
+ describe 'validations' do
+ subject { diff_with_commits }
+
+ it 'checks sha format of base_commit_sha, head_commit_sha and start_commit_sha' do
+ subject.base_commit_sha = subject.head_commit_sha = subject.start_commit_sha = 'foobar'
+
+ expect(subject.valid?).to be false
+ expect(subject.errors.count).to eq 3
+ expect(subject.errors).to all(include('is not a valid SHA'))
+ end
+ end
+
describe 'create new record' do
subject { diff_with_commits }
@@ -78,7 +90,7 @@ describe MergeRequestDiff do
it 'returns persisted diffs if cannot compare with diff refs' do
expect(diff).to receive(:load_diffs).and_call_original
- diff.update!(head_commit_sha: 'invalid-sha')
+ diff.update!(head_commit_sha: Digest::SHA1.hexdigest(SecureRandom.hex))
diff.diffs.diff_files
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 82a853a23b9..07cb4c9c1e3 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1333,7 +1333,7 @@ describe MergeRequest do
let!(:merge_request_pipeline) do
create(:ci_pipeline,
- source: :merge_request,
+ source: :merge_request_event,
project: project,
ref: source_ref,
sha: shas.second,
@@ -1372,7 +1372,7 @@ describe MergeRequest do
let!(:merge_request_pipeline_2) do
create(:ci_pipeline,
- source: :merge_request,
+ source: :merge_request_event,
project: project,
ref: source_ref,
sha: shas.first,
@@ -1399,7 +1399,7 @@ describe MergeRequest do
let!(:merge_request_pipeline_2) do
create(:ci_pipeline,
- source: :merge_request,
+ source: :merge_request_event,
project: project,
ref: source_ref,
sha: shas.first,
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index af7e3d3a6c9..77b7042424c 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -182,7 +182,7 @@ describe Milestone do
describe '#total_items_count' do
before do
create :closed_issue, milestone: milestone, project: project
- create :merge_request, milestone: milestone
+ create :merge_request, milestone: milestone, source_project: project
end
it 'returns total count of issues and merge requests assigned to milestone' do
@@ -192,10 +192,10 @@ describe Milestone do
describe '#can_be_closed?' do
before do
- milestone = create :milestone
- create :closed_issue, milestone: milestone
+ milestone = create :milestone, project: project
+ create :closed_issue, milestone: milestone, project: project
- create :issue
+ create :issue, project: project
end
it 'returns true if milestone active and all nested issues closed' do
diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb
index 788b3179b01..5428fcb1271 100644
--- a/spec/models/project_services/jira_service_spec.rb
+++ b/spec/models/project_services/jira_service_spec.rb
@@ -177,9 +177,10 @@ describe JiraService do
expect(WebMock).to have_requested(:post, @remote_link_url).with(
body: hash_including(
GlobalID: 'GitLab',
+ relationship: 'mentioned on',
object: {
url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/commit/#{commit_id}",
- title: "GitLab: Solved by commit #{commit_id}.",
+ title: "Solved by commit #{commit_id}.",
icon: { title: 'GitLab', url16x16: favicon_path },
status: { resolved: true }
}
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index b6cf4c72450..e9c7c94ad70 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -33,18 +33,38 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
describe 'Validations' do
context 'when manual_configuration is enabled' do
before do
- subject.manual_configuration = true
+ service.manual_configuration = true
end
- it { is_expected.to validate_presence_of(:api_url) }
+ it 'validates presence of api_url' do
+ expect(service).to validate_presence_of(:api_url)
+ end
end
context 'when manual configuration is disabled' do
before do
- subject.manual_configuration = false
+ service.manual_configuration = false
end
- it { is_expected.not_to validate_presence_of(:api_url) }
+ it 'does not validate presence of api_url' do
+ expect(service).not_to validate_presence_of(:api_url)
+ end
+ end
+
+ context 'when the api_url domain points to localhost or local network' do
+ let(:domain) { Addressable::URI.parse(service.api_url).hostname }
+
+ it 'cannot query' do
+ expect(service.can_query?).to be true
+
+ aggregate_failures do
+ ['127.0.0.1', '192.168.2.3'].each do |url|
+ allow(Addrinfo).to receive(:getaddrinfo).with(domain, any_args).and_return([Addrinfo.tcp(url, 80)])
+
+ expect(service.can_query?).to be false
+ end
+ end
+ end
end
end
@@ -74,30 +94,35 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
end
describe '#prometheus_client' do
+ let(:api_url) { 'http://some_url' }
+
+ before do
+ service.active = true
+ service.api_url = api_url
+ service.manual_configuration = manual_configuration
+ end
+
context 'manual configuration is enabled' do
- let(:api_url) { 'http://some_url' }
+ let(:manual_configuration) { true }
- before do
- subject.active = true
- subject.manual_configuration = true
- subject.api_url = api_url
+ it 'returns rest client from api_url' do
+ expect(service.prometheus_client.url).to eq(api_url)
end
- it 'returns rest client from api_url' do
- expect(subject.prometheus_client.url).to eq(api_url)
+ it 'calls valid?' do
+ allow(service).to receive(:valid?).and_call_original
+
+ expect(service.prometheus_client).not_to be_nil
+
+ expect(service).to have_received(:valid?)
end
end
context 'manual configuration is disabled' do
- let(:api_url) { 'http://some_url' }
-
- before do
- subject.manual_configuration = false
- subject.api_url = api_url
- end
+ let(:manual_configuration) { false }
it 'no client provided' do
- expect(subject.prometheus_client).to be_nil
+ expect(service.prometheus_client).to be_nil
end
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 9fb0d04ca9e..b2392f9521f 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -3430,28 +3430,42 @@ describe Project do
project.migrate_to_hashed_storage!
end
- it 'schedules ProjectMigrateHashedStorageWorker with delayed start when the project repo is in use' do
+ it 'schedules HashedStorage::ProjectMigrateWorker with delayed start when the project repo is in use' do
Gitlab::ReferenceCounter.new(project.gl_repository(is_wiki: false)).increase
- expect(ProjectMigrateHashedStorageWorker).to receive(:perform_in)
+ expect(HashedStorage::ProjectMigrateWorker).to receive(:perform_in)
project.migrate_to_hashed_storage!
end
- it 'schedules ProjectMigrateHashedStorageWorker with delayed start when the wiki repo is in use' do
+ it 'schedules HashedStorage::ProjectMigrateWorker with delayed start when the wiki repo is in use' do
Gitlab::ReferenceCounter.new(project.gl_repository(is_wiki: true)).increase
- expect(ProjectMigrateHashedStorageWorker).to receive(:perform_in)
+ expect(HashedStorage::ProjectMigrateWorker).to receive(:perform_in)
project.migrate_to_hashed_storage!
end
- it 'schedules ProjectMigrateHashedStorageWorker' do
- expect(ProjectMigrateHashedStorageWorker).to receive(:perform_async).with(project.id)
+ it 'schedules HashedStorage::ProjectMigrateWorker' do
+ expect(HashedStorage::ProjectMigrateWorker).to receive(:perform_async).with(project.id)
project.migrate_to_hashed_storage!
end
end
+
+ describe '#rollback_to_legacy_storage!' do
+ let(:project) { create(:project, :empty_repo, :legacy_storage) }
+
+ it 'returns nil' do
+ expect(project.rollback_to_legacy_storage!).to be_nil
+ end
+
+ it 'does not run validations' do
+ expect(project).not_to receive(:valid?)
+
+ project.rollback_to_legacy_storage!
+ end
+ end
end
context 'hashed storage' do
@@ -3527,11 +3541,35 @@ describe Project do
project = create(:project, storage_version: 1, skip_disk_validation: true)
Sidekiq::Testing.fake! do
- expect { project.migrate_to_hashed_storage! }.to change(ProjectMigrateHashedStorageWorker.jobs, :size).by(1)
+ expect { project.migrate_to_hashed_storage! }.to change(HashedStorage::ProjectMigrateWorker.jobs, :size).by(1)
end
end
end
end
+
+ describe '#rollback_to_legacy_storage!' do
+ let(:project) { create(:project, :repository, skip_disk_validation: true) }
+
+ it 'returns true' do
+ expect(project.rollback_to_legacy_storage!).to be_truthy
+ end
+
+ it 'does not run validations' do
+ expect(project).not_to receive(:valid?)
+
+ project.rollback_to_legacy_storage!
+ end
+
+ it 'does not flag as read-only' do
+ expect { project.rollback_to_legacy_storage! }.not_to change { project.repository_read_only }
+ end
+
+ it 'enqueues a job' do
+ Sidekiq::Testing.fake! do
+ expect { project.rollback_to_legacy_storage! }.to change(HashedStorage::ProjectRollbackWorker.jobs, :size).by(1)
+ end
+ end
+ end
end
describe '#gl_repository' do
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index 4c677200ae2..dafe7646366 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -190,4 +190,32 @@ describe ProtectedBranch do
end
end
end
+
+ describe '.any_protected?' do
+ context 'existing project' do
+ let(:project) { create(:project, :repository) }
+
+ it 'returns true when any of the branch names match a protected branch via direct match' do
+ create(:protected_branch, project: project, name: 'foo')
+
+ expect(described_class.any_protected?(project, ['foo', 'production/some-branch'])).to eq(true)
+ end
+
+ it 'returns true when any of the branch names match a protected branch via wildcard match' do
+ create(:protected_branch, project: project, name: 'production/*')
+
+ expect(described_class.any_protected?(project, ['foo', 'production/some-branch'])).to eq(true)
+ end
+
+ it 'returns false when none of the branch names match a protected branch via direct match' do
+ expect(described_class.any_protected?(project, ['foo'])).to eq(false)
+ end
+
+ it 'returns false when none of the branch names match a protected branch via wildcard match' do
+ create(:protected_branch, project: project, name: 'production/*')
+
+ expect(described_class.any_protected?(project, ['staging/some-branch'])).to eq(false)
+ end
+ end
+ end
end
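The wildcard cases above rely on matching concrete branch names against protected-branch patterns such as 'production/*'; the following self-contained approximation of that semantic uses a simple glob-to-regexp translation and is not GitLab's actual matcher.

# Illustrative wildcard matching for protected branch names ('*' matches anything).
def wildcard_match?(protected_name, branch_name)
  pattern = Regexp.new("\\A#{Regexp.escape(protected_name).gsub('\*', '.*')}\\z")
  pattern.match?(branch_name)
end

def any_protected?(protected_names, branch_names)
  branch_names.any? do |branch|
    protected_names.any? { |protected| wildcard_match?(protected, branch) }
  end
end

any_protected?(['production/*'], ['foo', 'production/some-branch']) # => true
any_protected?(['production/*'], ['staging/some-branch'])           # => false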
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 17201d8b90a..70630467d24 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -2237,7 +2237,7 @@ describe Repository do
rugged.references.create("refs/remotes/#{remote_name}/#{branch_name}", target.id)
end
- describe '#ancestor?' do
+ shared_examples '#ancestor?' do
let(:commit) { repository.commit }
let(:ancestor) { commit.parents.first }
@@ -2261,6 +2261,20 @@ describe Repository do
end
end
+ describe '#ancestor? with Gitaly enabled' do
+ it_behaves_like "#ancestor?"
+ end
+
+ describe '#ancestor? with Rugged enabled', :enable_rugged do
+ it 'calls out to the Rugged implementation' do
+ allow_any_instance_of(Rugged).to receive(:merge_base).with(repository.commit.id, Gitlab::Git::BLANK_SHA).and_call_original
+
+ repository.ancestor?(repository.commit.id, Gitlab::Git::BLANK_SHA)
+ end
+
+ it_behaves_like '#ancestor?'
+ end
+
describe '#archive_metadata' do
let(:ref) { 'master' }
let(:storage_path) { '/tmp' }
diff --git a/spec/policies/commit_policy_spec.rb b/spec/policies/commit_policy_spec.rb
new file mode 100644
index 00000000000..41f6fb08426
--- /dev/null
+++ b/spec/policies/commit_policy_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe CommitPolicy do
+ describe '#rules' do
+ let(:user) { create(:user) }
+ let(:commit) { project.repository.head_commit }
+ let(:policy) { described_class.new(user, commit) }
+
+ context 'when project is public' do
+ let(:project) { create(:project, :public, :repository) }
+
+ it 'can read commit and create a note' do
+ expect(policy).to be_allowed(:read_commit)
+ end
+
+ context 'when repository access level is private' do
+ let(:project) { create(:project, :public, :repository, :repository_private) }
+
+ it 'can not read commit and create a note' do
+ expect(policy).to be_disallowed(:read_commit)
+ end
+
+ context 'when the user is a project member' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'can read commit and create a note' do
+ expect(policy).to be_allowed(:read_commit)
+ end
+ end
+ end
+ end
+
+ context 'when project is private' do
+ let(:project) { create(:project, :private, :repository) }
+
+ it 'can not read commit and create a note' do
+ expect(policy).to be_disallowed(:read_commit)
+ end
+
+ context 'when the user is a project member' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'can read commit and create a note' do
+ expect(policy).to be_allowed(:read_commit)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index 30d68e7dc9d..12be3927e18 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -181,6 +181,18 @@ describe GlobalPolicy do
end
end
+ describe 'read instance metadata' do
+ context 'regular user' do
+ it { is_expected.to be_allowed(:read_instance_metadata) }
+ end
+
+ context 'anonymous' do
+ let(:current_user) { nil }
+
+ it { is_expected.not_to be_allowed(:read_instance_metadata) }
+ end
+ end
+
describe 'read instance statistics' do
context 'regular user' do
it { is_expected.to be_allowed(:read_instance_statistics) }
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index af6d6f084a9..0ad50c6f91f 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -74,6 +74,38 @@ describe GroupPolicy do
end
end
+ context 'with no user and public project' do
+ let(:project) { create(:project, :public) }
+ let(:user) { create(:user) }
+ let(:current_user) { nil }
+
+ before do
+ Projects::GroupLinks::CreateService.new(
+ project,
+ user,
+ link_group_access: ProjectGroupLink::DEVELOPER
+ ).execute(group)
+ end
+
+ it { expect_disallowed(:read_group) }
+ end
+
+ context 'with foreign user and public project' do
+ let(:project) { create(:project, :public) }
+ let(:user) { create(:user) }
+ let(:current_user) { create(:user) }
+
+ before do
+ Projects::GroupLinks::CreateService.new(
+ project,
+ user,
+ link_group_access: ProjectGroupLink::DEVELOPER
+ ).execute(group)
+ end
+
+ it { expect_disallowed(:read_group) }
+ end
+
context 'has projects' do
let(:current_user) { create(:user) }
let(:project) { create(:project, namespace: group) }
@@ -82,17 +114,13 @@ describe GroupPolicy do
project.add_developer(current_user)
end
- it do
- expect_allowed(:read_group, :read_list, :read_label)
- end
+ it { expect_allowed(:read_label, :read_list) }
context 'in subgroups', :nested_groups do
let(:subgroup) { create(:group, :private, parent: group) }
let(:project) { create(:project, namespace: subgroup) }
- it do
- expect_allowed(:read_group, :read_list, :read_label)
- end
+ it { expect_allowed(:read_label, :read_list) }
end
end
diff --git a/spec/policies/note_policy_spec.rb b/spec/policies/note_policy_spec.rb
index 0e848c74659..4be7a0266d1 100644
--- a/spec/policies/note_policy_spec.rb
+++ b/spec/policies/note_policy_spec.rb
@@ -1,28 +1,15 @@
require 'spec_helper'
-describe NotePolicy, mdoels: true do
+describe NotePolicy do
describe '#rules' do
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project) }
-
- def policies(noteable = nil)
- return @policies if @policies
-
- noteable ||= issue
- note = if noteable.is_a?(Commit)
- create(:note_on_commit, commit_id: noteable.id, author: user, project: project)
- else
- create(:note, noteable: noteable, author: user, project: project)
- end
-
- @policies = described_class.new(user, note)
- end
+ let(:noteable) { issue }
+ let(:policy) { described_class.new(user, note) }
+ let(:note) { create(:note, noteable: noteable, author: user, project: project) }
shared_examples_for 'a discussion with a private noteable' do
- let(:noteable) { issue }
- let(:policy) { policies(noteable) }
-
context 'when the note author can no longer see the noteable' do
it 'can not edit nor read the note' do
expect(policy).to be_disallowed(:admin_note)
@@ -46,12 +33,21 @@ describe NotePolicy, mdoels: true do
end
end
- context 'when the project is private' do
- let(:project) { create(:project, :private, :repository) }
+ context 'when the noteable is a commit' do
+ let(:commit) { project.repository.head_commit }
+ let(:note) { create(:note_on_commit, commit_id: commit.id, author: user, project: project) }
+
+ context 'when the project is private' do
+ let(:project) { create(:project, :private, :repository) }
+
+ it_behaves_like 'a discussion with a private noteable'
+ end
- context 'when the noteable is a commit' do
- it_behaves_like 'a discussion with a private noteable' do
- let(:noteable) { project.repository.head_commit }
+ context 'when the project is public' do
+ context 'when repository access level is private' do
+ let(:project) { create(:project, :public, :repository, :repository_private) }
+
+ it_behaves_like 'a discussion with a private noteable'
end
end
end
@@ -59,44 +55,44 @@ describe NotePolicy, mdoels: true do
context 'when the project is public' do
context 'when the note author is not a project member' do
it 'can edit a note' do
- expect(policies).to be_allowed(:admin_note)
- expect(policies).to be_allowed(:resolve_note)
- expect(policies).to be_allowed(:read_note)
+ expect(policy).to be_allowed(:admin_note)
+ expect(policy).to be_allowed(:resolve_note)
+ expect(policy).to be_allowed(:read_note)
end
end
context 'when the noteable is a project snippet' do
- it 'can edit note' do
- policies = policies(create(:project_snippet, :public, project: project))
+ let(:noteable) { create(:project_snippet, :public, project: project) }
- expect(policies).to be_allowed(:admin_note)
- expect(policies).to be_allowed(:resolve_note)
- expect(policies).to be_allowed(:read_note)
+ it 'can edit note' do
+ expect(policy).to be_allowed(:admin_note)
+ expect(policy).to be_allowed(:resolve_note)
+ expect(policy).to be_allowed(:read_note)
end
context 'when it is private' do
- it_behaves_like 'a discussion with a private noteable' do
- let(:noteable) { create(:project_snippet, :private, project: project) }
- end
+ let(:noteable) { create(:project_snippet, :private, project: project) }
+
+ it_behaves_like 'a discussion with a private noteable'
end
end
context 'when the noteable is a personal snippet' do
- it 'can edit note' do
- policies = policies(create(:personal_snippet, :public))
+ let(:noteable) { create(:personal_snippet, :public) }
- expect(policies).to be_allowed(:admin_note)
- expect(policies).to be_allowed(:resolve_note)
- expect(policies).to be_allowed(:read_note)
+ it 'can edit note' do
+ expect(policy).to be_allowed(:admin_note)
+ expect(policy).to be_allowed(:resolve_note)
+ expect(policy).to be_allowed(:read_note)
end
context 'when it is private' do
- it 'can not edit nor read the note' do
- policies = policies(create(:personal_snippet, :private))
+ let(:noteable) { create(:personal_snippet, :private) }
- expect(policies).to be_disallowed(:admin_note)
- expect(policies).to be_disallowed(:resolve_note)
- expect(policies).to be_disallowed(:read_note)
+ it 'can not edit nor read the note' do
+ expect(policy).to be_disallowed(:admin_note)
+ expect(policy).to be_disallowed(:resolve_note)
+ expect(policy).to be_disallowed(:read_note)
end
end
end
@@ -120,20 +116,20 @@ describe NotePolicy, mdoels: true do
end
it 'can edit a note' do
- expect(policies).to be_allowed(:admin_note)
- expect(policies).to be_allowed(:resolve_note)
- expect(policies).to be_allowed(:read_note)
+ expect(policy).to be_allowed(:admin_note)
+ expect(policy).to be_allowed(:resolve_note)
+ expect(policy).to be_allowed(:read_note)
end
end
context 'when the note author is not a project member' do
it 'can not edit a note' do
- expect(policies).to be_disallowed(:admin_note)
- expect(policies).to be_disallowed(:resolve_note)
+ expect(policy).to be_disallowed(:admin_note)
+ expect(policy).to be_disallowed(:resolve_note)
end
it 'can read a note' do
- expect(policies).to be_allowed(:read_note)
+ expect(policy).to be_allowed(:read_note)
end
end
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 997bdc82af6..772d1fbee2b 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -45,8 +45,7 @@ describe ProjectPolicy do
let(:base_maintainer_permissions) do
%i[
push_to_delete_protected_branch update_project_snippet update_environment
- update_deployment admin_project_snippet
- admin_project_member admin_note admin_wiki admin_project
+ update_deployment admin_project_snippet admin_project_member admin_note admin_wiki admin_project
admin_commit_status admin_build admin_container_image
admin_pipeline admin_environment admin_deployment destroy_release add_cluster
daily_statistics
@@ -131,22 +130,26 @@ describe ProjectPolicy do
subject { described_class.new(owner, project) }
context 'when the feature is disabled' do
- it 'does not include the issues permissions' do
+ before do
project.issues_enabled = false
project.save!
+ end
+ it 'does not include the issues permissions' do
expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue
end
- end
- context 'when the feature is disabled and external tracker configured' do
- it 'does not include the issues permissions' do
- create(:jira_service, project: project)
+ it 'disables boards and lists permissions' do
+ expect_disallowed :read_board, :create_board, :update_board, :admin_board
+ expect_disallowed :read_list, :create_list, :update_list, :admin_list
+ end
- project.issues_enabled = false
- project.save!
+ context 'when external tracker is configured' do
+ it 'does not include the issues permissions' do
+ create(:jira_service, project: project)
- expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue
+ expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue
+ end
end
end
end
diff --git a/spec/presenters/blobs/unfold_presenter_spec.rb b/spec/presenters/blobs/unfold_presenter_spec.rb
new file mode 100644
index 00000000000..7ece5f623ce
--- /dev/null
+++ b/spec/presenters/blobs/unfold_presenter_spec.rb
@@ -0,0 +1,159 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Blobs::UnfoldPresenter do
+ include FakeBlobHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:blob) { fake_blob(path: 'foo', data: "1\n2\n3") }
+ let(:subject) { described_class.new(blob, params) }
+
+ describe '#initialize' do
+ context 'when full is false' do
+ let(:params) { { full: false, since: 2, to: 3, bottom: false, offset: 1, indent: 1 } }
+
+ it 'sets attributes' do
+ result = subject
+
+ expect(result.full?).to eq(false)
+ expect(result.since).to eq(2)
+ expect(result.to).to eq(3)
+ expect(result.bottom).to eq(false)
+ expect(result.offset).to eq(1)
+ expect(result.indent).to eq(1)
+ end
+ end
+
+ context 'when full is true' do
+ let(:params) { { full: true, since: 2, to: 3, bottom: false, offset: 1, indent: 1 } }
+
+ it 'sets other attributes' do
+ result = subject
+
+ expect(result.full?).to eq(true)
+ expect(result.since).to eq(1)
+ expect(result.to).to eq(blob.lines.size)
+ expect(result.bottom).to eq(false)
+ expect(result.offset).to eq(0)
+ expect(result.indent).to eq(0)
+ end
+ end
+ end
+
+ describe '#diff_lines' do
+ let(:total_lines) { 50 }
+ let(:blob) { fake_blob(path: 'foo', data: (1..total_lines).to_a.join("\n")) }
+
+ context 'when "full" is true' do
+ let(:params) { { full: true } }
+
+ it 'returns all lines' do
+ lines = subject.diff_lines
+
+ expect(lines.size).to eq(total_lines)
+
+ lines.each.with_index do |line, index|
+ expect(line.text).to include("LC#{index + 1}")
+ expect(line.text).to eq(line.rich_text)
+ expect(line.type).to be_nil
+ end
+ end
+
+ context 'when last line is empty' do
+ let(:blob) { fake_blob(path: 'foo', data: "1\n2\n") }
+
+ it 'disregards last line' do
+ lines = subject.diff_lines
+
+ expect(lines.size).to eq(2)
+ end
+ end
+ end
+
+ context 'when "since" is equal to 1' do
+ let(:params) { { since: 1, to: 10, offset: 10 } }
+
+ it 'does not add top match line' do
+ line = subject.diff_lines.first
+
+ expect(line.type).to be_nil
+ end
+ end
+
+ context 'when since is greater than 1' do
+ let(:params) { { since: 5, to: 10, offset: 10 } }
+
+ it 'adds top match line' do
+ line = subject.diff_lines.first
+
+ expect(line.type).to eq('match')
+ expect(line.old_pos).to eq(5)
+ expect(line.new_pos).to eq(5)
+ end
+ end
+
+ context 'when "to" is less than blob size' do
+ let(:params) { { since: 1, to: 5, offset: 10, bottom: true } }
+
+ it 'adds bottom match line' do
+ line = subject.diff_lines.last
+
+ expect(line.type).to eq('match')
+ expect(line.old_pos).to eq(-5)
+ expect(line.new_pos).to eq(5)
+ end
+ end
+
+ context 'when "to" is equal to blob size' do
+ let(:params) { { since: 1, to: total_lines, offset: 10, bottom: true } }
+
+ it 'does not add bottom match line' do
+ line = subject.diff_lines.last
+
+ expect(line.type).to be_nil
+ end
+ end
+ end
+
+ describe '#lines' do
+ context 'when scope is specified' do
+ let(:params) { { since: 2, to: 3 } }
+
+ it 'returns lines cropped by params' do
+ expect(subject.lines.size).to eq(2)
+ expect(subject.lines[0]).to include('LC2')
+ expect(subject.lines[1]).to include('LC3')
+ end
+ end
+
+ context 'when full is true' do
+ let(:params) { { full: true } }
+
+ it 'returns all lines' do
+ expect(subject.lines.size).to eq(3)
+ expect(subject.lines[0]).to include('LC1')
+ expect(subject.lines[1]).to include('LC2')
+ expect(subject.lines[2]).to include('LC3')
+ end
+ end
+ end
+
+ describe '#match_line_text' do
+ context 'when bottom is true' do
+ let(:params) { { since: 2, to: 3, bottom: true } }
+
+ it 'returns empty string' do
+ expect(subject.match_line_text).to eq('')
+ end
+ end
+
+ context 'when bottom is false' do
+ let(:params) { { since: 2, to: 3, bottom: false } }
+
+ it 'returns match line string' do
+ expect(subject.match_line_text).to eq("@@ -2,1+2,1 @@")
+ end
+ end
+ end
+end
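The match_line_text expectation above implies a hunk-style header derived from the requested range; here is a rough standalone approximation in which the since/to arithmetic is inferred from the single example in the spec, not read from the presenter itself.

# Inferred sketch: build the match line from a since/to range, empty when unfolding at the bottom.
def match_line_text(since, to, bottom: false)
  return '' if bottom

  span = to - since
  "@@ -#{since},#{span}+#{since},#{span} @@"
end

raise 'unexpected' unless match_line_text(2, 3, bottom: true) == ''
raise 'unexpected' unless match_line_text(2, 3) == '@@ -2,1+2,1 @@'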
diff --git a/spec/presenters/group_clusterable_presenter_spec.rb b/spec/presenters/group_clusterable_presenter_spec.rb
index 205160742bf..fa77273f6aa 100644
--- a/spec/presenters/group_clusterable_presenter_spec.rb
+++ b/spec/presenters/group_clusterable_presenter_spec.rb
@@ -69,6 +69,14 @@ describe GroupClusterablePresenter do
it { is_expected.to eq(install_applications_group_cluster_path(group, cluster, application)) }
end
+ describe '#update_applications_cluster_path' do
+ let(:application) { :helm }
+
+ subject { presenter.update_applications_cluster_path(cluster, application) }
+
+ it { is_expected.to eq(update_applications_group_cluster_path(group, cluster, application)) }
+ end
+
describe '#cluster_path' do
subject { presenter.cluster_path(cluster) }
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index bafcddebbb7..02cefcbc916 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -392,6 +392,29 @@ describe MergeRequestPresenter do
end
end
+ describe '#target_branch_path' do
+ subject do
+ described_class.new(resource, current_user: user).target_branch_path
+ end
+
+ context 'when target branch exists' do
+ it 'returns path' do
+ allow(resource).to receive(:target_branch_exists?) { true }
+
+ is_expected
+ .to eq("/#{resource.source_project.full_path}/branches/#{resource.target_branch}")
+ end
+ end
+
+ context 'when target branch does not exist' do
+ it 'returns nil' do
+ allow(resource).to receive(:target_branch_exists?) { false }
+
+ is_expected.to be_nil
+ end
+ end
+ end
+
describe '#source_branch_with_namespace_link' do
subject do
described_class.new(resource, current_user: user).source_branch_with_namespace_link
diff --git a/spec/presenters/project_clusterable_presenter_spec.rb b/spec/presenters/project_clusterable_presenter_spec.rb
index c50d90ae1e8..6786a84243f 100644
--- a/spec/presenters/project_clusterable_presenter_spec.rb
+++ b/spec/presenters/project_clusterable_presenter_spec.rb
@@ -69,6 +69,14 @@ describe ProjectClusterablePresenter do
it { is_expected.to eq(install_applications_project_cluster_path(project, cluster, application)) }
end
+ describe '#update_applications_cluster_path' do
+ let(:application) { :helm }
+
+ subject { presenter.update_applications_cluster_path(cluster, application) }
+
+ it { is_expected.to eq(update_applications_project_cluster_path(project, cluster, application)) }
+ end
+
describe '#cluster_path' do
subject { presenter.cluster_path(cluster) }
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 066f1d6862a..a132b85b878 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -1430,8 +1430,8 @@ describe API::Commits do
end
describe 'GET /projects/:id/repository/commits/:sha/merge_requests' do
- let!(:project) { create(:project, :repository, :private) }
- let!(:merged_mr) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'feature') }
+ let(:project) { create(:project, :repository, :private) }
+ let(:merged_mr) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'feature') }
let(:commit) { merged_mr.merge_request_diff.commits.last }
it 'returns the correct merge request' do
@@ -1456,6 +1456,17 @@ describe API::Commits do
expect(response).to have_gitlab_http_status(404)
end
+
+ context 'public project' do
+ let(:project) { create(:project, :repository, :public, :merge_requests_private) }
+ let(:non_member) { create(:user) }
+
+ it 'responds 403 when only members are allowed to read merge requests' do
+ get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", non_member)
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
end
describe 'GET /projects/:id/repository/commits/:sha/signature' do
diff --git a/spec/requests/api/graphql/metadata_query_spec.rb b/spec/requests/api/graphql/metadata_query_spec.rb
new file mode 100644
index 00000000000..4c56c559cf9
--- /dev/null
+++ b/spec/requests/api/graphql/metadata_query_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'getting instance metadata' do
+ include GraphqlHelpers
+
+ let(:query) { graphql_query_for('metadata', {}, all_graphql_fields_for('Metadata')) }
+
+ context 'logged in' do
+ it 'returns version and revision' do
+ post_graphql(query, current_user: create(:user))
+
+ expect(graphql_errors).to be_nil
+ expect(graphql_data).to eq(
+ 'metadata' => {
+ 'version' => Gitlab::VERSION,
+ 'revision' => Gitlab.revision
+ }
+ )
+ end
+ end
+
+ context 'anonymous user' do
+ it 'returns nothing' do
+ post_graphql(query, current_user: nil)
+
+ expect(graphql_errors).to be_nil
+ expect(graphql_data).to eq('metadata' => nil)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
new file mode 100644
index 00000000000..cca87c16f27
--- /dev/null
+++ b/spec/requests/api/graphql_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe 'GraphQL' do
+ include GraphqlHelpers
+
+ let(:query) { graphql_query_for('echo', 'text' => 'Hello world' ) }
+
+ context 'graphql is disabled by feature flag' do
+ before do
+ stub_feature_flags(graphql: false)
+ end
+
+ it 'does not generate a route for GraphQL' do
+ expect { post_graphql(query) }.to raise_error(ActionController::RoutingError)
+ end
+ end
+
+ context 'invalid variables' do
+ it 'returns an error' do
+ post_graphql(query, variables: "This is not JSON")
+
+ expect(response).to have_gitlab_http_status(422)
+ expect(json_response['errors'].first['message']).not_to be_nil
+ end
+ end
+
+ context 'authentication', :allow_forgery_protection do
+ let(:user) { create(:user) }
+
+ it 'allows access to public data without authentication' do
+ post_graphql(query)
+
+ expect(graphql_data['echo']).to eq('nil says: Hello world')
+ end
+
+ it 'does not authenticate a user with an invalid CSRF' do
+ login_as(user)
+
+ post_graphql(query, headers: { 'X-CSRF-Token' => 'invalid' })
+
+ expect(graphql_data['echo']).to eq('nil says: Hello world')
+ end
+
+ it 'authenticates a user with a valid session token' do
+ # Create a session to get a CSRF token from
+ login_as(user)
+ get('/')
+
+ post '/api/graphql', params: { query: query }, headers: { 'X-CSRF-Token' => response.session['_csrf_token'] }
+
+ expect(graphql_data['echo']).to eq("\"#{user.username}\" says: Hello world")
+ end
+
+ context 'token authentication' do
+ let(:token) { create(:personal_access_token) }
+
+ before do
+ stub_authentication_activity_metrics(debug: false)
+ end
+
+ it 'authenticates users with a PAT' do
+ expect(authentication_metrics)
+ .to increment(:user_authenticated_counter)
+ .and increment(:user_session_override_counter)
+ .and increment(:user_sessionless_authentication_counter)
+
+ post_graphql(query, headers: { 'PRIVATE-TOKEN' => token.token })
+
+ expect(graphql_data['echo']).to eq("\"#{token.user.username}\" says: Hello world")
+ end
+
+ context 'when the personal access token has no api scope' do
+ it 'does not log the user in' do
+ token.update(scopes: [:read_user])
+
+ post_graphql(query, headers: { 'PRIVATE-TOKEN' => token.token })
+
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(graphql_data['echo']).to eq('nil says: Hello world')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/group_variables_spec.rb b/spec/requests/api/group_variables_spec.rb
index e52f4c70407..66b9aae4b58 100644
--- a/spec/requests/api/group_variables_spec.rb
+++ b/spec/requests/api/group_variables_spec.rb
@@ -87,12 +87,12 @@ describe API::GroupVariables do
it 'creates variable' do
expect do
- post api("/groups/#{group.id}/variables", user), params: { key: 'TEST_VARIABLE_2', value: 'VALUE_2', protected: true }
+ post api("/groups/#{group.id}/variables", user), params: { key: 'TEST_VARIABLE_2', value: 'PROTECTED_VALUE_2', protected: true }
end.to change {group.variables.count}.by(1)
expect(response).to have_gitlab_http_status(201)
expect(json_response['key']).to eq('TEST_VARIABLE_2')
- expect(json_response['value']).to eq('VALUE_2')
+ expect(json_response['value']).to eq('PROTECTED_VALUE_2')
expect(json_response['protected']).to be_truthy
end
diff --git a/spec/requests/api/issues_spec.rb b/spec/requests/api/issues_spec.rb
index 01bab2a1361..a5434d3ea80 100644
--- a/spec/requests/api/issues_spec.rb
+++ b/spec/requests/api/issues_spec.rb
@@ -49,7 +49,7 @@ describe API::Issues do
create(:label, title: 'label', color: '#FFAABB', project: project)
end
let!(:label_link) { create(:label_link, label: label, target: issue) }
- set(:milestone) { create(:milestone, title: '1.0.0', project: project) }
+ let(:milestone) { create(:milestone, title: '1.0.0', project: project) }
set(:empty_milestone) do
create(:milestone, title: '2.0.0', project: project)
end
@@ -271,7 +271,14 @@ describe API::Issues do
end
it 'returns an array of labeled issues' do
- get api("/issues", user), params: { labels: label.title }
+ get api('/issues', user), params: { labels: label.title }
+
+ expect_paginated_array_response(issue.id)
+ expect(json_response.first['labels']).to eq([label.title])
+ end
+
+ it 'returns an array of labeled issues with labels param as array' do
+ get api('/issues', user), params: { labels: [label.title] }
expect_paginated_array_response(issue.id)
expect(json_response.first['labels']).to eq([label.title])
@@ -284,7 +291,20 @@ describe API::Issues do
create(:label_link, label: label_b, target: issue)
create(:label_link, label: label_c, target: issue)
- get api("/issues", user), params: { labels: "#{label.title},#{label_b.title},#{label_c.title}" }
+ get api('/issues', user), params: { labels: "#{label.title},#{label_b.title},#{label_c.title}" }
+
+ expect_paginated_array_response(issue.id)
+ expect(json_response.first['labels']).to eq([label_c.title, label_b.title, label.title])
+ end
+
+ it 'returns an array of labeled issues when all labels matches with labels param as array' do
+ label_b = create(:label, title: 'foo', project: project)
+ label_c = create(:label, title: 'bar', project: project)
+
+ create(:label_link, label: label_b, target: issue)
+ create(:label_link, label: label_c, target: issue)
+
+ get api('/issues', user), params: { labels: [label.title, label_b.title, label_c.title] }
expect_paginated_array_response(issue.id)
expect(json_response.first['labels']).to eq([label_c.title, label_b.title, label.title])
@@ -296,8 +316,22 @@ describe API::Issues do
expect_paginated_array_response([])
end
+ it 'returns an empty array if no issue matches labels with labels param as array' do
+ get api('/issues', user), params: { labels: %w(foo bar) }
+
+ expect_paginated_array_response([])
+ end
+
it 'returns an array of labeled issues matching given state' do
- get api("/issues", user), params: { labels: label.title, state: :opened }
+ get api('/issues', user), params: { labels: label.title, state: :opened }
+
+ expect_paginated_array_response(issue.id)
+ expect(json_response.first['labels']).to eq([label.title])
+ expect(json_response.first['state']).to eq('opened')
+ end
+
+ it 'returns an array of labeled issues matching given state with labels param as array' do
+ get api('/issues', user), params: { labels: [label.title], state: :opened }
expect_paginated_array_response(issue.id)
expect(json_response.first['labels']).to eq([label.title])
@@ -305,25 +339,43 @@ describe API::Issues do
end
it 'returns an empty array if no issue matches labels and state filters' do
- get api("/issues", user), params: { labels: label.title, state: :closed }
+ get api('/issues', user), params: { labels: label.title, state: :closed }
expect_paginated_array_response([])
end
it 'returns an array of issues with any label' do
- get api("/issues", user), params: { labels: IssuesFinder::FILTER_ANY }
+ get api('/issues', user), params: { labels: IssuesFinder::FILTER_ANY }
+
+ expect_paginated_array_response(issue.id)
+ end
+
+ it 'returns an array of issues with any label with labels param as array' do
+ get api('/issues', user), params: { labels: [IssuesFinder::FILTER_ANY] }
expect_paginated_array_response(issue.id)
end
it 'returns an array of issues with no label' do
- get api("/issues", user), params: { labels: IssuesFinder::FILTER_NONE }
+ get api('/issues', user), params: { labels: IssuesFinder::FILTER_NONE }
+
+ expect_paginated_array_response(closed_issue.id)
+ end
+
+ it 'returns an array of issues with no label with labels param as array' do
+ get api('/issues', user), params: { labels: [IssuesFinder::FILTER_NONE] }
expect_paginated_array_response(closed_issue.id)
end
it 'returns an array of issues with no label when using the legacy No+Label filter' do
- get api("/issues", user), params: { labels: "No Label" }
+ get api('/issues', user), params: { labels: 'No Label' }
+
+ expect_paginated_array_response(closed_issue.id)
+ end
+
+ it 'returns an array of issues with no label when using the legacy No+Label filter with labels param as array' do
+ get api('/issues', user), params: { labels: ['No Label'] }
expect_paginated_array_response(closed_issue.id)
end
@@ -588,12 +640,25 @@ describe API::Issues do
expect(json_response.first['labels']).to eq([group_label.title])
end
+ it 'returns an array of labeled group issues with labels param as array' do
+ get api(base_url, user), params: { labels: [group_label.title] }
+
+ expect_paginated_array_response(group_issue.id)
+ expect(json_response.first['labels']).to eq([group_label.title])
+ end
+
it 'returns an array of labeled group issues where all labels match' do
get api(base_url, user), params: { labels: "#{group_label.title},foo,bar" }
expect_paginated_array_response([])
end
+ it 'returns an array of labeled group issues where all labels match with labels param as array' do
+ get api(base_url, user), params: { labels: [group_label.title, 'foo', 'bar'] }
+
+ expect_paginated_array_response([])
+ end
+
it 'returns issues matching given search string for title' do
get api(base_url, user), params: { search: group_issue.title }
@@ -619,6 +684,19 @@ describe API::Issues do
expect(json_response.first['labels']).to eq([label_c.title, label_b.title, group_label.title])
end
+ it 'returns an array of labeled issues when all labels matches with labels param as array' do
+ label_b = create(:label, title: 'foo', project: group_project)
+ label_c = create(:label, title: 'bar', project: group_project)
+
+ create(:label_link, label: label_b, target: group_issue)
+ create(:label_link, label: label_c, target: group_issue)
+
+ get api(base_url, user), params: { labels: [group_label.title, label_b.title, label_c.title] }
+
+ expect_paginated_array_response(group_issue.id)
+ expect(json_response.first['labels']).to eq([label_c.title, label_b.title, group_label.title])
+ end
+
it 'returns an array of issues found by iids' do
get api(base_url, user), params: { iids: [group_issue.iid] }
@@ -645,12 +723,25 @@ describe API::Issues do
expect(json_response.first['id']).to eq(group_issue.id)
end
+ it 'returns an array of group issues with any label with labels param as array' do
+ get api(base_url, user), params: { labels: [IssuesFinder::FILTER_ANY] }
+
+ expect_paginated_array_response(group_issue.id)
+ expect(json_response.first['id']).to eq(group_issue.id)
+ end
+
it 'returns an array of group issues with no label' do
get api(base_url, user), params: { labels: IssuesFinder::FILTER_NONE }
expect_paginated_array_response([group_closed_issue.id, group_confidential_issue.id])
end
+ it 'returns an array of group issues with no label with labels param as array' do
+ get api(base_url, user), params: { labels: [IssuesFinder::FILTER_NONE] }
+
+ expect_paginated_array_response([group_closed_issue.id, group_confidential_issue.id])
+ end
+
it 'returns an empty array if no issue matches milestone' do
get api(base_url, user), params: { milestone: group_empty_milestone.title }
@@ -842,6 +933,12 @@ describe API::Issues do
expect_paginated_array_response(issue.id)
end
+ it 'returns an array of labeled project issues with labels param as array' do
+ get api("#{base_url}/issues", user), params: { labels: [label.title] }
+
+ expect_paginated_array_response(issue.id)
+ end
+
it 'returns an array of labeled issues when all labels matches' do
label_b = create(:label, title: 'foo', project: project)
label_c = create(:label, title: 'bar', project: project)
@@ -854,6 +951,18 @@ describe API::Issues do
expect_paginated_array_response(issue.id)
end
+ it 'returns an array of labeled issues when all labels matches with labels param as array' do
+ label_b = create(:label, title: 'foo', project: project)
+ label_c = create(:label, title: 'bar', project: project)
+
+ create(:label_link, label: label_b, target: issue)
+ create(:label_link, label: label_c, target: issue)
+
+ get api("#{base_url}/issues", user), params: { labels: [label.title, label_b.title, label_c.title] }
+
+ expect_paginated_array_response(issue.id)
+ end
+
it 'returns issues matching given search string for title' do
get api("#{base_url}/issues?search=#{issue.title}", user)
@@ -890,12 +999,24 @@ describe API::Issues do
expect_paginated_array_response(issue.id)
end
+ it 'returns an array of project issues with any label with labels param as array' do
+ get api("#{base_url}/issues", user), params: { labels: [IssuesFinder::FILTER_ANY] }
+
+ expect_paginated_array_response(issue.id)
+ end
+
it 'returns an array of project issues with no label' do
get api("#{base_url}/issues", user), params: { labels: IssuesFinder::FILTER_NONE }
expect_paginated_array_response([confidential_issue.id, closed_issue.id])
end
+ it 'returns an array of project issues with no label with labels param as array' do
+ get api("#{base_url}/issues", user), params: { labels: [IssuesFinder::FILTER_NONE] }
+
+ expect_paginated_array_response([confidential_issue.id, closed_issue.id])
+ end
+
it 'returns an empty array if no project issue matches labels' do
get api("#{base_url}/issues", user), params: { labels: 'foo,bar' }
@@ -1215,6 +1336,19 @@ describe API::Issues do
expect(json_response['assignees'].first['name']).to eq(user2.name)
end
+ it 'creates a new project issue with labels param as array' do
+ post api("/projects/#{project.id}/issues", user),
+ params: { title: 'new issue', labels: %w(label label2), weight: 3, assignee_ids: [user2.id] }
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['title']).to eq('new issue')
+ expect(json_response['description']).to be_nil
+ expect(json_response['labels']).to eq(%w(label label2))
+ expect(json_response['confidential']).to be_falsy
+ expect(json_response['assignee']['name']).to eq(user2.name)
+ expect(json_response['assignees'].first['name']).to eq(user2.name)
+ end
+
it 'creates a new confidential project issue' do
post api("/projects/#{project.id}/issues", user),
params: { title: 'new issue', confidential: true }
@@ -1269,6 +1403,20 @@ describe API::Issues do
expect(json_response['labels']).to include '&'
end
+ it 'allows special label names with labels param as array' do
+ post api("/projects/#{project.id}/issues", user),
+ params: {
+ title: 'new issue',
+ labels: ['label', 'label?', 'label&foo, ?, &']
+ }
+ expect(response.status).to eq(201)
+ expect(json_response['labels']).to include 'label'
+ expect(json_response['labels']).to include 'label?'
+ expect(json_response['labels']).to include 'label&foo'
+ expect(json_response['labels']).to include '?'
+ expect(json_response['labels']).to include '&'
+ end
+
it 'returns 400 if title is too long' do
post api("/projects/#{project.id}/issues", user),
params: { title: 'g' * 256 }
@@ -1377,6 +1525,12 @@ describe API::Issues do
post api("/projects/#{project.id}/issues", non_member), params: { title: 'new issue', labels: 'label, label2' }
end.not_to change { project.labels.count }
end
+
+ it 'cannot create new labels with labels param as array' do
+ expect do
+ post api("/projects/#{project.id}/issues", non_member), params: { title: 'new issue', labels: %w(label label2) }
+ end.not_to change { project.labels.count }
+ end
end
end
@@ -1444,6 +1598,21 @@ describe API::Issues do
expect(json_response['labels']).to include '&'
end
+ it 'allows special label names with labels param as array' do
+ put api("/projects/#{project.id}/issues/#{issue.iid}", user),
+ params: {
+ title: 'updated title',
+ labels: ['label', 'label?', 'label&foo, ?, &']
+ }
+
+ expect(response.status).to eq(200)
+ expect(json_response['labels']).to include 'label'
+ expect(json_response['labels']).to include 'label?'
+ expect(json_response['labels']).to include 'label&foo'
+ expect(json_response['labels']).to include '?'
+ expect(json_response['labels']).to include '&'
+ end
+
context 'confidential issues' do
it "returns 403 for non project members" do
put api("/projects/#{project.id}/issues/#{confidential_issue.iid}", non_member),
@@ -1603,6 +1772,16 @@ describe API::Issues do
expect(json_response['updated_at']).to be > Time.now
end
+ it 'removes all labels and touches the record with labels param as array' do
+ Timecop.travel(1.minute.from_now) do
+ put api("/projects/#{project.id}/issues/#{issue.iid}", user), params: { labels: [''] }
+ end
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['labels']).to eq([])
+ expect(json_response['updated_at']).to be > Time.now
+ end
+
it 'updates labels and touches the record' do
Timecop.travel(1.minute.from_now) do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
@@ -1614,6 +1793,17 @@ describe API::Issues do
expect(json_response['updated_at']).to be > Time.now
end
+ it 'updates labels and touches the record with labels param as array' do
+ Timecop.travel(1.minute.from_now) do
+ put api("/projects/#{project.id}/issues/#{issue.iid}", user),
+ params: { labels: %w(foo bar) }
+ end
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['labels']).to include 'foo'
+ expect(json_response['labels']).to include 'bar'
+ expect(json_response['updated_at']).to be > Time.now
+ end
+
it 'allows special label names' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { labels: 'label:foo, label-bar,label_bar,label/bar,label?bar,label&bar,?,&' }
@@ -1628,6 +1818,20 @@ describe API::Issues do
expect(json_response['labels']).to include '&'
end
+ it 'allows special label names with labels param as array' do
+ put api("/projects/#{project.id}/issues/#{issue.iid}", user),
+ params: { labels: ['label:foo', 'label-bar', 'label_bar', 'label/bar,label?bar,label&bar,?,&'] }
+ expect(response.status).to eq(200)
+ expect(json_response['labels']).to include 'label:foo'
+ expect(json_response['labels']).to include 'label-bar'
+ expect(json_response['labels']).to include 'label_bar'
+ expect(json_response['labels']).to include 'label/bar'
+ expect(json_response['labels']).to include 'label?bar'
+ expect(json_response['labels']).to include 'label&bar'
+ expect(json_response['labels']).to include '?'
+ expect(json_response['labels']).to include '&'
+ end
+
it 'returns 400 if title is too long' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { title: 'g' * 256 }
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 3defe8bbf51..ed2ef4c730b 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -321,6 +321,49 @@ describe API::Jobs do
end
end
+ describe 'DELETE /projects/:id/jobs/:job_id/artifacts' do
+ let!(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
+
+ before do
+ delete api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+ end
+
+ context 'when user is anonymous' do
+ let(:api_user) { nil }
+
+ it 'does not delete artifacts' do
+ expect(job.job_artifacts.size).to eq 2
+ end
+
+ it 'returns status 401 (unauthorized)' do
+ expect(response).to have_http_status :unauthorized
+ end
+ end
+
+ context 'with developer' do
+ it 'does not delete artifacts' do
+ expect(job.job_artifacts.size).to eq 2
+ end
+
+ it 'returns status 403 (forbidden)' do
+ expect(response).to have_http_status :forbidden
+ end
+ end
+
+ context 'with authorized user' do
+ let(:maintainer) { create(:project_member, :maintainer, project: project).user }
+ let!(:api_user) { maintainer }
+
+ it 'deletes artifacts' do
+ expect(job.job_artifacts.size).to eq 0
+ end
+
+ it 'returns status 204 (no content)' do
+ expect(response).to have_http_status :no_content
+ end
+ end
+ end
+
describe 'GET /projects/:id/jobs/:job_id/artifacts/:artifact_path' do
context 'when job has artifacts' do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 6272bb38d59..fee6312a9c7 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -617,26 +617,115 @@ describe API::MergeRequests do
end
end
- describe "POST /projects/:id/merge_requests" do
+ describe 'POST /projects/:id/merge_requests' do
context 'between branches projects' do
- it "returns merge_request" do
- post api("/projects/#{project.id}/merge_requests", user),
- params: {
- title: 'Test merge_request',
- source_branch: 'feature_conflict',
- target_branch: 'master',
- author: user,
- labels: 'label, label2',
- milestone_id: milestone.id,
- squash: true
- }
+ context 'different labels' do
+ let(:params) do
+ {
+ title: 'Test merge_request',
+ source_branch: 'feature_conflict',
+ target_branch: 'master',
+ author_id: user.id,
+ milestone_id: milestone.id,
+ squash: true
+ }
+ end
- expect(response).to have_gitlab_http_status(201)
- expect(json_response['title']).to eq('Test merge_request')
- expect(json_response['labels']).to eq(%w(label label2))
- expect(json_response['milestone']['id']).to eq(milestone.id)
- expect(json_response['squash']).to be_truthy
- expect(json_response['force_remove_source_branch']).to be_falsy
+ shared_examples_for 'creates merge request with labels' do
+ it 'returns merge_request' do
+ params[:labels] = labels
+ post api("/projects/#{project.id}/merge_requests", user), params: params
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['title']).to eq('Test merge_request')
+ expect(json_response['labels']).to eq(%w(label label2))
+ expect(json_response['milestone']['id']).to eq(milestone.id)
+ expect(json_response['squash']).to be_truthy
+ expect(json_response['force_remove_source_branch']).to be_falsy
+ end
+ end
+
+ it_behaves_like 'creates merge request with labels' do
+ let(:labels) { 'label, label2' }
+ end
+
+ it_behaves_like 'creates merge request with labels' do
+ let(:labels) { %w(label label2) }
+ end
+
+ it_behaves_like 'creates merge request with labels' do
+ let(:labels) { %w(label label2) }
+ end
+
+ it 'creates merge request with special label names' do
+ params[:labels] = 'label, label?, label&foo, ?, &'
+ post api("/projects/#{project.id}/merge_requests", user), params: params
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['labels']).to include 'label'
+ expect(json_response['labels']).to include 'label?'
+ expect(json_response['labels']).to include 'label&foo'
+ expect(json_response['labels']).to include '?'
+ expect(json_response['labels']).to include '&'
+ end
+
+ it 'creates merge request with special label names as array' do
+ params[:labels] = ['label', 'label?', 'label&foo, ?, &', '1, 2', 3, 4]
+ post api("/projects/#{project.id}/merge_requests", user), params: params
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['labels']).to include 'label'
+ expect(json_response['labels']).to include 'label?'
+ expect(json_response['labels']).to include 'label&foo'
+ expect(json_response['labels']).to include '?'
+ expect(json_response['labels']).to include '&'
+ expect(json_response['labels']).to include '1'
+ expect(json_response['labels']).to include '2'
+ expect(json_response['labels']).to include '3'
+ expect(json_response['labels']).to include '4'
+ end
+
+ it 'empty label param does not add any labels' do
+ params[:labels] = ''
+ post api("/projects/#{project.id}/merge_requests", user), params: params
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['labels']).to eq([])
+ end
+
+ it 'empty label param as array does not add any labels, but only when sent explicitly as json' do
+ params[:labels] = []
+ post api("/projects/#{project.id}/merge_requests", user),
+ params: params.to_json,
+ headers: { 'Content-Type': 'application/json' }
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['labels']).to eq([])
+ end
+
+ xit 'empty label param as array does not add any labels' do
+ params[:labels] = []
+ post api("/projects/#{project.id}/merge_requests", user), params: params
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['labels']).to eq([])
+ end
+
+ it 'array with one empty string element does not add labels' do
+ params[:labels] = ['']
+ post api("/projects/#{project.id}/merge_requests", user), params: params
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['labels']).to eq([])
+ end
+
+ it 'array with multiple empty string elements does not add labels' do
+ params[:labels] = ['', '', '']
+ post api("/projects/#{project.id}/merge_requests", user), params: params
+
+ expect(response).to have_gitlab_http_status(201)
+ expect(json_response['labels']).to eq([])
+ end
end
it "returns 422 when source_branch equals target_branch" do
@@ -663,23 +752,6 @@ describe API::MergeRequests do
expect(response).to have_gitlab_http_status(400)
end
- it 'allows special label names' do
- post api("/projects/#{project.id}/merge_requests", user),
- params: {
- title: 'Test merge_request',
- source_branch: 'markdown',
- target_branch: 'master',
- author: user,
- labels: 'label, label?, label&foo, ?, &'
- }
- expect(response).to have_gitlab_http_status(201)
- expect(json_response['labels']).to include 'label'
- expect(json_response['labels']).to include 'label?'
- expect(json_response['labels']).to include 'label&foo'
- expect(json_response['labels']).to include '?'
- expect(json_response['labels']).to include '&'
- end
-
context 'with existing MR' do
before do
post api("/projects/#{project.id}/merge_requests", user),
@@ -1060,18 +1132,6 @@ describe API::MergeRequests do
expect(response).to have_gitlab_http_status(404)
end
-
- it "returns 400 when merge method is not supported" do
- merge_request.project.update!(merge_method: 'ff')
-
- put api(url, user)
-
- expected_error =
- 'Fast-forward to refs/merge-requests/1/merge is currently not supported.'
-
- expect(response).to have_gitlab_http_status(400)
- expect(json_response['message']).to eq(expected_error)
- end
end
describe "PUT /projects/:id/merge_requests/:merge_request_iid" do
@@ -1122,19 +1182,97 @@ describe API::MergeRequests do
expect(json_response['force_remove_source_branch']).to be_truthy
end
- it 'allows special label names' do
- put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user),
- params: {
- title: 'new issue',
- labels: 'label, label?, label&foo, ?, &'
- }
+ context 'when updating labels' do
+ it 'allows special label names' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user),
+ params: {
+ title: 'new issue',
+ labels: 'label, label?, label&foo, ?, &'
+ }
+
+ expect(response.status).to eq(200)
+ expect(json_response['labels']).to include 'label'
+ expect(json_response['labels']).to include 'label?'
+ expect(json_response['labels']).to include 'label&foo'
+ expect(json_response['labels']).to include '?'
+ expect(json_response['labels']).to include '&'
+ end
- expect(response.status).to eq(200)
- expect(json_response['labels']).to include 'label'
- expect(json_response['labels']).to include 'label?'
- expect(json_response['labels']).to include 'label&foo'
- expect(json_response['labels']).to include '?'
- expect(json_response['labels']).to include '&'
+ it 'also accepts labels as an array' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user),
+ params: {
+ title: 'new issue',
+ labels: ['label', 'label?', 'label&foo, ?, &', '1, 2', 3, 4]
+ }
+
+ expect(response.status).to eq(200)
+ expect(json_response['labels']).to include 'label'
+ expect(json_response['labels']).to include 'label?'
+ expect(json_response['labels']).to include 'label&foo'
+ expect(json_response['labels']).to include '?'
+ expect(json_response['labels']).to include '&'
+ expect(json_response['labels']).to include '1'
+ expect(json_response['labels']).to include '2'
+ expect(json_response['labels']).to include '3'
+ expect(json_response['labels']).to include '4'
+ end
+
+ it 'empty label param removes labels' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user),
+ params: {
+ title: 'new issue',
+ labels: ''
+ }
+
+ expect(response.status).to eq(200)
+ expect(json_response['labels']).to eq []
+ end
+
+ it 'label param as empty array, but only when sent explicitly as json, removes labels' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user),
+ params: {
+ title: 'new issue',
+ labels: []
+ }.to_json,
+ headers: { 'Content-Type' => 'application/json' }
+
+ expect(response.status).to eq(200)
+ expect(json_response['labels']).to eq []
+ end
+
+ xit 'empty label param as array removes labels' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user),
+ params: {
+ title: 'new issue',
+ labels: []
+ }
+
+ expect(response.status).to eq(200)
+ # fails, as grape omits an empty array as an optional param value for some reason, so nothing is passed along
+ expect(json_response['labels']).to eq []
+ end
+
+ it 'array with one empty string element removes labels' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user),
+ params: {
+ title: 'new issue',
+ labels: ['']
+ }
+
+ expect(response.status).to eq(200)
+ expect(json_response['labels']).to eq []
+ end
+
+ it 'array with multiple empty string elements removes labels' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user),
+ params: {
+ title: 'new issue',
+ labels: ['', '', '']
+ }
+
+ expect(response.status).to eq(200)
+ expect(json_response['labels']).to eq []
+ end
end
it 'does not update state when title is empty' do
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 856fe1bbe89..60d9d7fed13 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -136,6 +136,7 @@ describe API::Projects do
end
let!(:public_project) { create(:project, :public, name: 'public_project') }
+
before do
project
project2
@@ -968,8 +969,16 @@ describe API::Projects do
describe 'GET /projects/:id' do
context 'when unauthenticated' do
- it 'returns the public projects' do
- public_project = create(:project, :public)
+ it 'does not return private projects' do
+ private_project = create(:project, :private)
+
+ get api("/projects/#{private_project.id}")
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+
+ it 'returns public projects' do
+ public_project = create(:project, :repository, :public)
get api("/projects/#{public_project.id}")
@@ -977,8 +986,34 @@ describe API::Projects do
expect(json_response['id']).to eq(public_project.id)
expect(json_response['description']).to eq(public_project.description)
expect(json_response['default_branch']).to eq(public_project.default_branch)
+ expect(json_response['ci_config_path']).to eq(public_project.ci_config_path)
expect(json_response.keys).not_to include('permissions')
end
+
+ context 'and the project has a private repository' do
+ let(:project) { create(:project, :repository, :public, :repository_private) }
+ let(:protected_attributes) { %w(default_branch ci_config_path) }
+
+ it 'hides protected attributes of private repositories if user is not a member' do
+ get api("/projects/#{project.id}", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ protected_attributes.each do |attribute|
+ expect(json_response.keys).not_to include(attribute)
+ end
+ end
+
+ it 'exposes protected attributes of private repositories if user is a member' do
+ project.add_developer(user)
+
+ get api("/projects/#{project.id}", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ protected_attributes.each do |attribute|
+ expect(json_response.keys).to include(attribute)
+ end
+ end
+ end
end
context 'when authenticated' do
@@ -1130,6 +1165,26 @@ describe API::Projects do
expect(json_response).to include 'statistics'
end
+ context "and the project has a private repository" do
+ let(:project) { create(:project, :public, :repository, :repository_private) }
+
+ it "does not include statistics if user is not a member" do
+ get api("/projects/#{project.id}", user), params: { statistics: true }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).not_to include 'statistics'
+ end
+
+ it "includes statistics if user is a member" do
+ project.add_developer(user)
+
+ get api("/projects/#{project.id}", user), params: { statistics: true }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to include 'statistics'
+ end
+ end
+
it "includes import_error if user can admin project" do
get api("/projects/#{project.id}", user)
@@ -1510,6 +1565,9 @@ describe API::Projects do
describe "POST /projects/:id/share" do
let(:group) { create(:group) }
+ before do
+ group.add_developer(user)
+ end
it "shares project with group" do
expires_at = 10.days.from_now.to_date
@@ -1560,6 +1618,15 @@ describe API::Projects do
expect(response).to have_gitlab_http_status(400)
expect(json_response['error']).to eq 'group_access does not have a valid value'
end
+
+ it "returns a 409 error when link is not saved" do
+ allow(::Projects::GroupLinks::CreateService).to receive_message_chain(:new, :execute)
+ .and_return({ status: :error, http_status: 409, message: 'error' })
+
+ post api("/projects/#{project.id}/share", user), params: { group_id: group.id, group_access: Gitlab::Access::DEVELOPER }
+
+ expect(response).to have_gitlab_http_status(409)
+ end
end
describe 'DELETE /projects/:id/share/:group_id' do
diff --git a/spec/requests/api/release/links_spec.rb b/spec/requests/api/release/links_spec.rb
index ba948e37e2f..3a59052bb29 100644
--- a/spec/requests/api/release/links_spec.rb
+++ b/spec/requests/api/release/links_spec.rb
@@ -73,6 +73,22 @@ describe API::Release::Links do
expect(response).to have_gitlab_http_status(:ok)
end
end
+
+ context 'when project is public and the repository is private' do
+ let(:project) { create(:project, :repository, :public, :repository_private) }
+
+ it_behaves_like '403 response' do
+ let(:request) { get api("/projects/#{project.id}/releases/v0.1/assets/links", non_project_member) }
+ end
+
+ context 'when the release does not exist' do
+ let!(:release) { }
+
+ it_behaves_like '403 response' do
+ let(:request) { get api("/projects/#{project.id}/releases/v0.1/assets/links", non_project_member) }
+ end
+ end
+ end
end
end
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index e6c235ca26e..9087cccb759 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -436,9 +436,9 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
let(:expected_variables) do
- [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true },
- { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true },
- { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true }]
+ [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false },
+ { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false },
+ { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false }]
end
let(:expected_artifacts) do
@@ -549,7 +549,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
end
context 'when job is made for merge request' do
- let(:pipeline) { create(:ci_pipeline_without_jobs, source: :merge_request, project: project, ref: 'feature', merge_request: merge_request) }
+ let(:pipeline) { create(:ci_pipeline_without_jobs, source: :merge_request_event, project: project, ref: 'feature', merge_request: merge_request) }
let!(:job) { create(:ci_build, pipeline: pipeline, name: 'spinach', ref: 'feature', stage: 'test', stage_idx: 0) }
let(:merge_request) { create(:merge_request) }
@@ -740,12 +740,12 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
context 'when triggered job is available' do
let(:expected_variables) do
- [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true },
- { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true },
- { 'key' => 'CI_PIPELINE_TRIGGERED', 'value' => 'true', 'public' => true },
- { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true },
- { 'key' => 'SECRET_KEY', 'value' => 'secret_value', 'public' => false },
- { 'key' => 'TRIGGER_KEY_1', 'value' => 'TRIGGER_VALUE_1', 'public' => false }]
+ [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false },
+ { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false },
+ { 'key' => 'CI_PIPELINE_TRIGGERED', 'value' => 'true', 'public' => true, 'masked' => false },
+ { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false },
+ { 'key' => 'SECRET_KEY', 'value' => 'secret_value', 'public' => false, 'masked' => false },
+ { 'key' => 'TRIGGER_KEY_1', 'value' => 'TRIGGER_VALUE_1', 'public' => false, 'masked' => false }]
end
let(:trigger) { create(:ci_trigger, project: project) }
diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb
index 5ca442bc448..5548e3fd01a 100644
--- a/spec/requests/api/runners_spec.rb
+++ b/spec/requests/api/runners_spec.rb
@@ -90,6 +90,17 @@ describe API::Runners do
expect(response).to have_gitlab_http_status(400)
end
+
+ it 'filters runners by tag_list' do
+ create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2])
+ create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: ['tag2'])
+
+ get api('/runners?tag_list=tag1,tag2', user)
+
+ expect(json_response).to match_array [
+ a_hash_including('description' => 'Runner tagged with tag1 and tag2')
+ ]
+ end
end
context 'unauthorized user' do
@@ -181,6 +192,17 @@ describe API::Runners do
expect(response).to have_gitlab_http_status(400)
end
+
+ it 'filters runners by tag_list' do
+ create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2])
+ create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: ['tag2'])
+
+ get api('/runners/all?tag_list=tag1,tag2', admin)
+
+ expect(json_response).to match_array [
+ a_hash_including('description' => 'Runner tagged with tag1 and tag2')
+ ]
+ end
end
context 'without admin privileges' do
@@ -716,6 +738,17 @@ describe API::Runners do
expect(response).to have_gitlab_http_status(400)
end
+
+ it 'filters runners by tag_list' do
+ create(:ci_runner, :project, description: 'Runner tagged with tag1 and tag2', projects: [project], tag_list: %w[tag1 tag2])
+ create(:ci_runner, :project, description: 'Runner tagged with tag2', projects: [project], tag_list: ['tag2'])
+
+ get api("/projects/#{project.id}/runners?tag_list=tag1,tag2", user)
+
+ expect(json_response).to match_array [
+ a_hash_including('description' => 'Runner tagged with tag1 and tag2')
+ ]
+ end
end
context 'authorized user without maintainer privileges' do
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index 7c8512f7589..d600076e9fb 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -84,10 +84,17 @@ describe API::Snippets do
end
describe 'GET /snippets/:id/raw' do
- let(:snippet) { create(:personal_snippet, author: user) }
+ set(:author) { create(:user) }
+ set(:snippet) { create(:personal_snippet, :private, author: author) }
+
+ it 'requires authentication' do
+ get api("/snippets/#{snippet.id}", nil)
+
+ expect(response).to have_gitlab_http_status(401)
+ end
it 'returns raw text' do
- get api("/snippets/#{snippet.id}/raw", user)
+ get api("/snippets/#{snippet.id}/raw", author)
expect(response).to have_gitlab_http_status(200)
expect(response.content_type).to eq 'text/plain'
@@ -95,38 +102,83 @@ describe API::Snippets do
end
it 'forces attachment content disposition' do
- get api("/snippets/#{snippet.id}/raw", user)
+ get api("/snippets/#{snippet.id}/raw", author)
expect(headers['Content-Disposition']).to match(/^attachment/)
end
it 'returns 404 for invalid snippet id' do
- get api("/snippets/1234/raw", user)
+ snippet.destroy
+
+ get api("/snippets/#{snippet.id}/raw", author)
expect(response).to have_gitlab_http_status(404)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
+
+ it 'hides private snippets from ordinary users' do
+ get api("/snippets/#{snippet.id}/raw", user)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+
+ it 'shows internal snippets to ordinary users' do
+ internal_snippet = create(:personal_snippet, :internal, author: author)
+
+ get api("/snippets/#{internal_snippet.id}/raw", user)
+
+ expect(response).to have_gitlab_http_status(200)
+ end
end
describe 'GET /snippets/:id' do
- let(:snippet) { create(:personal_snippet, author: user) }
+ set(:admin) { create(:user, :admin) }
+ set(:author) { create(:user) }
+ set(:private_snippet) { create(:personal_snippet, :private, author: author) }
+ set(:internal_snippet) { create(:personal_snippet, :internal, author: author) }
+
+ it 'requires authentication' do
+ get api("/snippets/#{private_snippet.id}", nil)
+
+ expect(response).to have_gitlab_http_status(401)
+ end
it 'returns snippet json' do
- get api("/snippets/#{snippet.id}", user)
+ get api("/snippets/#{private_snippet.id}", author)
expect(response).to have_gitlab_http_status(200)
- expect(json_response['title']).to eq(snippet.title)
- expect(json_response['description']).to eq(snippet.description)
- expect(json_response['file_name']).to eq(snippet.file_name)
- expect(json_response['visibility']).to eq(snippet.visibility)
+ expect(json_response['title']).to eq(private_snippet.title)
+ expect(json_response['description']).to eq(private_snippet.description)
+ expect(json_response['file_name']).to eq(private_snippet.file_name)
+ expect(json_response['visibility']).to eq(private_snippet.visibility)
+ end
+
+ it 'shows private snippets to an admin' do
+ get api("/snippets/#{private_snippet.id}", admin)
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'hides private snippets from an ordinary user' do
+ get api("/snippets/#{private_snippet.id}", user)
+
+ expect(response).to have_gitlab_http_status(404)
+ end
+
+ it 'shows internal snippets to an ordinary user' do
+ get api("/snippets/#{internal_snippet.id}", user)
+
+ expect(response).to have_gitlab_http_status(200)
end
it 'returns 404 for invalid snippet id' do
- get api("/snippets/1234", user)
+ private_snippet.destroy
+
+ get api("/snippets/#{private_snippet.id}", admin)
expect(response).to have_gitlab_http_status(404)
- expect(json_response['message']).to eq('404 Not found')
+ expect(json_response['message']).to eq('404 Snippet Not Found')
end
end
diff --git a/spec/requests/api/todos_spec.rb b/spec/requests/api/todos_spec.rb
index f121a1d3b78..9f0d5ad5d12 100644
--- a/spec/requests/api/todos_spec.rb
+++ b/spec/requests/api/todos_spec.rb
@@ -8,10 +8,14 @@ describe API::Todos do
let(:author_2) { create(:user) }
let(:john_doe) { create(:user, username: 'john_doe') }
let(:merge_request) { create(:merge_request, source_project: project_1) }
+ let!(:merge_request_todo) { create(:todo, project: project_1, author: author_2, user: john_doe, target: merge_request) }
let!(:pending_1) { create(:todo, :mentioned, project: project_1, author: author_1, user: john_doe) }
let!(:pending_2) { create(:todo, project: project_2, author: author_2, user: john_doe) }
let!(:pending_3) { create(:on_commit_todo, project: project_1, author: author_2, user: john_doe) }
let!(:done) { create(:todo, :done, project: project_1, author: author_1, user: john_doe) }
+ let!(:award_emoji_1) { create(:award_emoji, awardable: merge_request, user: author_1, name: 'thumbsup') }
+ let!(:award_emoji_2) { create(:award_emoji, awardable: pending_1.target, user: author_1, name: 'thumbsup') }
+ let!(:award_emoji_3) { create(:award_emoji, awardable: pending_2.target, user: author_2, name: 'thumbsdown') }
before do
project_1.add_developer(john_doe)
@@ -34,7 +38,7 @@ describe API::Todos do
expect(response.status).to eq(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.length).to eq(3)
+ expect(json_response.length).to eq(4)
expect(json_response[0]['id']).to eq(pending_3.id)
expect(json_response[0]['project']).to be_a Hash
expect(json_response[0]['author']).to be_a Hash
@@ -45,6 +49,23 @@ describe API::Todos do
expect(json_response[0]['state']).to eq('pending')
expect(json_response[0]['action_name']).to eq('assigned')
expect(json_response[0]['created_at']).to be_present
+ expect(json_response[0]['target_type']).to eq('Commit')
+
+ expect(json_response[1]['target_type']).to eq('Issue')
+ expect(json_response[1]['target']['upvotes']).to eq(0)
+ expect(json_response[1]['target']['downvotes']).to eq(1)
+ expect(json_response[1]['target']['merge_requests_count']).to eq(0)
+
+ expect(json_response[2]['target_type']).to eq('Issue')
+ expect(json_response[2]['target']['upvotes']).to eq(1)
+ expect(json_response[2]['target']['downvotes']).to eq(0)
+ expect(json_response[2]['target']['merge_requests_count']).to eq(0)
+
+ expect(json_response[3]['target_type']).to eq('MergeRequest')
+ # Only issues get a merge request count at the moment
+ expect(json_response[3]['target']['merge_requests_count']).to be_nil
+ expect(json_response[3]['target']['upvotes']).to eq(1)
+ expect(json_response[3]['target']['downvotes']).to eq(0)
end
context 'and using the author filter' do
@@ -54,7 +75,7 @@ describe API::Todos do
expect(response.status).to eq(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.length).to eq(2)
+ expect(json_response.length).to eq(3)
end
end
@@ -67,7 +88,7 @@ describe API::Todos do
expect(response.status).to eq(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect(json_response.length).to eq(2)
end
end
@@ -100,7 +121,7 @@ describe API::Todos do
expect(response.status).to eq(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.length).to eq(2)
+ expect(json_response.length).to eq(3)
end
end
@@ -115,6 +136,27 @@ describe API::Todos do
end
end
end
+
+ it 'avoids N+1 queries', :request_store do
+ create(:todo, project: project_1, author: author_2, user: john_doe, target: merge_request)
+
+ get api('/todos', john_doe)
+
+ control = ActiveRecord::QueryRecorder.new { get api('/todos', john_doe) }
+
+ merge_request_2 = create(:merge_request, source_project: project_2)
+ create(:todo, project: project_2, author: author_2, user: john_doe, target: merge_request_2)
+
+ project_3 = create(:project, :repository)
+ project_3.add_developer(john_doe)
+ merge_request_3 = create(:merge_request, source_project: project_3)
+ create(:todo, project: project_3, author: author_2, user: john_doe, target: merge_request_3)
+ create(:todo, :mentioned, project: project_1, author: author_1, user: john_doe)
+ create(:on_commit_todo, project: project_3, author: author_1, user: john_doe)
+
+ expect { get api('/todos', john_doe) }.not_to exceed_query_limit(control)
+ expect(response.status).to eq(200)
+ end
end
describe 'POST /todos/:id/mark_as_done' do
@@ -230,7 +272,7 @@ describe API::Todos do
context 'for a merge request' do
it_behaves_like 'an issuable', 'merge_requests' do
- let(:issuable) { merge_request }
+ let(:issuable) { create(:merge_request, :simple, source_project: project_1) }
end
end
end
diff --git a/spec/requests/api/variables_spec.rb b/spec/requests/api/variables_spec.rb
index cdac5b2f400..5df6baf0ddf 100644
--- a/spec/requests/api/variables_spec.rb
+++ b/spec/requests/api/variables_spec.rb
@@ -73,12 +73,12 @@ describe API::Variables do
context 'authorized user with proper permissions' do
it 'creates variable' do
expect do
- post api("/projects/#{project.id}/variables", user), params: { key: 'TEST_VARIABLE_2', value: 'VALUE_2', protected: true }
+ post api("/projects/#{project.id}/variables", user), params: { key: 'TEST_VARIABLE_2', value: 'PROTECTED_VALUE_2', protected: true }
end.to change {project.variables.count}.by(1)
expect(response).to have_gitlab_http_status(201)
expect(json_response['key']).to eq('TEST_VARIABLE_2')
- expect(json_response['value']).to eq('VALUE_2')
+ expect(json_response['value']).to eq('PROTECTED_VALUE_2')
expect(json_response['protected']).to be_truthy
end
diff --git a/spec/requests/api/version_spec.rb b/spec/requests/api/version_spec.rb
index 38b618191fb..e06f8bbc095 100644
--- a/spec/requests/api/version_spec.rb
+++ b/spec/requests/api/version_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe API::Version do
- describe 'GET /version' do
+ shared_examples_for 'GET /version' do
context 'when unauthenticated' do
it 'returns authentication error' do
get api('/version')
@@ -22,4 +22,20 @@ describe API::Version do
end
end
end
+
+ context 'with graphql enabled' do
+ before do
+ stub_feature_flags(graphql: true)
+ end
+
+ include_examples 'GET /version'
+ end
+
+ context 'with graphql disabled' do
+ before do
+ stub_feature_flags(graphql: false)
+ end
+
+ include_examples 'GET /version'
+ end
end
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index 5b625fd47be..bfa178f5cae 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -104,6 +104,70 @@ describe 'Git HTTP requests' do
end
end
+ shared_examples_for 'project path without .git suffix' do
+ context "GET info/refs" do
+ let(:path) { "/#{project_path}/info/refs" }
+
+ context "when no params are added" do
+ before do
+ get path
+ end
+
+ it "redirects to the .git suffix version" do
+ expect(response).to redirect_to("/#{project_path}.git/info/refs")
+ end
+ end
+
+ context "when the upload-pack service is requested" do
+ let(:params) { { service: 'git-upload-pack' } }
+
+ before do
+ get path, params: params
+ end
+
+ it "redirects to the .git suffix version" do
+ expect(response).to redirect_to("/#{project_path}.git/info/refs?service=#{params[:service]}")
+ end
+ end
+
+ context "when the receive-pack service is requested" do
+ let(:params) { { service: 'git-receive-pack' } }
+
+ before do
+ get path, params: params
+ end
+
+ it "redirects to the .git suffix version" do
+ expect(response).to redirect_to("/#{project_path}.git/info/refs?service=#{params[:service]}")
+ end
+ end
+
+ context "when the params are anything else" do
+ let(:params) { { service: 'git-implode-pack' } }
+
+ before do
+ get path, params: params
+ end
+
+ it "redirects to the sign-in page" do
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+
+ context "POST git-upload-pack" do
+ it "fails to find a route" do
+ expect { clone_post(project_path) }.to raise_error(ActionController::RoutingError)
+ end
+ end
+
+ context "POST git-receive-pack" do
+ it "fails to find a route" do
+ expect { push_post(project_path) }.to raise_error(ActionController::RoutingError)
+ end
+ end
+ end
+
describe "User with no identities" do
let(:user) { create(:user) }
@@ -143,6 +207,10 @@ describe 'Git HTTP requests' do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
+
+ it_behaves_like 'project path without .git suffix' do
+ let(:project_path) { "#{user.namespace.path}/project.git-project" }
+ end
end
end
@@ -706,70 +774,8 @@ describe 'Git HTTP requests' do
end
end
- context "when the project path doesn't end in .git" do
- let(:project) { create(:project, :repository, :public, path: 'project.git-project') }
-
- context "GET info/refs" do
- let(:path) { "/#{project.full_path}/info/refs" }
-
- context "when no params are added" do
- before do
- get path
- end
-
- it "redirects to the .git suffix version" do
- expect(response).to redirect_to("/#{project.full_path}.git/info/refs")
- end
- end
-
- context "when the upload-pack service is requested" do
- let(:params) { { service: 'git-upload-pack' } }
-
- before do
- get path, params: params
- end
-
- it "redirects to the .git suffix version" do
- expect(response).to redirect_to("/#{project.full_path}.git/info/refs?service=#{params[:service]}")
- end
- end
-
- context "when the receive-pack service is requested" do
- let(:params) { { service: 'git-receive-pack' } }
-
- before do
- get path, params: params
- end
-
- it "redirects to the .git suffix version" do
- expect(response).to redirect_to("/#{project.full_path}.git/info/refs?service=#{params[:service]}")
- end
- end
-
- context "when the params are anything else" do
- let(:params) { { service: 'git-implode-pack' } }
-
- before do
- get path, params: params
- end
-
- it "redirects to the sign-in page" do
- expect(response).to redirect_to(new_user_session_path)
- end
- end
- end
-
- context "POST git-upload-pack" do
- it "fails to find a route" do
- expect { clone_post(project.full_path) }.to raise_error(ActionController::RoutingError)
- end
- end
-
- context "POST git-receive-pack" do
- it "fails to find a route" do
- expect { push_post(project.full_path) }.to raise_error(ActionController::RoutingError)
- end
- end
+ it_behaves_like 'project path without .git suffix' do
+ let(:project_path) { create(:project, :repository, :public, path: 'project.git-project').full_path }
end
context "retrieving an info/refs file" do
diff --git a/spec/serializers/merge_request_for_pipeline_entity_spec.rb b/spec/serializers/merge_request_for_pipeline_entity_spec.rb
new file mode 100644
index 00000000000..e49b45bc7d7
--- /dev/null
+++ b/spec/serializers/merge_request_for_pipeline_entity_spec.rb
@@ -0,0 +1,29 @@
+require 'spec_helper'
+
+describe MergeRequestForPipelineEntity do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+ let(:request) { EntityRequest.new(project: project) }
+ let(:merge_request) { create(:merge_request, target_project: project, source_project: project) }
+ let(:presenter) { MergeRequestPresenter.new(merge_request, current_user: user) }
+
+ let(:entity) do
+ described_class.new(presenter, request: request)
+ end
+
+ before do
+ project.add_developer(user)
+ end
+
+ context 'as json' do
+ subject { entity.as_json }
+
+ it 'exposes needed attributes' do
+ expect(subject).to include(
+ :iid, :path, :title,
+ :source_branch, :source_branch_path,
+ :target_branch, :target_branch_path
+ )
+ end
+ end
+end
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
index 774486dcb6d..11040862129 100644
--- a/spec/serializers/pipeline_entity_spec.rb
+++ b/spec/serializers/pipeline_entity_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe PipelineEntity do
+ include Gitlab::Routing
+
set(:user) { create(:user) }
let(:request) { double('request') }
@@ -128,5 +130,48 @@ describe PipelineEntity do
.to eq 'CI/CD YAML configuration error!'
end
end
+
+ context 'when pipeline is detached merge request pipeline' do
+ let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ let(:project) { merge_request.target_project }
+ let(:pipeline) { merge_request.merge_request_pipelines.first }
+
+ it 'makes detached flag true' do
+ expect(subject[:flags][:detached]).to be_truthy
+ end
+
+ context 'when user is a developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'has merge request information' do
+ expect(subject[:merge_request][:iid]).to eq(merge_request.iid)
+
+ expect(project_merge_request_path(project, merge_request))
+ .to include(subject[:merge_request][:path])
+
+ expect(subject[:merge_request][:title]).to eq(merge_request.title)
+
+ expect(subject[:merge_request][:source_branch])
+ .to eq(merge_request.source_branch)
+
+ expect(project_branch_path(project, merge_request.source_branch))
+ .to include(subject[:merge_request][:source_branch_path])
+
+ expect(subject[:merge_request][:target_branch])
+ .to eq(merge_request.target_branch)
+
+ expect(project_branch_path(project, merge_request.target_branch))
+ .to include(subject[:merge_request][:target_branch_path])
+ end
+ end
+
+ context 'when user is an external user' do
+ it 'has no merge request information' do
+ expect(subject[:merge_request]).to be_nil
+ end
+ end
+ end
end
end
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index 2bdcb2a45f6..a21487938a0 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -97,6 +97,44 @@ describe PipelineSerializer do
end
end
+ context 'when there are pipelines for merge requests' do
+ let(:resource) { Ci::Pipeline.all }
+
+ let!(:merge_request_1) do
+ create(:merge_request,
+ :with_merge_request_pipeline,
+ target_project: project,
+ target_branch: 'master',
+ source_project: project,
+ source_branch: 'feature-1')
+ end
+
+ let!(:merge_request_2) do
+ create(:merge_request,
+ :with_merge_request_pipeline,
+ target_project: project,
+ target_branch: 'master',
+ source_project: project,
+ source_branch: 'feature-2')
+ end
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'includes merge requests information' do
+ expect(subject.all? { |entry| entry[:merge_request].present? }).to be_truthy
+ end
+
+ it 'preloads related merge requests', :postgresql do
+ recorded = ActiveRecord::QueryRecorder.new { subject }
+
+ expect(recorded.log)
+ .to include("SELECT \"merge_requests\".* FROM \"merge_requests\" " \
+ "WHERE \"merge_requests\".\"id\" IN (#{merge_request_1.id}, #{merge_request_2.id})")
+ end
+ end
+
describe 'number of queries when preloaded' do
subject { serializer.represent(resource, preload: true) }
let(:resource) { Ci::Pipeline.all }
diff --git a/spec/serializers/provider_repo_entity_spec.rb b/spec/serializers/provider_repo_entity_spec.rb
index b67115bab10..9a1160d16d5 100644
--- a/spec/serializers/provider_repo_entity_spec.rb
+++ b/spec/serializers/provider_repo_entity_spec.rb
@@ -13,7 +13,7 @@ describe ProviderRepoEntity do
describe '#as_json' do
subject { entity.as_json }
- it 'includes requried fields' do
+ it 'includes required fields' do
expect(subject[:id]).to eq(provider_repo[:id])
expect(subject[:full_name]).to eq(provider_repo[:full_name])
expect(subject[:owner_name]).to eq(provider_repo[:owner][:login])
diff --git a/spec/services/boards/visits/latest_service_spec.rb b/spec/services/boards/visits/latest_service_spec.rb
index e55d599e2cc..c8a0a5e4243 100644
--- a/spec/services/boards/visits/latest_service_spec.rb
+++ b/spec/services/boards/visits/latest_service_spec.rb
@@ -23,6 +23,12 @@ describe Boards::Visits::LatestService do
service.execute
end
+
+ it 'queries for last N visits' do
+ expect(BoardProjectRecentVisit).to receive(:latest).with(user, project, count: 5).once
+
+ described_class.new(project_board.parent, user, count: 5).execute
+ end
end
context 'when a group board' do
@@ -42,6 +48,12 @@ describe Boards::Visits::LatestService do
service.execute
end
+
+ it 'queries for last N visits' do
+ expect(BoardGroupRecentVisit).to receive(:latest).with(user, group, count: 5).once
+
+ described_class.new(group_board.parent, user, count: 5).execute
+ end
end
end
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 93349ba7b5b..24707cd2d41 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -701,7 +701,7 @@ describe Ci::CreatePipelineService do
let(:target_sha) { nil }
context 'when source is merge request' do
- let(:source) { :merge_request }
+ let(:source) { :merge_request_event }
context "when config has merge_requests keywords" do
let(:config) do
@@ -734,7 +734,7 @@ describe Ci::CreatePipelineService do
it 'creates a merge request pipeline' do
expect(pipeline).to be_persisted
- expect(pipeline).to be_merge_request
+ expect(pipeline).to be_merge_request_event
expect(pipeline.merge_request).to eq(merge_request)
expect(pipeline.builds.order(:stage_id).map(&:name)).to eq(%w[test])
end
diff --git a/spec/services/clusters/applications/patch_service_spec.rb b/spec/services/clusters/applications/patch_service_spec.rb
new file mode 100644
index 00000000000..d4ee3243b84
--- /dev/null
+++ b/spec/services/clusters/applications/patch_service_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::Applications::PatchService do
+ describe '#execute' do
+ let(:application) { create(:clusters_applications_knative, :scheduled) }
+ let!(:update_command) { application.update_command }
+ let(:service) { described_class.new(application) }
+ let(:helm_client) { instance_double(Gitlab::Kubernetes::Helm::Api) }
+
+ before do
+ allow(service).to receive(:update_command).and_return(update_command)
+ allow(service).to receive(:helm_api).and_return(helm_client)
+ end
+
+ context 'when there are no errors' do
+ before do
+ expect(helm_client).to receive(:update).with(update_command)
+ allow(ClusterWaitForAppInstallationWorker).to receive(:perform_in).and_return(nil)
+ end
+
+ it 'makes the application updating' do
+ expect(application.cluster).not_to be_nil
+ service.execute
+
+ expect(application).to be_updating
+ end
+
+ it 'schedules async installation status check' do
+ expect(ClusterWaitForAppInstallationWorker).to receive(:perform_in).once
+
+ service.execute
+ end
+ end
+
+ context 'when kubernetes cluster communication fails' do
+ let(:error) { Kubeclient::HttpError.new(500, 'system failure', nil) }
+
+ before do
+ expect(helm_client).to receive(:update).with(update_command).and_raise(error)
+ end
+
+ it 'makes the application errored' do
+ service.execute
+
+ expect(application).to be_update_errored
+ expect(application.status_reason).to match('Kubernetes error: 500')
+ end
+
+ it 'logs errors' do
+ expect(service.send(:logger)).to receive(:error).with(
+ {
+ exception: 'Kubeclient::HttpError',
+ message: 'system failure',
+ service: 'Clusters::Applications::PatchService',
+ app_id: application.id,
+ project_ids: application.cluster.project_ids,
+ group_ids: [],
+ error_code: 500
+ }
+ )
+
+ expect(Gitlab::Sentry).to receive(:track_acceptable_exception).with(
+ error,
+ extra: {
+ exception: 'Kubeclient::HttpError',
+ message: 'system failure',
+ service: 'Clusters::Applications::PatchService',
+ app_id: application.id,
+ project_ids: application.cluster.project_ids,
+ group_ids: [],
+ error_code: 500
+ }
+ )
+
+ service.execute
+ end
+ end
+
+ context 'when a non-kubernetes error happens' do
+ let(:application) { create(:clusters_applications_knative, :scheduled) }
+ let(:error) { StandardError.new('something bad happened') }
+
+ before do
+ expect(application).to receive(:make_updating!).once.and_raise(error)
+ end
+
+ it 'makes the application errored' do
+ expect(helm_client).not_to receive(:update)
+
+ service.execute
+
+ expect(application).to be_update_errored
+ expect(application.status_reason).to eq("Can't start update process.")
+ end
+
+ it 'logs errors' do
+ expect(service.send(:logger)).to receive(:error).with(
+ {
+ exception: 'StandardError',
+ error_code: nil,
+ message: 'something bad happened',
+ service: 'Clusters::Applications::PatchService',
+ app_id: application.id,
+ project_ids: application.cluster.projects.pluck(:id),
+ group_ids: []
+ }
+ )
+
+ expect(Gitlab::Sentry).to receive(:track_acceptable_exception).with(
+ error,
+ extra: {
+ exception: 'StandardError',
+ error_code: nil,
+ message: 'something bad happened',
+ service: 'Clusters::Applications::PatchService',
+ app_id: application.id,
+ project_ids: application.cluster.projects.pluck(:id),
+ group_ids: []
+ }
+ )
+
+ service.execute
+ end
+ end
+ end
+end
diff --git a/spec/services/clusters/applications/update_service_spec.rb b/spec/services/clusters/applications/update_service_spec.rb
new file mode 100644
index 00000000000..2d299882af0
--- /dev/null
+++ b/spec/services/clusters/applications/update_service_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::Applications::UpdateService do
+ include TestRequestHelpers
+
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:user) { create(:user) }
+ let(:params) { { application: 'knative', hostname: 'update.example.com' } }
+ let(:service) { described_class.new(cluster, user, params) }
+
+ subject { service.execute(test_request) }
+
+ describe '#execute' do
+ before do
+ allow(ClusterPatchAppWorker).to receive(:perform_async)
+ end
+
+ context 'application is not installed' do
+ it 'raises Clusters::Applications::BaseService::InvalidApplicationError' do
+ expect(ClusterPatchAppWorker).not_to receive(:perform_async)
+
+ expect { subject }
+ .to raise_exception { Clusters::Applications::BaseService::InvalidApplicationError }
+ .and not_change { Clusters::Applications::Knative.count }
+ .and not_change { Clusters::Applications::Knative.with_status(:scheduled).count }
+ end
+ end
+
+ context 'application is installed' do
+ context 'application is schedulable' do
+ let!(:application) do
+ create(:clusters_applications_knative, status: 3, cluster: cluster)
+ end
+
+ it 'updates the application data' do
+ expect do
+ subject
+ end.to change { application.reload.hostname }.to(params[:hostname])
+ end
+
+ it 'makes application scheduled!' do
+ subject
+
+ expect(application.reload).to be_scheduled
+ end
+
+ it 'schedules ClusterPatchAppWorker' do
+ expect(ClusterPatchAppWorker).to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ context 'application is not schedulable' do
+ let!(:application) do
+ create(:clusters_applications_knative, status: 4, cluster: cluster)
+ end
+
+ it 'raises StateMachines::InvalidTransition' do
+ expect(ClusterPatchAppWorker).not_to receive(:perform_async)
+
+ expect { subject }
+ .to raise_exception { StateMachines::InvalidTransition }
+ .and not_change { application.reload.hostname }
+ .and not_change { Clusters::Applications::Knative.with_status(:scheduled).count }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/error_tracking/list_projects_service_spec.rb b/spec/services/error_tracking/list_projects_service_spec.rb
index 9f25a633deb..a92d3376f7b 100644
--- a/spec/services/error_tracking/list_projects_service_spec.rb
+++ b/spec/services/error_tracking/list_projects_service_spec.rb
@@ -32,7 +32,7 @@ describe ErrorTracking::ListProjectsService do
end
context 'set model attributes to new values' do
- let(:new_api_url) { new_api_host + 'api/0/projects/' }
+ let(:new_api_url) { new_api_host + 'api/0/projects/org/proj/' }
before do
expect(error_tracking_setting).to receive(:list_sentry_projects)
@@ -121,7 +121,7 @@ describe ErrorTracking::ListProjectsService do
context 'error_tracking_setting is nil' do
let(:error_tracking_setting) { build(:project_error_tracking_setting) }
- let(:new_api_url) { new_api_host + 'api/0/projects/' }
+ let(:new_api_url) { new_api_host + 'api/0/projects/org/proj/' }
before do
expect(project).to receive(:build_error_tracking_setting).once
diff --git a/spec/services/files/multi_service_spec.rb b/spec/services/files/multi_service_spec.rb
index 84c48d63c64..6842fa9f435 100644
--- a/spec/services/files/multi_service_spec.rb
+++ b/spec/services/files/multi_service_spec.rb
@@ -235,6 +235,22 @@ describe Files::MultiService do
expect(blob).to be_present
end
end
+
+ context 'when force is set to true and branch already exists' do
+ let(:commit_params) do
+ {
+ commit_message: commit_message,
+ branch_name: 'feature',
+ start_branch: 'master',
+ actions: actions,
+ force: true
+ }
+ end
+
+ it 'is still a success' do
+ expect(subject.execute[:status]).to eq(:success)
+ end
+ end
end
def update_file(path)
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 6b48c993c57..79d504b9b45 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -410,5 +410,34 @@ describe Groups::TransferService, :postgresql do
end
end
end
+
+ context 'when transferring a subgroup into root group' do
+ let(:group) { create(:group, :public, :nested) }
+ let(:subgroup) { create(:group, :public, parent: group) }
+ let(:transfer_service) { described_class.new(subgroup, user) }
+
+ it 'ensures there is still an owner for the transferred group' do
+ expect(subgroup.owners).to be_empty
+
+ transfer_service.execute(nil)
+ subgroup.reload
+
+ expect(subgroup.owners).to match_array(user)
+ end
+
+ context 'when group has explicit owner' do
+ let(:another_owner) { create(:user) }
+ let!(:another_member) { create(:group_member, :owner, group: subgroup, user: another_owner) }
+
+ it 'does not add additional owner' do
+ expect(subgroup.owners).to match_array(another_owner)
+
+ transfer_service.execute(nil)
+ subgroup.reload
+
+ expect(subgroup.owners).to match_array(another_owner)
+ end
+ end
+ end
end
end
diff --git a/spec/services/issuable/common_system_notes_service_spec.rb b/spec/services/issuable/common_system_notes_service_spec.rb
index fa5d5ebac5c..0edc9016c96 100644
--- a/spec/services/issuable/common_system_notes_service_spec.rb
+++ b/spec/services/issuable/common_system_notes_service_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe Issuable::CommonSystemNotesService do
let(:user) { create(:user) }
let(:project) { create(:project) }
- let(:issuable) { create(:issue) }
+ let(:issuable) { create(:issue, project: project) }
context 'on issuable update' do
it_behaves_like 'system note creation', { title: 'New title' }, 'changed title'
@@ -70,7 +70,7 @@ describe Issuable::CommonSystemNotesService do
end
context 'on issuable create' do
- let(:issuable) { build(:issue) }
+ let(:issuable) { build(:issue, project: project) }
subject { described_class.new(project, user).execute(issuable, old_labels: [], is_update: false) }
diff --git a/spec/services/issues/build_service_spec.rb b/spec/services/issues/build_service_spec.rb
index 248e7d5a389..86e58fe06b9 100644
--- a/spec/services/issues/build_service_spec.rb
+++ b/spec/services/issues/build_service_spec.rb
@@ -8,29 +8,29 @@ describe Issues::BuildService do
project.add_developer(user)
end
+ def build_issue(issue_params = {})
+ described_class.new(project, user, issue_params).execute
+ end
+
context 'for a single discussion' do
describe '#execute' do
let(:merge_request) { create(:merge_request, title: "Hello world", source_project: project) }
let(:discussion) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, note: "Almost done").to_discussion }
- let(:service) { described_class.new(project, user, merge_request_to_resolve_discussions_of: merge_request.iid, discussion_to_resolve: discussion.id) }
- it 'references the noteable title in the issue title' do
- issue = service.execute
+ subject { build_issue(merge_request_to_resolve_discussions_of: merge_request.iid, discussion_to_resolve: discussion.id) }
- expect(issue.title).to include('Hello world')
+ it 'references the noteable title in the issue title' do
+ expect(subject.title).to include('Hello world')
end
it 'adds the note content to the description' do
- issue = service.execute
-
- expect(issue.description).to include('Almost done')
+ expect(subject.description).to include('Almost done')
end
end
end
context 'for discussions in a merge request' do
let(:merge_request) { create(:merge_request_with_diff_notes, source_project: project) }
- let(:issue) { described_class.new(project, user, merge_request_to_resolve_discussions_of: merge_request.iid).execute }
describe '#items_for_discussions' do
it 'has an item for each discussion' do
@@ -66,28 +66,30 @@ describe Issues::BuildService do
end
describe '#execute' do
- it 'has the merge request reference in the title' do
- expect(issue.title).to include(merge_request.title)
- end
+ let(:base_params) { { merge_request_to_resolve_discussions_of: merge_request.iid } }
- it 'has the reference of the merge request in the description' do
- expect(issue.description).to include(merge_request.to_reference)
+ context 'without additional params' do
+ subject { build_issue(base_params) }
+
+ it 'has the merge request reference in the title' do
+ expect(subject.title).to include(merge_request.title)
+ end
+
+ it 'has the reference of the merge request in the description' do
+ expect(subject.description).to include(merge_request.to_reference)
+ end
end
- it 'does not assign title when a title was given' do
- issue = described_class.new(project, user,
- merge_request_to_resolve_discussions_of: merge_request,
- title: 'What an issue').execute
+ it 'uses provided title if title param given' do
+ issue = build_issue(base_params.merge(title: 'What an issue'))
expect(issue.title).to eq('What an issue')
end
- it 'does not assign description when a description was given' do
- issue = described_class.new(project, user,
- merge_request_to_resolve_discussions_of: merge_request,
- description: 'Fix at your earliest conveignance').execute
+ it 'uses provided description if description param given' do
+ issue = build_issue(base_params.merge(description: 'Fix at your earliest convenience'))
- expect(issue.description).to eq('Fix at your earliest conveignance')
+ expect(issue.description).to eq('Fix at your earliest convenience')
end
describe 'with multiple discussions' do
@@ -96,20 +98,20 @@ describe Issues::BuildService do
it 'mentions all the authors in the description' do
authors = merge_request.resolvable_discussions.map(&:author)
- expect(issue.description).to include(*authors.map(&:to_reference))
+ expect(build_issue(base_params).description).to include(*authors.map(&:to_reference))
end
it 'has a link for each unresolved discussion in the description' do
notes = merge_request.resolvable_discussions.map(&:first_note)
links = notes.map { |note| Gitlab::UrlBuilder.build(note) }
- expect(issue.description).to include(*links)
+ expect(build_issue(base_params).description).to include(*links)
end
it 'mentions additional notes' do
create_list(:diff_note_on_merge_request, 2, noteable: merge_request, project: merge_request.target_project, in_reply_to: diff_note)
- expect(issue.description).to include('(+2 comments)')
+ expect(build_issue(base_params).description).to include('(+2 comments)')
end
end
end
@@ -120,7 +122,7 @@ describe Issues::BuildService do
describe '#execute' do
it 'mentions the merge request in the description' do
- issue = described_class.new(project, user, merge_request_to_resolve_discussions_of: merge_request.iid).execute
+ issue = build_issue(merge_request_to_resolve_discussions_of: merge_request.iid)
expect(issue.description).to include("Review the conversation in #{merge_request.to_reference}")
end
@@ -128,20 +130,18 @@ describe Issues::BuildService do
end
describe '#execute' do
- let(:milestone) { create(:milestone, project: project) }
-
it 'builds a new issues with given params' do
- issue = described_class.new(
- project,
- user,
- title: 'Issue #1',
- description: 'Issue description',
- milestone_id: milestone.id
- ).execute
-
- expect(issue.title).to eq('Issue #1')
- expect(issue.description).to eq('Issue description')
+ milestone = create(:milestone, project: project)
+ issue = build_issue(milestone_id: milestone.id)
+
expect(issue.milestone).to eq(milestone)
end
+
+ it 'sets milestone to nil if it is not available for the project' do
+ milestone = create(:milestone, project: create(:project))
+ issue = build_issue(milestone_id: milestone.id)
+
+ expect(issue.milestone).to be_nil
+ end
end
end
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 931e47d3a77..f1684209729 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -356,7 +356,7 @@ describe Issues::UpdateService, :mailer do
it_behaves_like 'system notes for milestones'
it 'sends notifications for subscribers of changed milestone' do
- issue.milestone = create(:milestone)
+ issue.milestone = create(:milestone, project: project)
issue.save
@@ -380,7 +380,7 @@ describe Issues::UpdateService, :mailer do
end
it 'marks todos as done' do
- update_issue(milestone: create(:milestone))
+ update_issue(milestone: create(:milestone, project: project))
expect(todo.reload.done?).to eq true
end
@@ -389,7 +389,7 @@ describe Issues::UpdateService, :mailer do
it 'sends notifications for subscribers of changed milestone' do
perform_enqueued_jobs do
- update_issue(milestone: create(:milestone))
+ update_issue(milestone: create(:milestone, project: project))
end
should_email(subscriber)
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index 536d0d345a4..057e8137a4e 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -229,6 +229,15 @@ describe MergeRequests::BuildService do
end
end
end
+
+ context 'when a milestone is from another project' do
+ let(:milestone) { create(:milestone, project: create(:project)) }
+ let(:milestone_id) { milestone.id }
+
+ it 'sets milestone to nil' do
+ expect(merge_request.milestone).to be_nil
+ end
+ end
end
end
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index b46aa65818d..a04a4d5fc36 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -194,7 +194,7 @@ describe MergeRequests::CreateService do
merge_request.reload
expect(merge_request.merge_request_pipelines.count).to eq(1)
- expect(merge_request.actual_head_pipeline).to be_merge_request
+ expect(merge_request.actual_head_pipeline).to be_merge_request_event
end
context 'when there are no commits between source branch and target branch' do
@@ -226,7 +226,7 @@ describe MergeRequests::CreateService do
end
it 'sets the latest merge request pipeline as the head pipeline' do
- expect(merge_request.actual_head_pipeline).to be_merge_request
+ expect(merge_request.actual_head_pipeline).to be_merge_request_event
end
end
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index ede79b87bcc..9d674263259 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -244,7 +244,7 @@ describe MergeRequests::MergeService do
service.execute(merge_request)
- expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
+ expect(merge_request.merge_error).to include("Something went wrong during merge pre-receive hook: #{error_message}")
expect(Rails.logger).to have_received(:error).with(a_string_matching(error_message))
end
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index 96f2fde7117..fabca8f6b4a 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -19,27 +19,7 @@ describe MergeRequests::MergeToRefService do
end
end
- set(:user) { create(:user) }
- let(:merge_request) { create(:merge_request, :simple) }
- let(:project) { merge_request.project }
-
- before do
- project.add_maintainer(user)
- end
-
- describe '#execute' do
- let(:service) do
- described_class.new(project, user,
- commit_message: 'Awesome message',
- 'should_remove_source_branch' => true)
- end
-
- def process_merge_to_ref
- perform_enqueued_jobs do
- service.execute(merge_request)
- end
- end
-
+ shared_examples_for 'successfully merges to ref with merge method' do
it 'writes commit to merge ref' do
repository = project.repository
target_ref = merge_request.merge_ref_path
@@ -52,9 +32,31 @@ describe MergeRequests::MergeToRefService do
expect(result[:status]).to eq(:success)
expect(result[:commit_id]).to be_present
+ expect(result[:source_id]).to eq(merge_request.source_branch_sha)
+ expect(result[:target_id]).to eq(merge_request.target_branch_sha)
expect(repository.ref_exists?(target_ref)).to be(true)
expect(ref_head.id).to eq(result[:commit_id])
end
+ end
+
+ shared_examples_for 'successfully evaluates pre-condition checks' do
+ it 'returns error when feature is disabled' do
+ stub_feature_flags(merge_to_tmp_merge_ref_path: false)
+
+ result = service.execute(merge_request)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Feature is not enabled')
+ end
+
+ it 'returns an error when failing to process the merge' do
+ allow(project.repository).to receive(:merge_to_ref).and_return(nil)
+
+ result = service.execute(merge_request)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Conflicts detected during merge')
+ end
it 'does not send any mail' do
expect { process_merge_to_ref }.not_to change { ActionMailer::Base.deliveries.count }
@@ -73,25 +75,31 @@ describe MergeRequests::MergeToRefService do
process_merge_to_ref
end
+ end
- it 'returns error when feature is disabled' do
- stub_feature_flags(merge_to_tmp_merge_ref_path: false)
+ set(:user) { create(:user) }
+ let(:merge_request) { create(:merge_request, :simple) }
+ let(:project) { merge_request.project }
- result = service.execute(merge_request)
+ before do
+ project.add_maintainer(user)
+ end
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Feature is not enabled')
+ describe '#execute' do
+ let(:service) do
+ described_class.new(project, user, commit_message: 'Awesome message',
+ should_remove_source_branch: true)
end
- it 'returns an error when the failing to process the merge' do
- allow(project.repository).to receive(:merge_to_ref).and_return(nil)
-
- result = service.execute(merge_request)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Conflicts detected during merge')
+ def process_merge_to_ref
+ perform_enqueued_jobs do
+ service.execute(merge_request)
+ end
end
+ it_behaves_like 'successfully merges to ref with merge method'
+ it_behaves_like 'successfully evaluates pre-condition checks'
+
context 'commit history comparison with regular MergeService' do
let(:merge_ref_service) do
described_class.new(project, user, {})
@@ -122,29 +130,15 @@ describe MergeRequests::MergeToRefService do
context 'when semi-linear merge method' do
let(:merge_method) { :rebase_merge }
- it 'return error when MR should be able to fast-forward' do
- allow(merge_request).to receive(:should_be_rebased?) { true }
-
- error_message = 'Fast-forward merge is not possible. Please update your source branch.'
-
- result = service.execute(merge_request)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq(error_message)
- end
+ it_behaves_like 'successfully merges to ref with merge method'
+ it_behaves_like 'successfully evaluates pre-condition checks'
end
context 'when fast-forward merge method' do
let(:merge_method) { :ff }
- it 'returns error' do
- error_message = "Fast-forward to #{merge_request.merge_ref_path} is currently not supported."
-
- result = service.execute(merge_request)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq(error_message)
- end
+ it_behaves_like 'successfully merges to ref with merge method'
+ it_behaves_like 'successfully evaluates pre-condition checks'
end
context 'when MR is not mergeable to ref' do
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 9e9dc5a576c..43ceb1dcbee 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -173,12 +173,12 @@ describe MergeRequests::RefreshService do
it 'sets the latest merge request pipeline as a head pipeline' do
@merge_request.reload
- expect(@merge_request.actual_head_pipeline).to be_merge_request
+ expect(@merge_request.actual_head_pipeline).to be_merge_request_event
end
it 'returns pipelines in correct order' do
@merge_request.reload
- expect(@merge_request.all_pipelines.first).to be_merge_request
+ expect(@merge_request.all_pipelines.first).to be_merge_request_event
expect(@merge_request.all_pipelines.second).to be_push
end
end
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 20580bf14b9..8e367db031c 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -328,7 +328,7 @@ describe MergeRequests::UpdateService, :mailer do
it_behaves_like 'system notes for milestones'
it 'sends notifications for subscribers of changed milestone' do
- merge_request.milestone = create(:milestone)
+ merge_request.milestone = create(:milestone, project: project)
merge_request.save
@@ -352,7 +352,7 @@ describe MergeRequests::UpdateService, :mailer do
end
it 'marks pending todos as done' do
- update_merge_request({ milestone: create(:milestone) })
+ update_merge_request({ milestone: create(:milestone, project: project) })
expect(pending_todo.reload).to be_done
end
@@ -361,7 +361,7 @@ describe MergeRequests::UpdateService, :mailer do
it 'sends notifications for subscribers of changed milestone' do
perform_enqueued_jobs do
- update_merge_request(milestone: create(:milestone))
+ update_merge_request(milestone: create(:milestone, project: project))
end
should_email(subscriber)
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 1645b67c329..8d8e81173ff 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -220,6 +220,19 @@ describe Notes::CreateService do
expect(note.note).to eq "HELLO\nWORLD"
end
end
+
+ context 'when note only has commands' do
+ it 'adds commands applied message to note errors' do
+ note_text = %(/close)
+ service = double(:service)
+ allow(Issues::UpdateService).to receive(:new).and_return(service)
+ expect(service).to receive(:execute)
+
+ note = described_class.new(project, user, opts.merge(note: note_text)).execute
+
+ expect(note.errors[:commands_only]).to be_present
+ end
+ end
end
context 'as a user who cannot update the target' do
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index 14d62763a5b..7d2b6d5b8a7 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -28,8 +28,8 @@ describe Notes::QuickActionsService do
end
it 'closes noteable, sets labels, assigns, and sets milestone to noteable, and leave no note' do
- content, command_params = service.extract_commands(note)
- service.execute(command_params, note)
+ content, update_params = service.execute(note)
+ service.apply_updates(update_params, note)
expect(content).to eq ''
expect(note.noteable).to be_closed
@@ -47,8 +47,8 @@ describe Notes::QuickActionsService do
let(:note_text) { '/reopen' }
it 'opens the noteable, and leave no note' do
- content, command_params = service.extract_commands(note)
- service.execute(command_params, note)
+ content, update_params = service.execute(note)
+ service.apply_updates(update_params, note)
expect(content).to eq ''
expect(note.noteable).to be_open
@@ -59,8 +59,8 @@ describe Notes::QuickActionsService do
let(:note_text) { '/spend 1h' }
it 'updates the spent time on the noteable' do
- content, command_params = service.extract_commands(note)
- service.execute(command_params, note)
+ content, update_params = service.execute(note)
+ service.apply_updates(update_params, note)
expect(content).to eq ''
expect(note.noteable.time_spent).to eq(3600)
@@ -75,8 +75,8 @@ describe Notes::QuickActionsService do
end
it 'closes noteable, sets labels, assigns, and sets milestone to noteable' do
- content, command_params = service.extract_commands(note)
- service.execute(command_params, note)
+ content, update_params = service.execute(note)
+ service.apply_updates(update_params, note)
expect(content).to eq "HELLO\nWORLD"
expect(note.noteable).to be_closed
@@ -94,8 +94,8 @@ describe Notes::QuickActionsService do
let(:note_text) { "HELLO\n/reopen\nWORLD" }
it 'opens the noteable' do
- content, command_params = service.extract_commands(note)
- service.execute(command_params, note)
+ content, update_params = service.execute(note)
+ service.apply_updates(update_params, note)
expect(content).to eq "HELLO\nWORLD"
expect(note.noteable).to be_open
@@ -190,8 +190,8 @@ describe Notes::QuickActionsService do
end
it 'adds only one assignee from the list' do
- _, command_params = service.extract_commands(note)
- service.execute(command_params, note)
+ _, update_params = service.execute(note)
+ service.apply_updates(update_params, note)
expect(note.noteable.assignees.count).to eq(1)
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 6a5a6989607..9ba4a11104a 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -177,7 +177,7 @@ describe NotificationService, :mailer do
end
end
- context 'when recieving a non-existent method' do
+ context 'when receiving a non-existent method' do
it 'raises NoMethodError' do
expect { async.foo(key) }.to raise_error(NoMethodError)
end
diff --git a/spec/services/projects/group_links/create_service_spec.rb b/spec/services/projects/group_links/create_service_spec.rb
index ffb270d277e..68fd82b4cbe 100644
--- a/spec/services/projects/group_links/create_service_spec.rb
+++ b/spec/services/projects/group_links/create_service_spec.rb
@@ -12,6 +12,10 @@ describe Projects::GroupLinks::CreateService, '#execute' do
end
let(:subject) { described_class.new(project, user, opts) }
+ before do
+ group.add_developer(user)
+ end
+
it 'adds group to project' do
expect { subject.execute(group) }.to change { project.project_group_links.count }.from(0).to(1)
end
@@ -19,4 +23,8 @@ describe Projects::GroupLinks::CreateService, '#execute' do
it 'returns false if group is blank' do
expect { subject.execute(nil) }.not_to change { project.project_group_links.count }
end
+
+ it 'returns error if user is not allowed to share with a group' do
+ expect { subject.execute(create :group) }.not_to change { project.project_group_links.count }
+ end
end
diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
index 61dbb57ec08..639dd930618 100644
--- a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb
@@ -70,10 +70,10 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
FileUtils.mkdir_p(base_path(hashed_storage))
end
- it 'raises AttachmentMigrationError' do
+ it 'raises AttachmentCannotMoveError' do
expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
- expect { service.execute }.to raise_error(Projects::HashedStorage::AttachmentMigrationError)
+ expect { service.execute }.to raise_error(Projects::HashedStorage::AttachmentCannotMoveError)
end
end
end
@@ -86,6 +86,8 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
context '#new_disk_path' do
it 'returns new disk_path for project' do
+ service.execute
+
expect(service.new_disk_path).to eq(project.disk_path)
end
end
diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
index 0772dc4b85b..e77e2198439 100644
--- a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
@@ -28,7 +28,17 @@ describe Projects::HashedStorage::MigrateRepositoryService do
it 'fails when a git operation is in progress' do
allow(project).to receive(:repo_reference_count) { 1 }
- expect { service.execute }.to raise_error(Projects::HashedStorage::RepositoryMigrationError)
+ expect { service.execute }.to raise_error(Projects::HashedStorage::RepositoryInUseError)
+ end
+ end
+
+ context 'when repository does not exist on disk' do
+ let(:project) { create(:project, :legacy_storage) }
+
+ it 'skips the disk change but increases the version' do
+ service.execute
+
+ expect(project.hashed_storage?(:repository)).to be_truthy
end
end
diff --git a/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb b/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
new file mode 100644
index 00000000000..6f4154d6011
--- /dev/null
+++ b/spec/services/projects/hashed_storage/rollback_attachments_service_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::HashedStorage::RollbackAttachmentsService do
+ subject(:service) { described_class.new(project, logger: nil) }
+
+ let(:project) { create(:project, :repository, skip_disk_validation: true) }
+ let(:legacy_storage) { Storage::LegacyProject.new(project) }
+ let(:hashed_storage) { Storage::HashedProject.new(project) }
+
+ let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
+ let(:file_uploader) { build(:file_uploader, project: project) }
+ let(:old_disk_path) { File.join(base_path(hashed_storage), upload.path) }
+ let(:new_disk_path) { File.join(base_path(legacy_storage), upload.path) }
+
+ context '#execute' do
+ context 'when succeeds' do
+ it 'moves attachments to legacy storage layout' do
+ expect(File.file?(old_disk_path)).to be_truthy
+ expect(File.file?(new_disk_path)).to be_falsey
+ expect(File.exist?(base_path(hashed_storage))).to be_truthy
+ expect(File.exist?(base_path(legacy_storage))).to be_falsey
+ expect(FileUtils).to receive(:mv).with(base_path(hashed_storage), base_path(legacy_storage)).and_call_original
+
+ service.execute
+
+ expect(File.exist?(base_path(legacy_storage))).to be_truthy
+ expect(File.exist?(base_path(hashed_storage))).to be_falsey
+ expect(File.file?(old_disk_path)).to be_falsey
+ expect(File.file?(new_disk_path)).to be_truthy
+ end
+
+ it 'returns true' do
+ expect(service.execute).to be_truthy
+ end
+
+ it 'sets skipped to false' do
+ service.execute
+
+ expect(service.skipped?).to be_falsey
+ end
+ end
+
+ context 'when original folder does not exist anymore' do
+ before do
+ FileUtils.rm_rf(base_path(hashed_storage))
+ end
+
+ it 'skips moving folders and goes to next' do
+ expect(FileUtils).not_to receive(:mv).with(base_path(hashed_storage), base_path(legacy_storage))
+
+ service.execute
+
+ expect(File.exist?(base_path(legacy_storage))).to be_falsey
+ expect(File.file?(new_disk_path)).to be_falsey
+ end
+
+ it 'returns true' do
+ expect(service.execute).to be_truthy
+ end
+
+ it 'sets skipped to true' do
+ service.execute
+
+ expect(service.skipped?).to be_truthy
+ end
+ end
+
+ context 'when target folder already exists' do
+ before do
+ FileUtils.mkdir_p(base_path(legacy_storage))
+ end
+
+ it 'raises AttachmentCannotMoveError' do
+ expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))
+
+ expect { service.execute }.to raise_error(Projects::HashedStorage::AttachmentCannotMoveError)
+ end
+ end
+ end
+
+ context '#old_disk_path' do
+ it 'returns old disk_path for project' do
+ expect(service.old_disk_path).to eq(project.disk_path)
+ end
+ end
+
+ context '#new_disk_path' do
+ it 'returns new disk_path for project' do
+ service.execute
+
+ expect(service.new_disk_path).to eq(project.full_path)
+ end
+ end
+
+ def base_path(storage)
+ File.join(FileUploader.root, storage.disk_path)
+ end
+end
diff --git a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
new file mode 100644
index 00000000000..41927934501
--- /dev/null
+++ b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis_shared_state do
+ include GitHelpers
+
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:project) { create(:project, :repository, :wiki_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
+ let(:legacy_storage) { Storage::LegacyProject.new(project) }
+ let(:hashed_storage) { Storage::HashedProject.new(project) }
+
+ subject(:service) { described_class.new(project, project.disk_path) }
+
+ describe '#execute' do
+ let(:old_disk_path) { hashed_storage.disk_path }
+ let(:new_disk_path) { legacy_storage.disk_path }
+
+ before do
+ allow(service).to receive(:gitlab_shell) { gitlab_shell }
+ end
+
+ context 'repository lock' do
+ it 'tries to lock the repository' do
+ expect(service).to receive(:try_to_set_repository_read_only!)
+
+ service.execute
+ end
+
+ it 'fails when a git operation is in progress' do
+ allow(project).to receive(:repo_reference_count) { 1 }
+
+ expect { service.execute }.to raise_error(Projects::HashedStorage::RepositoryInUseError)
+ end
+ end
+
+ context 'when repository does not exist on disk' do
+ let(:project) { create(:project) }
+
+ it 'skips the disk change but decreases the version' do
+ service.execute
+
+ expect(project.legacy_storage?).to be_truthy
+ end
+ end
+
+ context 'when succeeds' do
+ it 'renames project and wiki repositories' do
+ service.execute
+
+ expect(gitlab_shell.exists?(project.repository_storage, "#{new_disk_path}.git")).to be_truthy
+ expect(gitlab_shell.exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_truthy
+ end
+
+ it 'updates project to be legacy and not read-only' do
+ service.execute
+
+ expect(project.legacy_storage?).to be_truthy
+ expect(project.repository_read_only).to be_falsey
+ end
+
+ it 'move operation is called for both repositories' do
+ expect_move_repository(old_disk_path, new_disk_path)
+ expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
+
+ service.execute
+ end
+
+ it 'writes project full path to .git/config' do
+ service.execute
+
+ rugged_config = rugged_repo(project.repository).config['gitlab.fullpath']
+
+ expect(rugged_config).to eq project.full_path
+ end
+ end
+
+ context 'when one move fails' do
+ it 'rolls repositories back to original name' do
+ allow(service).to receive(:move_repository).and_call_original
+ allow(service).to receive(:move_repository).with(old_disk_path, new_disk_path).once { false } # will disable first move only
+
+ expect(service).to receive(:rollback_folder_move).and_call_original
+
+ service.execute
+
+ expect(gitlab_shell.exists?(project.repository_storage, "#{new_disk_path}.git")).to be_falsey
+ expect(gitlab_shell.exists?(project.repository_storage, "#{new_disk_path}.wiki.git")).to be_falsey
+ expect(project.repository_read_only?).to be_falsey
+ end
+
+ context 'when rollback fails' do
+ before do
+ legacy_storage.ensure_storage_path_exists
+ gitlab_shell.mv_repository(project.repository_storage, old_disk_path, new_disk_path)
+ end
+
+ it 'does not try to move nil repository over existing' do
+ expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage, old_disk_path, new_disk_path)
+ expect_move_repository("#{old_disk_path}.wiki", "#{new_disk_path}.wiki")
+
+ service.execute
+ end
+ end
+ end
+
+ def expect_move_repository(from_name, to_name)
+ expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage, from_name, to_name).and_call_original
+ end
+ end
+end
diff --git a/spec/services/projects/hashed_storage/rollback_service_spec.rb b/spec/services/projects/hashed_storage/rollback_service_spec.rb
new file mode 100644
index 00000000000..427d1535559
--- /dev/null
+++ b/spec/services/projects/hashed_storage/rollback_service_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::HashedStorage::RollbackService do
+ let(:project) { create(:project, :empty_repo, :wiki_repo) }
+ let(:logger) { double }
+
+ subject(:service) { described_class.new(project, project.full_path, logger: logger) }
+
+ describe '#execute' do
+ context 'attachments rollback' do
+ let(:attachments_service_class) { Projects::HashedStorage::RollbackAttachmentsService }
+ let(:attachments_service) { attachments_service_class.new(project, logger: logger) }
+
+ it 'delegates rollback to Projects::HashedStorage::RollbackAttachmentsService' do
+ expect(attachments_service_class).to receive(:new)
+ .with(project, logger: logger)
+ .and_return(attachments_service)
+ expect(attachments_service).to receive(:execute)
+
+ service.execute
+ end
+
+ it 'does not delegate rollback if repository is in legacy storage already' do
+ project.storage_version = nil
+ expect(attachments_service_class).not_to receive(:new)
+
+ service.execute
+ end
+ end
+
+ context 'repository rollback' do
+ let(:repository_service_class) { Projects::HashedStorage::RollbackRepositoryService }
+ let(:repository_service) { repository_service_class.new(project, project.full_path, logger: logger) }
+
+ it 'delegates rollback to RollbackRepositoryService' do
+ project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:repository]
+
+ expect(repository_service_class).to receive(:new)
+ .with(project, project.full_path, logger: logger)
+ .and_return(repository_service)
+ expect(repository_service).to receive(:execute)
+
+ service.execute
+ end
+
+ it 'does not delegate rollback if repository is in legacy storage already' do
+ project.storage_version = nil
+
+ expect(repository_service_class).not_to receive(:new)
+
+ service.execute
+ end
+ end
+ end
+end
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index 6afae3da80c..86b1ec83f50 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -17,8 +17,14 @@ describe Projects::Operations::UpdateService do
{
error_tracking_setting_attributes: {
enabled: false,
- api_url: 'http://gitlab.com/api/0/projects/org/project',
- token: 'token'
+ api_host: 'http://gitlab.com/',
+ token: 'token',
+ project: {
+ slug: 'project',
+ name: 'Project',
+ organization_slug: 'org',
+ organization_name: 'Org'
+ }
}
}
end
@@ -32,8 +38,30 @@ describe Projects::Operations::UpdateService do
project.reload
expect(project.error_tracking_setting).not_to be_enabled
- expect(project.error_tracking_setting.api_url).to eq('http://gitlab.com/api/0/projects/org/project')
+ expect(project.error_tracking_setting.api_url).to eq(
+ 'http://gitlab.com/api/0/projects/org/project/'
+ )
expect(project.error_tracking_setting.token).to eq('token')
+ expect(project.error_tracking_setting[:project_name]).to eq('Project')
+ expect(project.error_tracking_setting[:organization_name]).to eq('Org')
+ end
+
+ context 'disable error tracking' do
+ before do
+ params[:error_tracking_setting_attributes][:api_host] = ''
+ params[:error_tracking_setting_attributes][:enabled] = false
+ end
+
+ it 'can set api_url to nil' do
+ expect(result[:status]).to eq(:success)
+
+ project.reload
+ expect(project.error_tracking_setting).not_to be_enabled
+ expect(project.error_tracking_setting.api_url).to be_nil
+ expect(project.error_tracking_setting.token).to eq('token')
+ expect(project.error_tracking_setting[:project_name]).to eq('Project')
+ expect(project.error_tracking_setting[:organization_name]).to eq('Org')
+ end
end
end
@@ -42,8 +70,14 @@ describe Projects::Operations::UpdateService do
{
error_tracking_setting_attributes: {
enabled: true,
- api_url: 'http://gitlab.com/api/0/projects/org/project',
- token: 'token'
+ api_host: 'http://gitlab.com/',
+ token: 'token',
+ project: {
+ slug: 'project',
+ name: 'Project',
+ organization_slug: 'org',
+ organization_name: 'Org'
+ }
}
}
end
@@ -52,8 +86,12 @@ describe Projects::Operations::UpdateService do
expect(result[:status]).to eq(:success)
expect(project.error_tracking_setting).to be_enabled
- expect(project.error_tracking_setting.api_url).to eq('http://gitlab.com/api/0/projects/org/project')
+ expect(project.error_tracking_setting.api_url).to eq(
+ 'http://gitlab.com/api/0/projects/org/project/'
+ )
expect(project.error_tracking_setting.token).to eq('token')
+ expect(project.error_tracking_setting[:project_name]).to eq('Project')
+ expect(project.error_tracking_setting[:organization_name]).to eq('Org')
end
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 938764f40b0..ea33d156c8a 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -1526,5 +1526,15 @@ describe QuickActions::InterpretService do
end
end
end
+
+ context "#commands_executed_count" do
+ it 'counts commands executed' do
+ content = "/close and \n/assign me and \n/title new title"
+
+ service.execute(content, issue)
+
+ expect(service.commands_executed_count).to eq(3)
+ end
+ end
end
end
diff --git a/spec/services/suggestions/apply_service_spec.rb b/spec/services/suggestions/apply_service_spec.rb
index 8e77d582eb4..fe85b5c9065 100644
--- a/spec/services/suggestions/apply_service_spec.rb
+++ b/spec/services/suggestions/apply_service_spec.rb
@@ -362,6 +362,17 @@ describe Suggestions::ApplyService do
project.add_maintainer(user)
end
+ context 'diff file was not found' do
+ it 'returns error message' do
+ expect(suggestion.note).to receive(:latest_diff_file) { nil }
+
+ result = subject.execute(suggestion)
+
+ expect(result).to eq(message: 'The file was not found',
+ status: :error)
+ end
+ end
+
context 'suggestion was already applied' do
it 'returns success status' do
result = subject.execute(suggestion)
diff --git a/spec/services/suggestions/create_service_spec.rb b/spec/services/suggestions/create_service_spec.rb
index f1142c88a69..1b4b15b8eaa 100644
--- a/spec/services/suggestions/create_service_spec.rb
+++ b/spec/services/suggestions/create_service_spec.rb
@@ -9,14 +9,18 @@ describe Suggestions::CreateService do
target_project: project_with_repo)
end
- let(:position) do
- Gitlab::Diff::Position.new(old_path: "files/ruby/popen.rb",
- new_path: "files/ruby/popen.rb",
- old_line: nil,
- new_line: 14,
- diff_refs: merge_request.diff_refs)
+ def build_position(args = {})
+ default_args = { old_path: "files/ruby/popen.rb",
+ new_path: "files/ruby/popen.rb",
+ old_line: nil,
+ new_line: 14,
+ diff_refs: merge_request.diff_refs }
+
+ Gitlab::Diff::Position.new(default_args.merge(args))
end
+ let(:position) { build_position }
+
let(:markdown) do
<<-MARKDOWN.strip_heredoc
```suggestion
@@ -74,6 +78,21 @@ describe Suggestions::CreateService do
end
end
+ context 'should not create suggestions' do
+ let(:note) do
+ create(:diff_note_on_merge_request, project: project_with_repo,
+ noteable: merge_request,
+ position: position,
+ note: markdown)
+ end
+
+ it 'creates no suggestion when diff file is not found' do
+ expect(note).to receive(:latest_diff_file) { nil }
+
+ expect { subject.execute }.not_to change(Suggestion, :count)
+ end
+ end
+
context 'should create suggestions' do
let(:note) do
create(:diff_note_on_merge_request, project: project_with_repo,
@@ -104,6 +123,22 @@ describe Suggestions::CreateService do
expect(suggestion_2).to have_attributes(from_content: " vars = {\n",
to_content: " xpto\n baz\n")
end
+
+ context 'outdated position note' do
+ let!(:outdated_diff) { merge_request.merge_request_diff }
+ let!(:latest_diff) { merge_request.create_merge_request_diff }
+ let(:outdated_position) { build_position(diff_refs: outdated_diff.diff_refs) }
+ let(:position) { build_position(diff_refs: latest_diff.diff_refs) }
+
+ it 'uses the correct position when creating the suggestion' do
+ expect(note.position)
+ .to receive(:diff_file)
+ .with(project_with_repo.repository)
+ .and_call_original
+
+ subject.execute
+ end
+ end
end
end
end
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 82544ab0413..b917de14b2e 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -807,9 +807,10 @@ describe SystemNoteService do
expect(WebMock).to have_requested(:post, jira_api_remote_link_url(jira_issue)).with(
body: hash_including(
GlobalID: "GitLab",
+ relationship: 'mentioned on',
object: {
url: project_commit_url(project, commit),
- title: "GitLab: Mentioned on commit - #{commit.title}",
+ title: "Commit - #{commit.title}",
icon: { title: "GitLab", url16x16: favicon_path },
status: { resolved: false }
}
@@ -833,9 +834,10 @@ describe SystemNoteService do
expect(WebMock).to have_requested(:post, jira_api_remote_link_url(jira_issue)).with(
body: hash_including(
GlobalID: "GitLab",
+ relationship: 'mentioned on',
object: {
url: project_issue_url(project, issue),
- title: "GitLab: Mentioned on issue - #{issue.title}",
+ title: "Issue - #{issue.title}",
icon: { title: "GitLab", url16x16: favicon_path },
status: { resolved: false }
}
@@ -859,9 +861,10 @@ describe SystemNoteService do
expect(WebMock).to have_requested(:post, jira_api_remote_link_url(jira_issue)).with(
body: hash_including(
GlobalID: "GitLab",
+ relationship: 'mentioned on',
object: {
url: project_snippet_url(project, snippet),
- title: "GitLab: Mentioned on snippet - #{snippet.title}",
+ title: "Snippet - #{snippet.title}",
icon: { title: "GitLab", url16x16: favicon_path },
status: { resolved: false }
}
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 97e7a019222..e8d7b18bf04 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -115,10 +115,17 @@ RSpec.configure do |config|
TestEnv.clean_test_path
end
- config.before do
+ config.before do |example|
# Enable all features by default for testing
allow(Feature).to receive(:enabled?) { true }
+ enabled = example.metadata[:enable_rugged].present?
+
+ # Disable Rugged features by default
+ Gitlab::Git::RuggedImpl::Repository::FEATURE_FLAGS.each do |flag|
+ allow(Feature).to receive(:enabled?).with(flag).and_return(enabled)
+ end
+
# The following can be removed when we remove the staged rollout strategy
# and we can just enable it using instance wide settings
# (ie. ApplicationSetting#auto_devops_enabled)
diff --git a/spec/support/api/schema_matcher.rb b/spec/support/api/schema_matcher.rb
index 6591d56e473..f7d74df0656 100644
--- a/spec/support/api/schema_matcher.rb
+++ b/spec/support/api/schema_matcher.rb
@@ -1,6 +1,6 @@
module SchemaPath
def self.expand(schema, dir = '')
- Rails.root.join('spec', dir, "fixtures/api/schemas/#{schema}.json").to_s
+ Rails.root.join(dir, 'spec', "fixtures/api/schemas/#{schema}.json").to_s
end
end
diff --git a/spec/support/database_cleaner.rb b/spec/support/database_cleaner.rb
new file mode 100644
index 00000000000..edd7de94203
--- /dev/null
+++ b/spec/support/database_cleaner.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'database_cleaner/active_record/deletion'
+require_relative 'db_cleaner'
+
+module FakeInformationSchema
+ # Work around a bug in DatabaseCleaner when using the deletion strategy:
+ # https://github.com/DatabaseCleaner/database_cleaner/issues/347
+ #
+ # On MySQL, if the information schema is said to exist, we use an inaccurate
+ # row count leading to some tables not being cleaned when they should
+ def information_schema_exists?(_connection)
+ false
+ end
+end
+
+DatabaseCleaner::ActiveRecord::Deletion.prepend(FakeInformationSchema)
+
+RSpec.configure do |config|
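+ # DbCleaner (spec/support/db_cleaner.rb) provides setup_database_cleaner and deletion_except_tables (defaults to [])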
+ include DbCleaner
+
+ # Ensure all sequences are reset at the start of the suite run
+ config.before(:suite) do
+ setup_database_cleaner
+ DatabaseCleaner.clean_with(:truncation)
+ end
+
+ config.append_after(:context) do
+ DatabaseCleaner.clean_with(:deletion, cache_tables: false)
+ end
+
+ config.before do
+ setup_database_cleaner
+ DatabaseCleaner.strategy = :transaction
+ end
+
+ config.before(:each, :js) do
+ DatabaseCleaner.strategy = :deletion, { except: deletion_except_tables, cache_tables: false }
+ end
+
+ config.before(:each, :delete) do
+ DatabaseCleaner.strategy = :deletion, { except: deletion_except_tables, cache_tables: false }
+ end
+
+ config.before(:each, :migration) do
+ DatabaseCleaner.strategy = :deletion, { cache_tables: false }
+ end
+
+ config.before do
+ DatabaseCleaner.start
+ end
+
+ config.append_after do
+ DatabaseCleaner.clean
+ end
+end
diff --git a/spec/support/db_cleaner.rb b/spec/support/db_cleaner.rb
index 34b9efaaecd..c69fa322073 100644
--- a/spec/support/db_cleaner.rb
+++ b/spec/support/db_cleaner.rb
@@ -1,49 +1,9 @@
-require 'database_cleaner/active_record/deletion'
-
-module FakeInformationSchema
- # Work around a bug in DatabaseCleaner when using the deletion strategy:
- # https://github.com/DatabaseCleaner/database_cleaner/issues/347
- #
- # On MySQL, if the information schema is said to exist, we use an inaccurate
- # row count leading to some tables not being cleaned when they should
- def information_schema_exists?(_connection)
- false
- end
-end
-
-DatabaseCleaner::ActiveRecord::Deletion.prepend(FakeInformationSchema)
-
-RSpec.configure do |config|
- # Ensure all sequences are reset at the start of the suite run
- config.before(:suite) do
- DatabaseCleaner.clean_with(:truncation)
- end
-
- config.append_after(:context) do
- DatabaseCleaner.clean_with(:deletion, cache_tables: false)
- end
-
- config.before do
- DatabaseCleaner.strategy = :transaction
- end
-
- config.before(:each, :js) do
- DatabaseCleaner.strategy = :deletion, { cache_tables: false }
- end
-
- config.before(:each, :delete) do
- DatabaseCleaner.strategy = :deletion, { cache_tables: false }
- end
-
- config.before(:each, :migration) do
- DatabaseCleaner.strategy = :deletion, { cache_tables: false }
- end
-
- config.before do
- DatabaseCleaner.start
+module DbCleaner
+ def deletion_except_tables
+ []
end
- config.append_after do
- DatabaseCleaner.clean
+ def setup_database_cleaner
+ DatabaseCleaner[:active_record, { connection: ActiveRecord::Base }]
end
end
diff --git a/spec/support/features/reportable_note_shared_examples.rb b/spec/support/features/reportable_note_shared_examples.rb
index 8cfce49da8a..89dfbf931d2 100644
--- a/spec/support/features/reportable_note_shared_examples.rb
+++ b/spec/support/features/reportable_note_shared_examples.rb
@@ -41,7 +41,7 @@ shared_examples 'reportable note' do |type|
def open_dropdown(dropdown)
# make window wide enough that tooltip doesn't trigger horizontal scrollbar
- resize_window(1200, 800)
+ restore_window_size
dropdown.find('.more-actions-toggle').click
dropdown.find('.dropdown-menu li', match: :first)
diff --git a/spec/support/features/variable_list_shared_examples.rb b/spec/support/features/variable_list_shared_examples.rb
index 0a464d77cb7..73156d18c1b 100644
--- a/spec/support/features/variable_list_shared_examples.rb
+++ b/spec/support/features/variable_list_shared_examples.rb
@@ -8,7 +8,7 @@ shared_examples 'variable list' do
it 'adds new CI variable' do
page.within('.js-ci-variable-list-section .js-row:last-child') do
find('.js-ci-variable-input-key').set('key')
- find('.js-ci-variable-input-value').set('key value')
+ find('.js-ci-variable-input-value').set('key_value')
end
click_button('Save variables')
@@ -19,7 +19,7 @@ shared_examples 'variable list' do
# We check the first row because it re-sorts to alphabetical order on refresh
page.within('.js-ci-variable-list-section .js-row:nth-child(1)') do
expect(find('.js-ci-variable-input-key').value).to eq('key')
- expect(find('.js-ci-variable-input-value', visible: false).value).to eq('key value')
+ expect(find('.js-ci-variable-input-value', visible: false).value).to eq('key_value')
end
end
@@ -44,7 +44,7 @@ shared_examples 'variable list' do
it 'adds new protected variable' do
page.within('.js-ci-variable-list-section .js-row:last-child') do
find('.js-ci-variable-input-key').set('key')
- find('.js-ci-variable-input-value').set('key value')
+ find('.js-ci-variable-input-value').set('key_value')
find('.ci-variable-protected-item .js-project-feature-toggle').click
expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('true')
@@ -58,7 +58,7 @@ shared_examples 'variable list' do
# We check the first row because it re-sorts to alphabetical order on refresh
page.within('.js-ci-variable-list-section .js-row:nth-child(1)') do
expect(find('.js-ci-variable-input-key').value).to eq('key')
- expect(find('.js-ci-variable-input-value', visible: false).value).to eq('key value')
+ expect(find('.js-ci-variable-input-value', visible: false).value).to eq('key_value')
expect(find('.js-ci-variable-input-protected', visible: false).value).to eq('true')
end
end
diff --git a/spec/support/helpers/file_mover_helpers.rb b/spec/support/helpers/file_mover_helpers.rb
new file mode 100644
index 00000000000..1ba7cc03354
--- /dev/null
+++ b/spec/support/helpers/file_mover_helpers.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+module FileMoverHelpers
+ def stub_file_mover(file_path, stub_real_path: nil)
+ file_name = File.basename(file_path)
+ allow(Pathname).to receive(:new).and_call_original
+
+ expect_next_instance_of(Pathname, a_string_including(file_name)) do |pathname|
+ allow(pathname).to receive(:realpath) { stub_real_path || pathname.cleanpath }
+ end
+ end
+end
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index e468ee4676d..6cdc19ac2e5 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -77,13 +77,23 @@ module GraphqlHelpers
def query_graphql_field(name, attributes = {}, fields = nil)
fields ||= all_graphql_fields_for(name.classify)
attributes = attributes_to_graphql(attributes)
+ attributes = "(#{attributes})" if attributes.present?
<<~QUERY
- #{name}(#{attributes}) {
- #{fields}
- }
+ #{name}#{attributes}
+ #{wrap_fields(fields)}
QUERY
end
+ def wrap_fields(fields)
+ return unless fields.strip.present?
+
+ <<~FIELDS
+ {
+ #{fields}
+ }
+ FIELDS
+ end
+
def all_graphql_fields_for(class_name, parent_types = Set.new)
type = GitlabSchema.types[class_name.to_s]
return "" unless type
@@ -115,8 +125,8 @@ module GraphqlHelpers
end.join(", ")
end
- def post_graphql(query, current_user: nil, variables: nil)
- post api('/', current_user, version: 'graphql'), params: { query: query, variables: variables }
+ def post_graphql(query, current_user: nil, variables: nil, headers: {})
+ post api('/', current_user, version: 'graphql'), params: { query: query, variables: variables }, headers: headers
end
def post_graphql_mutation(mutation, current_user: nil)
diff --git a/spec/support/helpers/javascript_fixtures_helpers.rb b/spec/support/helpers/javascript_fixtures_helpers.rb
index 89c5ec7a718..f525b2f945e 100644
--- a/spec/support/helpers/javascript_fixtures_helpers.rb
+++ b/spec/support/helpers/javascript_fixtures_helpers.rb
@@ -2,11 +2,12 @@ require 'action_dispatch/testing/test_request'
require 'fileutils'
module JavaScriptFixturesHelpers
+ extend ActiveSupport::Concern
include Gitlab::Popen
- FIXTURE_PATH = 'spec/javascripts/fixtures'.freeze
+ FIXTURE_PATHS = %w[spec/javascripts/fixtures ee/spec/javascripts/fixtures].freeze
- def self.included(base)
+ included do |base|
base.around do |example|
# pick an arbitrary date from the past, so tests are not time dependent
Timecop.freeze(Time.utc(2015, 7, 3, 10)) { example.run }
@@ -15,26 +16,30 @@ module JavaScriptFixturesHelpers
# Public: Removes all fixture files from given directory
#
- # directory_name - directory of the fixtures (relative to FIXTURE_PATH)
+ # directory_name - directory of the fixtures (relative to FIXTURE_PATHS)
#
def clean_frontend_fixtures(directory_name)
- directory_name = File.expand_path(directory_name, FIXTURE_PATH)
- Dir[File.expand_path('*.html.raw', directory_name)].each do |file_name|
- FileUtils.rm(file_name)
+ FIXTURE_PATHS.each do |fixture_path|
+ fixture_directory = File.expand_path(directory_name, fixture_path)
+ Dir[File.expand_path('*.html.raw', fixture_directory)].each do |file_name|
+ FileUtils.rm(file_name)
+ end
end
end
# Public: Store a response object as fixture file
#
# response - string or response object to store
- # fixture_file_name - file name to store the fixture in (relative to FIXTURE_PATH)
+ # fixture_file_name - file name to store the fixture in (relative to FIXTURE_PATHS)
#
def store_frontend_fixture(response, fixture_file_name)
- fixture_file_name = File.expand_path(fixture_file_name, FIXTURE_PATH)
- fixture = response.respond_to?(:body) ? parse_response(response) : response
+ FIXTURE_PATHS.each do |fixture_path|
+ full_fixture_path = File.expand_path(fixture_file_name, fixture_path)
+ fixture = response.respond_to?(:body) ? parse_response(response) : response
- FileUtils.mkdir_p(File.dirname(fixture_file_name))
- File.write(fixture_file_name, fixture)
+ FileUtils.mkdir_p(File.dirname(full_fixture_path))
+ File.write(full_fixture_path, fixture)
+ end
end
def remove_repository(project)
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index 9dc89b483b2..cca11e112c9 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -9,6 +9,10 @@ module KubernetesHelpers
kube_response(kube_pods_body)
end
+ def kube_logs_response
+ kube_response(kube_logs_body)
+ end
+
def kube_deployments_response
kube_response(kube_deployments_body)
end
@@ -34,6 +38,13 @@ module KubernetesHelpers
WebMock.stub_request(:get, pods_url).to_return(response || kube_pods_response)
end
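+ # Stubs the Kubernetes pod logs endpoint; tailLines limits the response to LOGS_LIMIT lines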
+ def stub_kubeclient_logs(pod_name, response = nil)
+ stub_kubeclient_discover(service.api_url)
+ logs_url = service.api_url + "/api/v1/namespaces/#{service.actual_namespace}/pods/#{pod_name}/log?tailLines=#{Clusters::Platforms::Kubernetes::LOGS_LIMIT}"
+
+ WebMock.stub_request(:get, logs_url).to_return(response || kube_logs_response)
+ end
+
def stub_kubeclient_deployments(response = nil)
stub_kubeclient_discover(service.api_url)
deployments_url = service.api_url + "/apis/extensions/v1beta1/namespaces/#{service.actual_namespace}/deployments"
@@ -212,6 +223,10 @@ module KubernetesHelpers
}
end
+ def kube_logs_body
+ "Log 1\nLog 2\nLog 3"
+ end
+
def kube_deployments_body
{
"kind" => "DeploymentList",
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index 3fee6872498..4a0cf62a661 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -47,7 +47,7 @@ module LoginHelpers
end
def gitlab_sign_in_via(provider, user, uid, saml_response = nil)
- mock_auth_hash(provider, uid, user.email, saml_response)
+ mock_auth_hash_with_saml_xml(provider, uid, user.email, saml_response)
visit new_user_session_path
click_link provider
end
@@ -87,7 +87,12 @@ module LoginHelpers
click_link "oauth-login-#{provider}"
end
- def mock_auth_hash(provider, uid, email, saml_response = nil)
+ def mock_auth_hash_with_saml_xml(provider, uid, email, saml_response)
+ response_object = { document: saml_xml(saml_response) }
+ mock_auth_hash(provider, uid, email, response_object: response_object)
+ end
+
+ def mock_auth_hash(provider, uid, email, response_object: nil)
# The mock_auth configuration allows you to set per-provider (or default)
# authentication hashes to return during integration testing.
OmniAuth.config.mock_auth[provider.to_sym] = OmniAuth::AuthHash.new({
@@ -110,9 +115,7 @@ module LoginHelpers
image: 'mock_user_thumbnail_url'
}
},
- response_object: {
- document: saml_xml(saml_response)
- }
+ response_object: response_object
}
})
Rails.application.env_config['omniauth.auth'] = OmniAuth.config.mock_auth[provider.to_sym]
diff --git a/spec/support/import_export/export_file_helper.rb b/spec/support/import_export/export_file_helper.rb
index ac320934f5a..388b88f0331 100644
--- a/spec/support/import_export/export_file_helper.rb
+++ b/spec/support/import_export/export_file_helper.rb
@@ -91,7 +91,7 @@ module ExportFileHelper
loop do
object_with_parent = deep_find_with_parent(sensitive_word, project_hash)
- return nil unless object_with_parent && object_with_parent.object
+ return unless object_with_parent && object_with_parent.object
if is_safe_hash?(object_with_parent.parent, sensitive_word)
# It's in the safe list, remove hash and keep looking
diff --git a/spec/support/matchers/access_matchers.rb b/spec/support/matchers/access_matchers.rb
index 3e4ca8b7ab0..e6899e2d23c 100644
--- a/spec/support/matchers/access_matchers.rb
+++ b/spec/support/matchers/access_matchers.rb
@@ -7,29 +7,28 @@ module AccessMatchers
extend RSpec::Matchers::DSL
include Warden::Test::Helpers
- def emulate_user(user, membership = nil)
- case user
- when :user
- login_as(create(:user))
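+ # Signs in as a factory type (:user, :admin), a user trait (:external, :auditor), an existing User,
+ # or an access role resolved against the given membership; :visitor signs out instead.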
+ def emulate_user(user_type_or_trait, membership = nil)
+ case user_type_or_trait
+ when :user, :admin
+ login_as(create(user_type_or_trait))
+ when :external, :auditor
+ login_as(create(:user, user_type_or_trait))
when :visitor
logout
- when :admin
- login_as(create(:admin))
- when :external
- login_as(create(:user, external: true))
when User
- login_as(user)
+ login_as(user_type_or_trait)
when *Gitlab::Access.sym_options_with_owner.keys
- raise ArgumentError, "cannot emulate #{user} without membership parent" unless membership
-
- role = user
+ raise ArgumentError, "cannot emulate #{user_type_or_trait} without membership parent" unless membership
- if role == :owner && membership.owner
- user = membership.owner
- else
- user = create(:user)
- membership.public_send(:"add_#{role}", user)
- end
+ role = user_type_or_trait
+ user =
+ if role == :owner && membership.owner
+ membership.owner
+ else
+ create(:user).tap do |new_user|
+ membership.public_send(:"add_#{role}", new_user)
+ end
+ end
login_as(user)
else
diff --git a/spec/support/pg_stat_activity.rb b/spec/support/pg_stat_activity.rb
new file mode 100644
index 00000000000..f93fba08a19
--- /dev/null
+++ b/spec/support/pg_stat_activity.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
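+# Before each example on PostgreSQL, when PG_STAT_WARNING_THRESHOLD is set, warn with the
+# pg_stat_activity row count and dump every row once the count exceeds the threshold.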
+RSpec.configure do |config|
+ config.before do
+ if Gitlab::Database.postgresql? && ENV['PG_STAT_WARNING_THRESHOLD']
+ warning_threshold = ENV['PG_STAT_WARNING_THRESHOLD'].to_i
+ results = ActiveRecord::Base.connection.execute('SELECT * FROM pg_stat_activity')
+ ntuples = results.ntuples
+
+ warn("pg_stat_activity count: #{ntuples}")
+
+ if ntuples > warning_threshold
+ results.each do |result|
+ warn result.inspect
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_contexts/services_shared_context.rb b/spec/support/shared_contexts/services_shared_context.rb
index d92e8318fa0..089f1798cd2 100644
--- a/spec/support/shared_contexts/services_shared_context.rb
+++ b/spec/support/shared_contexts/services_shared_context.rb
@@ -26,6 +26,14 @@ Service.available_services_names.each do |service|
end
end
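+ # When the licensing stub helper is available (EE), enable the GitHub integration feature and
+ # clear memoized service lookups so the service can be initialized for the project.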
+ before do
+ if service == 'github' && respond_to?(:stub_licensed_features)
+ stub_licensed_features(github_project_service_integration: true)
+ project.clear_memoization(:disabled_services)
+ project.clear_memoization(:licensed_feature_available)
+ end
+ end
+
def initialize_service(service)
service_item = project.find_or_initialize_service(service)
service_item.properties = service_attrs
diff --git a/spec/support/shared_examples/issuable_shared_examples.rb b/spec/support/shared_examples/issuable_shared_examples.rb
index c3d40c5b231..d97b21f71cd 100644
--- a/spec/support/shared_examples/issuable_shared_examples.rb
+++ b/spec/support/shared_examples/issuable_shared_examples.rb
@@ -31,7 +31,7 @@ shared_examples 'system notes for milestones' do
context 'project milestones' do
it 'creates a system note' do
expect do
- update_issuable(milestone: create(:milestone))
+ update_issuable(milestone: create(:milestone, project: project))
end.to change { Note.system.count }.by(1)
end
end
diff --git a/spec/support/shared_examples/notify_shared_examples.rb b/spec/support/shared_examples/notify_shared_examples.rb
index a38354060cf..4fff1c4e228 100644
--- a/spec/support/shared_examples/notify_shared_examples.rb
+++ b/spec/support/shared_examples/notify_shared_examples.rb
@@ -252,3 +252,31 @@ shared_examples 'a note email' do
end
end
end
+
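+# Shared examples for mailers that honour the Appearance email_header_and_footer_enabled setting.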
+shared_examples 'appearance header and footer enabled' do
+ it "contains header and footer" do
+ create :appearance, header_message: "Foo", footer_message: "Bar", email_header_and_footer_enabled: true
+
+ aggregate_failures do
+ expect(subject.html_part).to have_body_text("<div class=\"header-message\" style=\"\"><p>Foo</p></div>")
+ expect(subject.html_part).to have_body_text("<div class=\"footer-message\" style=\"\"><p>Bar</p></div>")
+
+ expect(subject.text_part).to have_body_text(/^Foo/)
+ expect(subject.text_part).to have_body_text(/Bar$/)
+ end
+ end
+end
+
+shared_examples 'appearance header and footer not enabled' do
+ it "does not contain header and footer" do
+ create :appearance, header_message: "Foo", footer_message: "Bar", email_header_and_footer_enabled: false
+
+ aggregate_failures do
+ expect(subject.html_part).not_to have_body_text("<div class=\"header-message\" style=\"\"><p>Foo</p></div>")
+ expect(subject.html_part).not_to have_body_text("<div class=\"footer-message\" style=\"\"><p>Bar</p></div>")
+
+ expect(subject.text_part).not_to have_body_text(/^Foo/)
+ expect(subject.text_part).not_to have_body_text(/Bar$/)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/discussions.rb b/spec/support/shared_examples/requests/api/discussions.rb
index e44da4faa5a..eff8e401bad 100644
--- a/spec/support/shared_examples/requests/api/discussions.rb
+++ b/spec/support/shared_examples/requests/api/discussions.rb
@@ -86,6 +86,37 @@ shared_examples 'discussions API' do |parent_type, noteable_type, id_name|
expect(response).to have_gitlab_http_status(404)
end
end
+
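+ # Users without membership must receive 404s from the discussions API when the project is
+ # public but its repository and snippets are private.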
+ context 'when a project is public with private repo access' do
+ let!(:parent) { create(:project, :public, :repository, :repository_private, :snippets_private) }
+ let!(:user_without_access) { create(:user) }
+
+ context 'when user is not a team member of private repo' do
+ before do
+ project.team.truncate
+ end
+
+ context "creating a new note" do
+ before do
+ post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions", user_without_access), params: { body: 'hi!' }
+ end
+
+ it 'returns a 404 error' do
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+
+ context "fetching a discussion" do
+ before do
+ get api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions/#{note.discussion_id}", user_without_access)
+ end
+
+ it 'returns a 404 error' do
+ expect(response).to have_gitlab_http_status(404)
+ end
+ end
+ end
+ end
end
describe "POST /#{parent_type}/:id/#{noteable_type}/:noteable_id/discussions/:discussion_id/notes" do
diff --git a/spec/support/shared_examples/requests/api/merge_requests_list.rb b/spec/support/shared_examples/requests/api/merge_requests_list.rb
index 6713ec47ace..32e3b81c3c5 100644
--- a/spec/support/shared_examples/requests/api/merge_requests_list.rb
+++ b/spec/support/shared_examples/requests/api/merge_requests_list.rb
@@ -186,6 +186,37 @@ shared_examples 'merge requests list' do
expect(json_response.length).to eq(0)
end
+ it 'returns an array of labeled merge requests where all labels match' do
+ path = endpoint_path + "?labels[]=#{label.title}&labels[]=#{label2.title}"
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['labels']).to eq([label2.title, label.title])
+ end
+
+ it 'returns an array of labeled merge requests where all labels match when labels is an array with extra whitespace' do
+ get api(endpoint_path, user), params: { labels: [" #{label.title} ", " #{label2.title} "] }
+
+ expect_paginated_array_response
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['labels']).to eq([label2.title, label.title])
+ expect(json_response.first['id']).to eq(merge_request.id)
+ end
+
+ it 'returns an array of labeled merge requests where all labels match when labels is a comma-separated string' do
+ get api(endpoint_path, user), params: { labels: ["#{label.title} , #{label2.title}"] }
+
+ expect_paginated_array_response
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['labels']).to eq([label2.title, label.title])
+ expect(json_response.first['id']).to eq(merge_request.id)
+ end
+
it 'returns an array of merge requests with any label when filtering by any label' do
get api(endpoint_path, user), params: { labels: IssuesFinder::FILTER_ANY }
diff --git a/spec/support/shared_examples/views/nav_sidebar.rb b/spec/support/shared_examples/views/nav_sidebar.rb
new file mode 100644
index 00000000000..6ac5abe275d
--- /dev/null
+++ b/spec/support/shared_examples/views/nav_sidebar.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+shared_examples 'has nav sidebar' do
+ it 'has collapsed nav sidebar on mobile' do
+ render
+
+ expect(rendered).to have_selector('.nav-sidebar')
+ expect(rendered).not_to have_selector('.sidebar-collapsed-desktop')
+ expect(rendered).not_to have_selector('.sidebar-expanded-mobile')
+ end
+end
diff --git a/spec/support/webmock.rb b/spec/support/webmock.rb
index af2906b7568..9ac7e7fc515 100644
--- a/spec/support/webmock.rb
+++ b/spec/support/webmock.rb
@@ -1,4 +1,12 @@
require 'webmock'
require 'webmock/rspec'
-WebMock.disable_net_connect!(allow_localhost: true)
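+# Hosts specs may reach over real HTTP in addition to localhost; the Elasticsearch host from
+# ELASTIC_URL is included when that variable is set.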
+def webmock_allowed_hosts
+ %w[elasticsearch registry.gitlab.com-gitlab-org-test-elastic-image].tap do |hosts|
+ if ENV.key?('ELASTIC_URL')
+ hosts << URI.parse(ENV['ELASTIC_URL']).host
+ end
+ end.uniq
+end
+
+WebMock.disable_net_connect!(allow_localhost: true, allow: webmock_allowed_hosts)
diff --git a/spec/uploaders/file_mover_spec.rb b/spec/uploaders/file_mover_spec.rb
index de29d0c943f..e474a714b10 100644
--- a/spec/uploaders/file_mover_spec.rb
+++ b/spec/uploaders/file_mover_spec.rb
@@ -1,8 +1,9 @@
require 'spec_helper'
describe FileMover do
+ include FileMoverHelpers
+
let(:filename) { 'banana_sample.gif' }
- let(:file) { fixture_file_upload(File.join('spec', 'fixtures', filename)) }
let(:temp_file_path) { File.join('uploads/-/system/temp', 'secret55', filename) }
let(:temp_description) do
@@ -12,7 +13,7 @@ describe FileMover do
let(:file_path) { File.join('uploads/-/system/personal_snippet', snippet.id.to_s, 'secret55', filename) }
let(:snippet) { create(:personal_snippet, description: temp_description) }
- subject { described_class.new(file_path, snippet).execute }
+ subject { described_class.new(temp_file_path, snippet).execute }
describe '#execute' do
before do
@@ -20,6 +21,8 @@ describe FileMover do
expect(FileUtils).to receive(:move).with(a_string_including(temp_file_path), a_string_including(file_path))
allow_any_instance_of(CarrierWave::SanitizedFile).to receive(:exists?).and_return(true)
allow_any_instance_of(CarrierWave::SanitizedFile).to receive(:size).and_return(10)
+
+ stub_file_mover(temp_file_path)
end
context 'when move and field update successful' do
@@ -66,4 +69,30 @@ describe FileMover do
end
end
end
+
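+ # Path traversal ('..') and symlinks resolving outside the designated temp directory must not
+ # result in a move; in both cases FileUtils.move is never called.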
+ context 'security' do
+ context 'when relative path is involved' do
+ let(:temp_file_path) { File.join('uploads/-/system/temp', '..', 'another_subdir_of_temp') }
+
+ it 'does not trigger move if path is outside designated directory' do
+ stub_file_mover('uploads/-/system/another_subdir_of_temp')
+ expect(FileUtils).not_to receive(:move)
+
+ subject
+
+ expect(snippet.reload.description).to eq(temp_description)
+ end
+ end
+
+ context 'when symlink is involved' do
+ it 'does not trigger move if path is outside designated directory' do
+ stub_file_mover(temp_file_path, stub_real_path: Pathname('/etc'))
+ expect(FileUtils).not_to receive(:move)
+
+ subject
+
+ expect(snippet.reload.description).to eq(temp_description)
+ end
+ end
+ end
end
diff --git a/spec/validators/sha_validator_spec.rb b/spec/validators/sha_validator_spec.rb
new file mode 100644
index 00000000000..dc1539cf318
--- /dev/null
+++ b/spec/validators/sha_validator_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ShaValidator do
+ let(:validator) { described_class.new(attributes: [:base_commit_sha]) }
+ let(:merge_diff) { build(:merge_request_diff) }
+
+ subject { validator.validate_each(merge_diff, :base_commit_sha, value) }
+
+ context 'with empty value' do
+ let(:value) { nil }
+
+ it 'does not add any error if value is empty' do
+ subject
+
+ expect(merge_diff.errors).to be_empty
+ end
+ end
+
+ context 'with valid sha' do
+ let(:value) { Digest::SHA1.hexdigest(SecureRandom.hex) }
+
+ it 'does not add any error if value is a valid sha' do
+ subject
+
+ expect(merge_diff.errors).to be_empty
+ end
+ end
+
+ context 'with invalid sha' do
+ let(:value) { 'foo' }
+
+ it 'adds error to the record' do
+ expect(merge_diff.errors).to be_empty
+
+ subject
+
+ expect(merge_diff.errors).not_to be_empty
+ end
+ end
+end
diff --git a/spec/views/ci/status/_icon.html.haml_spec.rb b/spec/views/ci/status/_icon.html.haml_spec.rb
new file mode 100644
index 00000000000..626159fc512
--- /dev/null
+++ b/spec/views/ci/status/_icon.html.haml_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe 'ci/status/_icon' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :private) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ context 'when rendering status for build' do
+ let(:build) do
+ create(:ci_build, :success, pipeline: pipeline)
+ end
+
+ context 'when user has ability to see details' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'has link to build details page' do
+ details_path = project_job_path(project, build)
+
+ render_status(build)
+
+ expect(rendered).to have_link(href: details_path)
+ end
+ end
+
+ context 'when user does not have ability to see build details' do
+ before do
+ render_status(build)
+ end
+
+ it 'contains build status icon' do
+ expect(rendered).to have_css('.ci-status-icon.ci-status-icon-success')
+ end
+
+ it 'does not contain links' do
+ expect(rendered).not_to have_link
+ end
+ end
+ end
+
+ context 'when rendering status for external job' do
+ context 'when user has ability to see commit status details' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'status has external target url' do
+ before do
+ external_job = create(:generic_commit_status,
+ status: :running,
+ pipeline: pipeline,
+ target_url: 'http://gitlab.com')
+
+ render_status(external_job)
+ end
+
+ it 'contains valid commit status icon' do
+ expect(rendered).to have_css('.ci-status-icon.ci-status-icon-running')
+ end
+
+ it 'has link to external status page' do
+ expect(rendered).to have_link(href: 'http://gitlab.com')
+ end
+ end
+
+ context 'status does not have external target url' do
+ before do
+ external_job = create(:generic_commit_status, status: :canceled)
+
+ render_status(external_job)
+ end
+
+ it 'contains valid commit status icon' do
+ expect(rendered).to have_css('.ci-status-icon.ci-status-icon-canceled')
+ end
+
+ it 'does not have link to external status page' do
+ expect(rendered).not_to have_link
+ end
+ end
+ end
+ end
+
+ def render_status(resource)
+ render 'ci/status/icon', status: resource.detailed_status(user)
+ end
+end
diff --git a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
index 05c2f61a606..bf63021a7fa 100644
--- a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
@@ -26,6 +26,8 @@ describe 'layouts/nav/sidebar/_admin' do
it_behaves_like 'page has active tab', 'Overview'
end
+ it_behaves_like 'has nav sidebar'
+
context 'on projects' do
before do
allow(controller).to receive(:controller_name).and_return('projects')
diff --git a/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
new file mode 100644
index 00000000000..24b66a0e767
--- /dev/null
+++ b/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'layouts/nav/sidebar/_group' do
+ let(:group) { create(:group) }
+
+ before do
+ assign(:group, group)
+ end
+
+ it_behaves_like 'has nav sidebar'
+end
diff --git a/spec/views/layouts/nav/sidebar/_instance_statistics.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_instance_statistics.html.haml_spec.rb
new file mode 100644
index 00000000000..7f7f5637035
--- /dev/null
+++ b/spec/views/layouts/nav/sidebar/_instance_statistics.html.haml_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'layouts/nav/sidebar/_instance_statistics' do
+ it_behaves_like 'has nav sidebar'
+end
diff --git a/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb
new file mode 100644
index 00000000000..6b820ab0b4c
--- /dev/null
+++ b/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'layouts/nav/sidebar/_profile' do
+ let(:user) { create(:user) }
+
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ end
+
+ it_behaves_like 'has nav sidebar'
+end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index d9f05e5f94f..2c60ccfb754 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -11,6 +11,8 @@ describe 'layouts/nav/sidebar/_project' do
allow(view).to receive(:can?).and_return(true)
end
+ it_behaves_like 'has nav sidebar'
+
describe 'issue boards' do
it 'has board tab' do
render
diff --git a/spec/views/projects/deployments/_confirm_rollback_modal_spec.html.rb b/spec/views/projects/deployments/_confirm_rollback_modal_spec.html.rb
new file mode 100644
index 00000000000..54ec4f32856
--- /dev/null
+++ b/spec/views/projects/deployments/_confirm_rollback_modal_spec.html.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'projects/deployments/_confirm_rollback_modal' do
+ let(:environment) { create(:environment, :with_review_app) }
+ let(:deployments) { environment.deployments }
+ let(:project) { environment.project }
+
+ before do
+ assign(:environment, environment)
+ assign(:deployments, deployments)
+ assign(:project, project)
+ end
+
+ context 'when re-deploying last deployment' do
+ let(:deployment) { deployments.first }
+
+ before do
+ allow(view).to receive(:deployment).and_return(deployment)
+ end
+
+ it 'shows "re-deploy"' do
+ render
+
+ expect(rendered).to have_selector('h4', text: "Re-deploy environment #{environment.name}?")
+ expect(rendered).to have_selector('p', text: "This action will relaunch the job for commit #{deployment.short_sha}, putting the environment in a previous version. Are you sure you want to continue?")
+ expect(rendered).to have_selector('a.btn-danger', text: 'Re-deploy')
+ end
+
+ it 'links to re-deploying the environment' do
+ expected_link = retry_project_job_path(environment.project, deployment.deployable)
+
+ render
+
+ expect(rendered).to have_selector("a[href='#{expected_link}']", text: 'Re-deploy')
+ end
+ end
+
+ context 'when rolling back to previous deployment' do
+ let(:deployment) { create(:deployment, environment: environment) }
+
+ before do
+ allow(view).to receive(:deployment).and_return(deployment)
+ end
+
+ it 'shows "rollback"' do
+ render
+
+ expect(rendered).to have_selector('h4', text: "Rollback environment #{environment.name}?")
+ expect(rendered).to have_selector('p', text: "This action will run the job defined by staging for commit #{deployment.short_sha}, putting the environment in a previous version. You can revert it by re-deploying the latest version of your application. Are you sure you want to continue?")
+ expect(rendered).to have_selector('a.btn-danger', text: 'Rollback')
+ end
+
+ it 'links to rolling back the environment' do
+ expected_link = retry_project_job_path(environment.project, deployment.deployable)
+
+ render
+
+ expect(rendered).to have_selector("a[href='#{expected_link}']", text: 'Rollback')
+ end
+ end
+end
diff --git a/spec/views/projects/issues/_merge_requests_status.html.haml_spec.rb b/spec/views/projects/issues/_merge_requests_status.html.haml_spec.rb
index 02c225292ce..9424795749d 100644
--- a/spec/views/projects/issues/_merge_requests_status.html.haml_spec.rb
+++ b/spec/views/projects/issues/_merge_requests_status.html.haml_spec.rb
@@ -2,6 +2,12 @@
require 'spec_helper'
describe 'projects/issues/_merge_requests_status.html.haml' do
+ around do |ex|
+ Timecop.freeze(Date.new(2018, 7, 22)) do
+ ex.run
+ end
+ end
+
it 'shows date of status change in tooltip' do
merge_request = create(:merge_request, created_at: 1.month.ago)
diff --git a/spec/views/projects/settings/operations/show.html.haml_spec.rb b/spec/views/projects/settings/operations/show.html.haml_spec.rb
index 8e34521c7c8..1bca8bba940 100644
--- a/spec/views/projects/settings/operations/show.html.haml_spec.rb
+++ b/spec/views/projects/settings/operations/show.html.haml_spec.rb
@@ -30,7 +30,6 @@ describe 'projects/settings/operations/show' do
expect(rendered).to have_content _('Error Tracking')
expect(rendered).to have_content _('To link Sentry to GitLab, enter your Sentry URL and Auth Token')
- expect(rendered).to have_content _('Active')
end
end
end
diff --git a/spec/workers/project_migrate_hashed_storage_worker_spec.rb b/spec/workers/hashed_storage/project_migrate_worker_spec.rb
index 333eb6a0569..340e722aa7e 100644
--- a/spec/workers/project_migrate_hashed_storage_worker_spec.rb
+++ b/spec/workers/hashed_storage/project_migrate_worker_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe ProjectMigrateHashedStorageWorker, :clean_gitlab_redis_shared_state do
+describe HashedStorage::ProjectMigrateWorker, :clean_gitlab_redis_shared_state do
include ExclusiveLeaseHelpers
describe '#perform' do
diff --git a/spec/workers/hashed_storage/project_rollback_worker_spec.rb b/spec/workers/hashed_storage/project_rollback_worker_spec.rb
new file mode 100644
index 00000000000..d833553c0ec
--- /dev/null
+++ b/spec/workers/hashed_storage/project_rollback_worker_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe HashedStorage::ProjectRollbackWorker, :clean_gitlab_redis_shared_state do
+ include ExclusiveLeaseHelpers
+
+ describe '#perform' do
+ let(:project) { create(:project, :empty_repo) }
+ let(:lease_key) { "project_migrate_hashed_storage_worker:#{project.id}" }
+ let(:lease_timeout) { described_class::LEASE_TIMEOUT }
+ let(:rollback_service) { ::Projects::HashedStorage::RollbackService }
+
+ it 'skips when project no longer exists' do
+ expect(rollback_service).not_to receive(:new)
+
+ subject.perform(-1)
+ end
+
+ it 'skips when project is pending delete' do
+ pending_delete_project = create(:project, :empty_repo, pending_delete: true)
+
+ expect(rollback_service).not_to receive(:new)
+
+ subject.perform(pending_delete_project.id)
+ end
+
+ it 'delegates rollback to service class when it has the exclusive lease' do
+ stub_exclusive_lease(lease_key, 'uuid', timeout: lease_timeout)
+
+ service_spy = spy
+
+ allow(rollback_service)
+ .to receive(:new).with(project, project.disk_path, logger: subject.logger)
+ .and_return(service_spy)
+
+ subject.perform(project.id)
+
+ expect(service_spy).to have_received(:execute)
+ end
+
+ it 'skips when it cannot acquire the exclusive lease' do
+ stub_exclusive_lease_taken(lease_key, timeout: lease_timeout)
+
+ expect(rollback_service).not_to receive(:new)
+
+ subject.perform(project.id)
+ end
+ end
+end
diff --git a/spec/workers/hashed_storage/rollbacker_worker_spec.rb b/spec/workers/hashed_storage/rollbacker_worker_spec.rb
new file mode 100644
index 00000000000..4055f380978
--- /dev/null
+++ b/spec/workers/hashed_storage/rollbacker_worker_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe HashedStorage::RollbackerWorker do
+ subject(:worker) { described_class.new }
+ let(:projects) { create_list(:project, 2, :empty_repo) }
+ let(:ids) { projects.map(&:id) }
+
+ describe '#perform' do
+ it 'delegates to Gitlab::HashedStorage::Migrator' do
+ expect_any_instance_of(Gitlab::HashedStorage::Migrator).to receive(:bulk_rollback).with(start: 5, finish: 10)
+
+ worker.perform(5, 10)
+ end
+
+ it 'rolls back projects in the specified range' do
+ perform_enqueued_jobs do
+ worker.perform(ids.min, ids.max)
+ end
+
+ projects.each do |project|
+ expect(project.reload.legacy_storage?).to be_truthy
+ end
+ end
+ end
+end
diff --git a/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb b/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb
index 963237ceadf..d20d926f5a0 100644
--- a/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb
+++ b/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb
@@ -18,7 +18,7 @@ describe UpdateHeadPipelineForMergeRequestWorker do
context 'when merge request sha does not equal pipeline sha' do
before do
- merge_request.merge_request_diff.update(head_commit_sha: 'different_sha')
+ merge_request.merge_request_diff.update(head_commit_sha: Digest::SHA1.hexdigest(SecureRandom.hex))
end
it 'does not update head pipeline' do
@@ -39,7 +39,7 @@ describe UpdateHeadPipelineForMergeRequestWorker do
let!(:merge_request_pipeline) do
create(:ci_pipeline,
project: project,
- source: :merge_request,
+ source: :merge_request_event,
sha: latest_sha,
merge_request: merge_request)
end