Diffstat (limited to 'spec')
-rw-r--r-- spec/controllers/admin/projects_controller_spec.rb | 10
-rw-r--r-- spec/controllers/autocomplete_controller_spec.rb | 31
-rw-r--r-- spec/controllers/dashboard/projects_controller_spec.rb | 26
-rw-r--r-- spec/controllers/explore/projects_controller_spec.rb | 30
-rw-r--r-- spec/controllers/groups/boards_controller_spec.rb | 22
-rw-r--r-- spec/controllers/groups/settings/ci_cd_controller_spec.rb | 73
-rw-r--r-- spec/controllers/groups_controller_spec.rb | 37
-rw-r--r-- spec/controllers/omniauth_callbacks_controller_spec.rb | 27
-rw-r--r-- spec/controllers/projects/boards_controller_spec.rb | 22
-rw-r--r-- spec/controllers/projects/git_http_controller_spec.rb | 15
-rw-r--r-- spec/controllers/projects/notes_controller_spec.rb | 31
-rw-r--r-- spec/controllers/projects_controller_spec.rb | 17
-rw-r--r-- spec/controllers/registrations_controller_spec.rb | 6
-rw-r--r-- spec/factories/ci/builds.rb | 4
-rw-r--r-- spec/factories/ci/pipelines.rb | 14
-rw-r--r-- spec/factories/clusters/clusters.rb | 4
-rw-r--r-- spec/factories/clusters/providers/gcp.rb | 4
-rw-r--r-- spec/factories/commit_statuses.rb | 4
-rw-r--r-- spec/factories/groups.rb | 8
-rw-r--r-- spec/factories/merge_requests.rb | 25
-rw-r--r-- spec/factories/projects.rb | 4
-rw-r--r-- spec/features/clusters/cluster_detail_page_spec.rb | 6
-rw-r--r-- spec/features/groups/settings/ci_cd_spec.rb | 45
-rw-r--r-- spec/features/issues/gfm_autocomplete_spec.rb | 84
-rw-r--r-- spec/features/issues_spec.rb | 11
-rw-r--r-- spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb | 78
-rw-r--r-- spec/features/merge_request/user_sees_merge_widget_spec.rb | 113
-rw-r--r-- spec/features/merge_request/user_views_diffs_spec.rb | 10
-rw-r--r-- spec/features/merge_requests/user_filters_by_target_branch_spec.rb | 45
-rw-r--r-- spec/features/projects/badges/pipeline_badge_spec.rb | 19
-rw-r--r-- spec/features/projects/clusters/applications_spec.rb | 2
-rw-r--r-- spec/features/projects/jobs_spec.rb | 109
-rw-r--r-- spec/features/projects/pipelines/pipeline_spec.rb | 146
-rw-r--r-- spec/features/projects/pipelines/pipelines_spec.rb | 119
-rw-r--r-- spec/features/projects/settings/pipelines_settings_spec.rb | 31
-rw-r--r-- spec/features/projects/show/user_sees_git_instructions_spec.rb | 2
-rw-r--r-- spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb | 58
-rw-r--r-- spec/features/search/user_searches_for_users_spec.rb | 83
-rw-r--r-- spec/features/security/group/private_access_spec.rb | 15
-rw-r--r-- spec/features/user_opens_link_to_comment.rb | 33
-rw-r--r-- spec/features/users/login_spec.rb | 29
-rw-r--r-- spec/finders/group_projects_finder_spec.rb | 39
-rw-r--r-- spec/finders/issues_finder_spec.rb | 83
-rw-r--r-- spec/finders/labels_finder_spec.rb | 6
-rw-r--r-- spec/finders/merge_requests_finder_spec.rb | 139
-rw-r--r-- spec/finders/snippets_finder_spec.rb | 221
-rw-r--r-- spec/finders/users_finder_spec.rb | 31
-rw-r--r-- spec/fixtures/api/schemas/board.json | 3
-rw-r--r-- spec/fixtures/api/schemas/entities/issue.json | 3
-rw-r--r-- spec/fixtures/api/schemas/entities/issue_boards.json | 3
-rw-r--r-- spec/fixtures/api/schemas/entities/merge_request_widget.json | 4
-rw-r--r-- spec/fixtures/api/schemas/issue.json | 5
-rw-r--r-- spec/fixtures/api/schemas/issues.json | 3
-rw-r--r-- spec/fixtures/api/schemas/public_api/v4/merge_request.json | 124
-rw-r--r-- spec/fixtures/api/schemas/public_api/v4/merge_requests.json | 122
-rw-r--r-- spec/frontend/behaviors/secret_values_spec.js (renamed from spec/javascripts/behaviors/secret_values_spec.js) | 0
-rw-r--r-- spec/frontend/blob/blob_fork_suggestion_spec.js (renamed from spec/javascripts/blob/blob_fork_suggestion_spec.js) | 0
-rw-r--r-- spec/frontend/boards/modal_store_spec.js (renamed from spec/javascripts/boards/modal_store_spec.js) | 0
-rw-r--r-- spec/frontend/cycle_analytics/limit_warning_component_spec.js (renamed from spec/javascripts/cycle_analytics/limit_warning_component_spec.js) | 0
-rw-r--r-- spec/frontend/diffs/components/diff_stats_spec.js (renamed from spec/javascripts/diffs/components/diff_stats_spec.js) | 0
-rw-r--r-- spec/frontend/diffs/components/edit_button_spec.js (renamed from spec/javascripts/diffs/components/edit_button_spec.js) | 0
-rw-r--r-- spec/frontend/diffs/components/hidden_files_warning_spec.js (renamed from spec/javascripts/diffs/components/hidden_files_warning_spec.js) | 0
-rw-r--r-- spec/frontend/diffs/components/no_changes_spec.js (renamed from spec/javascripts/diffs/components/no_changes_spec.js) | 0
-rw-r--r-- spec/frontend/error_tracking/components/error_tracking_list_spec.js (renamed from spec/javascripts/error_tracking/components/error_tracking_list_spec.js) | 0
-rw-r--r-- spec/frontend/error_tracking/store/mutation_spec.js (renamed from spec/javascripts/error_tracking/store/mutation_spec.js) | 0
-rw-r--r-- spec/frontend/filtered_search/filtered_search_token_keys_spec.js (renamed from spec/javascripts/filtered_search/filtered_search_token_keys_spec.js) | 0
-rw-r--r-- spec/frontend/filtered_search/services/recent_searches_service_error_spec.js (renamed from spec/javascripts/filtered_search/services/recent_searches_service_error_spec.js) | 0
-rw-r--r-- spec/frontend/filtered_search/stores/recent_searches_store_spec.js (renamed from spec/javascripts/filtered_search/stores/recent_searches_store_spec.js) | 0
-rw-r--r-- spec/frontend/frequent_items/store/getters_spec.js (renamed from spec/javascripts/frequent_items/store/getters_spec.js) | 0
-rw-r--r-- spec/frontend/gfm_auto_complete_spec.js | 103
-rw-r--r-- spec/frontend/helpers/fixtures.js | 24
-rw-r--r-- spec/frontend/helpers/timeout.js | 24
-rw-r--r-- spec/frontend/helpers/vue_mount_component_helper.js | 38
-rw-r--r-- spec/frontend/ide/lib/common/disposable_spec.js (renamed from spec/javascripts/ide/lib/common/disposable_spec.js) | 0
-rw-r--r-- spec/frontend/ide/lib/diff/diff_spec.js (renamed from spec/javascripts/ide/lib/diff/diff_spec.js) | 0
-rw-r--r-- spec/frontend/ide/lib/editor_options_spec.js (renamed from spec/javascripts/ide/lib/editor_options_spec.js) | 0
-rw-r--r-- spec/frontend/ide/lib/files_spec.js (renamed from spec/javascripts/ide/lib/files_spec.js) | 0
-rw-r--r-- spec/frontend/ide/stores/modules/commit/mutations_spec.js (renamed from spec/javascripts/ide/stores/modules/commit/mutations_spec.js) | 0
-rw-r--r-- spec/frontend/ide/stores/modules/file_templates/getters_spec.js (renamed from spec/javascripts/ide/stores/modules/file_templates/getters_spec.js) | 0
-rw-r--r-- spec/frontend/ide/stores/modules/file_templates/mutations_spec.js (renamed from spec/javascripts/ide/stores/modules/file_templates/mutations_spec.js) | 0
-rw-r--r-- spec/frontend/ide/stores/modules/pane/getters_spec.js (renamed from spec/javascripts/ide/stores/modules/pane/getters_spec.js) | 0
-rw-r--r-- spec/frontend/ide/stores/modules/pane/mutations_spec.js (renamed from spec/javascripts/ide/stores/modules/pane/mutations_spec.js) | 0
-rw-r--r-- spec/frontend/ide/stores/modules/pipelines/getters_spec.js (renamed from spec/javascripts/ide/stores/modules/pipelines/getters_spec.js) | 0
-rw-r--r-- spec/frontend/ide/stores/mutations/branch_spec.js (renamed from spec/javascripts/ide/stores/mutations/branch_spec.js) | 0
-rw-r--r-- spec/frontend/ide/stores/mutations/merge_request_spec.js (renamed from spec/javascripts/ide/stores/mutations/merge_request_spec.js) | 0
-rw-r--r-- spec/frontend/image_diff/view_types_spec.js (renamed from spec/javascripts/image_diff/view_types_spec.js) | 0
-rw-r--r-- spec/frontend/import_projects/store/getters_spec.js (renamed from spec/javascripts/import_projects/store/getters_spec.js) | 0
-rw-r--r-- spec/frontend/import_projects/store/mutations_spec.js (renamed from spec/javascripts/import_projects/store/mutations_spec.js) | 0
-rw-r--r-- spec/frontend/jobs/components/empty_state_spec.js (renamed from spec/javascripts/jobs/components/empty_state_spec.js) | 0
-rw-r--r-- spec/frontend/jobs/components/erased_block_spec.js (renamed from spec/javascripts/jobs/components/erased_block_spec.js) | 0
-rw-r--r-- spec/frontend/jobs/components/sidebar_detail_row_spec.js (renamed from spec/javascripts/jobs/components/sidebar_detail_row_spec.js) | 0
-rw-r--r-- spec/frontend/jobs/components/stuck_block_spec.js (renamed from spec/javascripts/jobs/components/stuck_block_spec.js) | 0
-rw-r--r-- spec/frontend/jobs/store/getters_spec.js (renamed from spec/javascripts/jobs/store/getters_spec.js) | 0
-rw-r--r-- spec/frontend/jobs/store/mutations_spec.js (renamed from spec/javascripts/jobs/store/mutations_spec.js) | 0
-rw-r--r-- spec/frontend/labels_select_spec.js (renamed from spec/javascripts/labels_select_spec.js) | 0
-rw-r--r-- spec/frontend/lib/utils/autosave_spec.js | 64
-rw-r--r-- spec/frontend/lib/utils/cache_spec.js (renamed from spec/javascripts/lib/utils/cache_spec.js) | 0
-rw-r--r-- spec/frontend/lib/utils/grammar_spec.js (renamed from spec/javascripts/lib/utils/grammar_spec.js) | 0
-rw-r--r-- spec/frontend/lib/utils/image_utility_spec.js (renamed from spec/javascripts/lib/utils/image_utility_spec.js) | 0
-rw-r--r-- spec/frontend/lib/utils/number_utility_spec.js (renamed from spec/javascripts/lib/utils/number_utility_spec.js) | 0
-rw-r--r-- spec/frontend/lib/utils/text_utility_spec.js (renamed from spec/javascripts/lib/utils/text_utility_spec.js) | 0
-rw-r--r-- spec/frontend/locale/ensure_single_line_spec.js (renamed from spec/javascripts/locale/ensure_single_line_spec.js) | 0
-rw-r--r-- spec/frontend/locale/sprintf_spec.js (renamed from spec/javascripts/locale/sprintf_spec.js) | 0
-rw-r--r-- spec/frontend/notebook/lib/highlight_spec.js (renamed from spec/javascripts/notebook/lib/highlight_spec.js) | 0
-rw-r--r-- spec/frontend/notes/components/discussion_reply_placeholder_spec.js (renamed from spec/javascripts/notes/components/discussion_reply_placeholder_spec.js) | 0
-rw-r--r-- spec/frontend/notes/components/discussion_resolve_button_spec.js (renamed from spec/javascripts/notes/components/discussion_resolve_button_spec.js) | 0
-rw-r--r-- spec/frontend/notes/components/note_attachment_spec.js (renamed from spec/javascripts/notes/components/note_attachment_spec.js) | 0
-rw-r--r-- spec/frontend/notes/components/note_edited_text_spec.js (renamed from spec/javascripts/notes/components/note_edited_text_spec.js) | 0
-rw-r--r-- spec/frontend/performance_bar/services/performance_bar_service_spec.js (renamed from spec/javascripts/performance_bar/services/performance_bar_service_spec.js) | 0
-rw-r--r-- spec/frontend/pipelines/blank_state_spec.js (renamed from spec/javascripts/pipelines/blank_state_spec.js) | 0
-rw-r--r-- spec/frontend/pipelines/empty_state_spec.js (renamed from spec/javascripts/pipelines/empty_state_spec.js) | 0
-rw-r--r-- spec/frontend/pipelines/pipeline_store_spec.js (renamed from spec/javascripts/pipelines/pipeline_store_spec.js) | 0
-rw-r--r-- spec/frontend/pipelines/pipelines_store_spec.js (renamed from spec/javascripts/pipelines/pipelines_store_spec.js) | 0
-rw-r--r-- spec/frontend/registry/getters_spec.js (renamed from spec/javascripts/registry/getters_spec.js) | 0
-rw-r--r-- spec/frontend/reports/components/report_link_spec.js (renamed from spec/javascripts/reports/components/report_link_spec.js) | 0
-rw-r--r-- spec/frontend/reports/store/utils_spec.js (renamed from spec/javascripts/reports/store/utils_spec.js) | 0
-rw-r--r-- spec/frontend/sidebar/confidential_edit_buttons_spec.js (renamed from spec/javascripts/sidebar/confidential_edit_buttons_spec.js) | 0
-rw-r--r-- spec/frontend/sidebar/confidential_edit_form_buttons_spec.js (renamed from spec/javascripts/sidebar/confidential_edit_form_buttons_spec.js) | 0
-rw-r--r-- spec/frontend/sidebar/lock/edit_form_spec.js (renamed from spec/javascripts/sidebar/lock/edit_form_spec.js) | 0
-rw-r--r-- spec/frontend/test_setup.js | 19
-rw-r--r-- spec/frontend/u2f/util_spec.js (renamed from spec/javascripts/u2f/util_spec.js) | 0
-rw-r--r-- spec/frontend/vue_mr_widget/components/mr_widget_container_spec.js (renamed from spec/javascripts/vue_mr_widget/components/mr_widget_container_spec.js) | 0
-rw-r--r-- spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js (renamed from spec/javascripts/vue_mr_widget/components/mr_widget_icon_spec.js) | 0
-rw-r--r-- spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js (renamed from spec/javascripts/vue_mr_widget/components/states/commit_edit_spec.js) | 0
-rw-r--r-- spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js (renamed from spec/javascripts/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js) | 0
-rw-r--r-- spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js (renamed from spec/javascripts/vue_mr_widget/components/states/mr_widget_commits_header_spec.js) | 26
-rw-r--r-- spec/frontend/vue_mr_widget/stores/get_state_key_spec.js (renamed from spec/javascripts/vue_mr_widget/stores/get_state_key_spec.js) | 0
-rw-r--r-- spec/frontend/vue_shared/components/callout_spec.js (renamed from spec/javascripts/vue_shared/components/callout_spec.js) | 0
-rw-r--r-- spec/frontend/vue_shared/components/code_block_spec.js (renamed from spec/javascripts/vue_shared/components/code_block_spec.js) | 0
-rw-r--r-- spec/frontend/vue_shared/components/diff_viewer/viewers/mode_changed_spec.js (renamed from spec/javascripts/vue_shared/components/diff_viewer/viewers/mode_changed_spec.js) | 0
-rw-r--r-- spec/frontend/vue_shared/components/identicon_spec.js (renamed from spec/javascripts/vue_shared/components/identicon_spec.js) | 0
-rw-r--r-- spec/frontend/vue_shared/components/lib/utils/dom_utils_spec.js (renamed from spec/javascripts/vue_shared/components/lib/utils/dom_utils_spec.js) | 0
-rw-r--r-- spec/frontend/vue_shared/components/pagination_links_spec.js (renamed from spec/javascripts/vue_shared/components/pagination_links_spec.js) | 0
-rw-r--r-- spec/frontend/vue_shared/components/time_ago_tooltip_spec.js (renamed from spec/javascripts/vue_shared/components/time_ago_tooltip_spec.js) | 0
-rw-r--r-- spec/frontend/vuex_shared/modules/modal/mutations_spec.js (renamed from spec/javascripts/vuex_shared/modules/modal/mutations_spec.js) | 0
-rw-r--r-- spec/helpers/auth_helper_spec.rb | 38
-rw-r--r-- spec/helpers/auto_devops_helper_spec.rb | 138
-rw-r--r-- spec/helpers/clusters_helper_spec.rb | 33
-rw-r--r-- spec/javascripts/badges/components/badge_list_spec.js | 2
-rw-r--r-- spec/javascripts/badges/components/badge_spec.js | 2
-rw-r--r-- spec/javascripts/boards/board_list_spec.js | 2
-rw-r--r-- spec/javascripts/clusters/clusters_bundle_spec.js | 80
-rw-r--r-- spec/javascripts/clusters/components/applications_spec.js | 10
-rw-r--r-- spec/javascripts/diffs/components/app_spec.js | 57
-rw-r--r-- spec/javascripts/diffs/components/diff_file_spec.js | 25
-rw-r--r-- spec/javascripts/diffs/store/actions_spec.js | 22
-rw-r--r-- spec/javascripts/diffs/store/mutations_spec.js | 18
-rw-r--r-- spec/javascripts/environments/environment_item_spec.js | 20
-rw-r--r-- spec/javascripts/environments/environment_table_spec.js | 14
-rw-r--r-- spec/javascripts/environments/environments_app_spec.js | 5
-rw-r--r-- spec/javascripts/environments/environments_store_spec.js | 74
-rw-r--r-- spec/javascripts/environments/folder/environments_folder_view_spec.js | 14
-rw-r--r-- spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js | 4
-rw-r--r-- spec/javascripts/filtered_search/visual_token_value_spec.js | 30
-rw-r--r-- spec/javascripts/fixtures/ajax_loading_spinner.html.haml | 2
-rw-r--r-- spec/javascripts/fixtures/autocomplete_sources.rb | 40
-rw-r--r-- spec/javascripts/fixtures/balsamiq_viewer.html.haml | 1
-rw-r--r-- spec/javascripts/fixtures/create_item_dropdown.html.haml | 13
-rw-r--r-- spec/javascripts/fixtures/emojis.rb | 18
-rw-r--r-- spec/javascripts/fixtures/event_filter.html.haml | 25
-rw-r--r-- spec/javascripts/fixtures/gl_dropdown.html.haml | 17
-rw-r--r-- spec/javascripts/fixtures/gl_field_errors.html.haml | 15
-rw-r--r-- spec/javascripts/fixtures/issuable_filter.html.haml | 8
-rw-r--r-- spec/javascripts/fixtures/issue_sidebar_label.html.haml | 16
-rw-r--r-- spec/javascripts/fixtures/line_highlighter.html.haml | 11
-rw-r--r-- spec/javascripts/fixtures/linked_tabs.html.haml | 13
-rw-r--r-- spec/javascripts/fixtures/merge_requests_show.html.haml | 13
-rw-r--r-- spec/javascripts/fixtures/mini_dropdown_graph.html.haml | 10
-rw-r--r-- spec/javascripts/fixtures/notebook_viewer.html.haml | 1
-rw-r--r-- spec/javascripts/fixtures/oauth_remember_me.html.haml | 6
-rw-r--r-- spec/javascripts/fixtures/pdf_viewer.html.haml | 1
-rw-r--r-- spec/javascripts/fixtures/pipeline_graph.html.haml | 14
-rw-r--r-- spec/javascripts/fixtures/pipelines.html.haml | 12
-rw-r--r-- spec/javascripts/fixtures/project_select_combo_button.html.haml | 6
-rw-r--r-- spec/javascripts/fixtures/search_autocomplete.html.haml | 9
-rw-r--r-- spec/javascripts/fixtures/signin_tabs.html.haml | 5
-rw-r--r-- spec/javascripts/fixtures/sketch_viewer.html.haml | 2
-rw-r--r-- spec/javascripts/fixtures/static/README.md | 3
-rw-r--r-- spec/javascripts/fixtures/static/ajax_loading_spinner.html.raw | 3
-rw-r--r-- spec/javascripts/fixtures/static/balsamiq_viewer.html.raw | 1
-rw-r--r-- spec/javascripts/fixtures/static/create_item_dropdown.html.raw | 11
-rw-r--r-- spec/javascripts/fixtures/static/event_filter.html.raw | 44
-rw-r--r-- spec/javascripts/fixtures/static/gl_dropdown.html.raw | 26
-rw-r--r-- spec/javascripts/fixtures/static/gl_field_errors.html.raw | 22
-rw-r--r-- spec/javascripts/fixtures/static/issuable_filter.html.raw | 9
-rw-r--r-- spec/javascripts/fixtures/static/issue_sidebar_label.html.raw | 26
-rw-r--r-- spec/javascripts/fixtures/static/line_highlighter.html.raw | 107
-rw-r--r-- spec/javascripts/fixtures/static/linked_tabs.html.raw | 20
-rw-r--r-- spec/javascripts/fixtures/static/merge_requests_show.html.raw | 15
-rw-r--r-- spec/javascripts/fixtures/static/mini_dropdown_graph.html.raw | 13
-rw-r--r-- spec/javascripts/fixtures/static/notebook_viewer.html.raw | 1
-rw-r--r-- spec/javascripts/fixtures/static/oauth_remember_me.html.raw | 6
-rw-r--r-- spec/javascripts/fixtures/static/pdf_viewer.html.raw | 1
-rw-r--r-- spec/javascripts/fixtures/static/pipeline_graph.html.raw | 24
-rw-r--r-- spec/javascripts/fixtures/static/pipelines.html.raw | 3
-rw-r--r-- spec/javascripts/fixtures/static/project_select_combo_button.html.raw | 9
-rw-r--r-- spec/javascripts/fixtures/static/search_autocomplete.html.raw | 15
-rw-r--r-- spec/javascripts/fixtures/static/signin_tabs.html.raw | 8
-rw-r--r-- spec/javascripts/fixtures/static/sketch_viewer.html.raw | 3
-rw-r--r-- spec/javascripts/fixtures/static_fixtures.rb | 24
-rw-r--r-- spec/javascripts/frequent_items/components/app_spec.js | 2
-rw-r--r-- spec/javascripts/groups/components/app_spec.js | 2
-rw-r--r-- spec/javascripts/helpers/filtered_search_spec_helper.js | 2
-rw-r--r-- spec/javascripts/ide/components/commit_sidebar/radio_group_spec.js | 7
-rw-r--r-- spec/javascripts/ide/components/new_dropdown/modal_spec.js | 12
-rw-r--r-- spec/javascripts/ide/stores/modules/commit/actions_spec.js | 2
-rw-r--r-- spec/javascripts/ide/stores/modules/commit/getters_spec.js | 8
-rw-r--r-- spec/javascripts/jobs/components/stages_dropdown_spec.js | 188
-rw-r--r-- spec/javascripts/notes/components/note_form_spec.js | 94
-rw-r--r-- spec/javascripts/notes/components/noteable_discussion_spec.js | 14
-rw-r--r-- spec/javascripts/pipelines/graph/stage_column_component_spec.js | 2
-rw-r--r-- spec/javascripts/pipelines/pipeline_url_spec.js | 12
-rw-r--r-- spec/javascripts/registry/components/app_spec.js | 2
-rw-r--r-- spec/javascripts/reports/components/grouped_test_reports_app_spec.js | 10
-rw-r--r-- spec/javascripts/test_bundle.js | 72
-rw-r--r-- spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js | 83
-rw-r--r-- spec/javascripts/vue_mr_widget/components/mr_widget_status_icon_spec.js | 2
-rw-r--r-- spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js | 2
-rw-r--r-- spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js | 2
-rw-r--r-- spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js | 141
-rw-r--r-- spec/javascripts/vue_mr_widget/mock_data.js | 11
-rw-r--r-- spec/javascripts/vue_shared/components/commit_spec.js | 90
-rw-r--r-- spec/javascripts/vue_shared/components/file_icon_spec.js | 7
-rw-r--r-- spec/javascripts/vue_shared/components/header_ci_component_spec.js | 2
-rw-r--r-- spec/lib/backup/uploads_spec.rb | 18
-rw-r--r-- spec/lib/banzai/filter/output_safety_spec.rb | 29
-rw-r--r-- spec/lib/banzai/filter/suggestion_filter_spec.rb | 33
-rw-r--r-- spec/lib/banzai/filter/syntax_highlight_filter_spec.rb | 32
-rw-r--r-- spec/lib/gitlab/auth/o_auth/user_spec.rb | 11
-rw-r--r-- spec/lib/gitlab/authorized_keys_spec.rb | 194
-rw-r--r-- spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/badge/pipeline/template_spec.rb | 10
-rw-r--r-- spec/lib/gitlab/bitbucket_import/importer_spec.rb | 20
-rw-r--r-- spec/lib/gitlab/checks/branch_check_spec.rb | 80
-rw-r--r-- spec/lib/gitlab/ci/build/policy/refs_spec.rb | 13
-rw-r--r-- spec/lib/gitlab/ci/build/prerequisite/factory_spec.rb | 34
-rw-r--r-- spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb | 85
-rw-r--r-- spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/status/build/preparing_spec.rb | 33
-rw-r--r-- spec/lib/gitlab/ci/status/preparing_spec.rb | 29
-rw-r--r-- spec/lib/gitlab/ci/trace/stream_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/danger/teammate_spec.rb | 39
-rw-r--r-- spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb | 19
-rw-r--r-- spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb | 16
-rw-r--r-- spec/lib/gitlab/database_spec.rb | 52
-rw-r--r-- spec/lib/gitlab/diff/file_spec.rb | 7
-rw-r--r-- spec/lib/gitlab/diff/suggestion_diff_spec.rb | 55
-rw-r--r-- spec/lib/gitlab/fake_application_settings_spec.rb | 31
-rw-r--r-- spec/lib/gitlab/git/repository_cleaner_spec.rb | 12
-rw-r--r-- spec/lib/gitlab/git/repository_spec.rb | 5
-rw-r--r-- spec/lib/gitlab/gitaly_client_spec.rb | 38
-rw-r--r-- spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb | 5
-rw-r--r-- spec/lib/gitlab/github_import/importer/repository_importer_spec.rb | 11
-rw-r--r-- spec/lib/gitlab/group_search_results_spec.rb | 69
-rw-r--r-- spec/lib/gitlab/hashed_storage/migrator_spec.rb | 52
-rw-r--r-- spec/lib/gitlab/json_cache_spec.rb | 54
-rw-r--r-- spec/lib/gitlab/kubernetes_spec.rb | 34
-rw-r--r-- spec/lib/gitlab/middleware/basic_health_check_spec.rb | 29
-rw-r--r-- spec/lib/gitlab/project_search_results_spec.rb | 32
-rw-r--r-- spec/lib/gitlab/reference_extractor_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/request_context_spec.rb | 27
-rw-r--r-- spec/lib/gitlab/route_map_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/search_results_spec.rb | 16
-rw-r--r-- spec/lib/gitlab/shell_spec.rb | 584
-rw-r--r-- spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb | 72
-rw-r--r-- spec/lib/gitlab/untrusted_regexp_spec.rb | 74
-rw-r--r-- spec/lib/gitlab/usage_data_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/user_extractor_spec.rb | 20
-rw-r--r-- spec/lib/gitlab/utils_spec.rb | 18
-rw-r--r-- spec/lib/google_api/cloud_platform/client_spec.rb | 12
-rw-r--r-- spec/migrations/add_foreign_keys_to_todos_spec.rb | 2
-rw-r--r-- spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb | 24
-rw-r--r-- spec/migrations/calculate_conv_dev_index_percentages_spec.rb | 22
-rw-r--r-- spec/migrations/cleanup_nonexisting_namespace_pending_delete_projects_spec.rb | 19
-rw-r--r-- spec/migrations/delete_inconsistent_internal_id_records_spec.rb | 83
-rw-r--r-- spec/migrations/issues_moved_to_id_foreign_key_spec.rb | 15
-rw-r--r-- spec/migrations/migrate_old_artifacts_spec.rb | 32
-rw-r--r-- spec/migrations/migrate_user_activities_to_users_last_activity_on_spec.rb | 6
-rw-r--r-- spec/migrations/migrate_user_project_view_spec.rb | 6
-rw-r--r-- spec/migrations/move_personal_snippets_files_spec.rb | 35
-rw-r--r-- spec/models/application_setting_spec.rb | 232
-rw-r--r-- spec/models/broadcast_message_spec.rb | 6
-rw-r--r-- spec/models/ci/build_spec.rb | 91
-rw-r--r-- spec/models/ci/pipeline_spec.rb | 262
-rw-r--r-- spec/models/clusters/applications/runner_spec.rb | 4
-rw-r--r-- spec/models/clusters/platforms/kubernetes_spec.rb | 6
-rw-r--r-- spec/models/commit_spec.rb | 16
-rw-r--r-- spec/models/commit_status_spec.rb | 23
-rw-r--r-- spec/models/concerns/cache_markdown_field_spec.rb | 14
-rw-r--r-- spec/models/concerns/has_ref_spec.rb | 4
-rw-r--r-- spec/models/concerns/has_status_spec.rb | 22
-rw-r--r-- spec/models/deployment_spec.rb | 28
-rw-r--r-- spec/models/diff_note_spec.rb | 10
-rw-r--r-- spec/models/environment_status_spec.rb | 4
-rw-r--r-- spec/models/group_spec.rb | 121
-rw-r--r-- spec/models/merge_request_diff_spec.rb | 21
-rw-r--r-- spec/models/merge_request_spec.rb | 99
-rw-r--r-- spec/models/namespace_spec.rb | 24
-rw-r--r-- spec/models/note_spec.rb | 18
-rw-r--r-- spec/models/project_services/kubernetes_service_spec.rb | 12
-rw-r--r-- spec/models/project_spec.rb | 182
-rw-r--r-- spec/models/project_wiki_spec.rb | 8
-rw-r--r-- spec/models/repository_spec.rb | 60
-rw-r--r-- spec/models/user_spec.rb | 62
-rw-r--r-- spec/policies/group_policy_spec.rb | 13
-rw-r--r-- spec/policies/identity_provider_policy_spec.rb | 30
-rw-r--r-- spec/policies/project_policy_spec.rb | 4
-rw-r--r-- spec/presenters/ci/pipeline_presenter_spec.rb | 134
-rw-r--r-- spec/presenters/merge_request_presenter_spec.rb | 70
-rw-r--r-- spec/rack_servers/puma_spec.rb | 8
-rw-r--r-- spec/requests/api/internal_spec.rb | 27
-rw-r--r-- spec/requests/api/merge_requests_spec.rb | 407
-rw-r--r-- spec/requests/api/project_clusters_spec.rb | 1
-rw-r--r-- spec/requests/api/releases_spec.rb | 38
-rw-r--r-- spec/requests/api/runner_spec.rb | 9
-rw-r--r-- spec/requests/api/search_spec.rb | 79
-rw-r--r-- spec/requests/api/users_spec.rb | 20
-rw-r--r-- spec/routing/api_routing_spec.rb | 14
-rw-r--r-- spec/routing/group_routing_spec.rb | 4
-rw-r--r-- spec/serializers/pipeline_entity_spec.rb | 25
-rw-r--r-- spec/serializers/pipeline_serializer_spec.rb | 10
-rw-r--r-- spec/services/auth/container_registry_authentication_service_spec.rb | 93
-rw-r--r-- spec/services/ci/prepare_build_service_spec.rb | 54
-rw-r--r-- spec/services/emails/create_service_spec.rb | 2
-rw-r--r-- spec/services/emails/destroy_service_spec.rb | 2
-rw-r--r-- spec/services/groups/auto_devops_service_spec.rb | 62
-rw-r--r-- spec/services/labels/available_labels_service_spec.rb | 86
-rw-r--r-- spec/services/projects/create_service_spec.rb | 1
-rw-r--r-- spec/services/projects/destroy_service_spec.rb | 6
-rw-r--r-- spec/services/projects/transfer_service_spec.rb | 1
-rw-r--r-- spec/support/api/schema_matcher.rb | 24
-rw-r--r-- spec/support/api/time_tracking_shared_examples.rb | 2
-rw-r--r-- spec/support/helpers/graphql_helpers.rb | 10
-rw-r--r-- spec/support/helpers/javascript_fixtures_helpers.rb | 28
-rw-r--r-- spec/support/helpers/kubernetes_helpers.rb | 16
-rw-r--r-- spec/support/helpers/repo_helpers.rb | 14
-rw-r--r-- spec/support/helpers/stub_configuration.rb | 4
-rw-r--r-- spec/support/helpers/stub_object_storage.rb | 12
-rw-r--r-- spec/support/helpers/test_env.rb | 10
-rw-r--r-- spec/support/pg_stat_activity.rb | 19
-rw-r--r-- spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb | 24
-rw-r--r-- spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb | 44
-rw-r--r-- spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb | 65
-rw-r--r-- spec/support/shared_contexts/finders/users_finder_shared_contexts.rb | 8
-rw-r--r-- spec/support/shared_examples/application_setting_examples.rb | 252
-rw-r--r-- spec/support/shared_examples/malicious_regexp_shared_examples.rb | 3
-rw-r--r-- spec/support/shared_examples/requests/api/merge_requests_list.rb | 366
-rw-r--r-- spec/support/shared_examples/snippet_visibility.rb | 322
-rw-r--r-- spec/support/shared_examples/snippet_visibility_shared_examples.rb | 306
-rw-r--r-- spec/tasks/gitlab/backup_rake_spec.rb | 4
-rw-r--r-- spec/tasks/gitlab/storage_rake_spec.rb | 108
-rw-r--r-- spec/validators/devise_email_validator_spec.rb | 94
-rw-r--r-- spec/validators/sha_validator_spec.rb | 9
-rw-r--r-- spec/views/groups/_home_panel.html.haml_spec.rb | 15
-rw-r--r-- spec/views/projects/settings/ci_cd/_autodevops_form.html.haml_spec.rb | 1
-rw-r--r-- spec/views/projects/settings/operations/show.html.haml_spec.rb | 1
-rw-r--r-- spec/workers/ci/build_prepare_worker_spec.rb | 30
-rw-r--r-- spec/workers/cluster_configure_worker_spec.rb | 16
-rw-r--r-- spec/workers/cluster_project_configure_worker_spec.rb | 21
360 files changed, 8429 insertions, 2741 deletions
diff --git a/spec/controllers/admin/projects_controller_spec.rb b/spec/controllers/admin/projects_controller_spec.rb
index 8166657f674..4caf8b46519 100644
--- a/spec/controllers/admin/projects_controller_spec.rb
+++ b/spec/controllers/admin/projects_controller_spec.rb
@@ -43,6 +43,16 @@ describe Admin::ProjectsController do
end
end
+ describe 'GET /projects.json' do
+ render_views
+
+ before do
+ get :index, format: :json
+ end
+
+ it { is_expected.to respond_with(:success) }
+ end
+
describe 'GET /projects/:id' do
render_views
diff --git a/spec/controllers/autocomplete_controller_spec.rb b/spec/controllers/autocomplete_controller_spec.rb
index 4458a7223bf..d8b75c5151e 100644
--- a/spec/controllers/autocomplete_controller_spec.rb
+++ b/spec/controllers/autocomplete_controller_spec.rb
@@ -371,5 +371,36 @@ describe AutocompleteController do
expect(json_response[3]).to match('name' => 'thumbsdown')
end
end
+
+ context 'Get merge_request_target_branches' do
+ let(:user2) { create(:user) }
+ let!(:merge_request1) { create(:merge_request, source_project: project, target_branch: 'feature') }
+
+ context 'unauthorized user' do
+ it 'returns empty json' do
+ get :merge_request_target_branches
+
+ expect(json_response).to be_empty
+ end
+ end
+
+ context 'sign in as user without any accessible merge requests' do
+ it 'returns empty json' do
+ sign_in(user2)
+ get :merge_request_target_branches
+
+ expect(json_response).to be_empty
+ end
+ end
+
+ context 'sign in as user with an accessible merge request' do
+ it 'returns json' do
+ sign_in(user)
+ get :merge_request_target_branches
+
+ expect(json_response).to contain_exactly({ 'title' => 'feature' })
+ end
+ end
+ end
end
end
diff --git a/spec/controllers/dashboard/projects_controller_spec.rb b/spec/controllers/dashboard/projects_controller_spec.rb
index 2975205e09c..649441f4917 100644
--- a/spec/controllers/dashboard/projects_controller_spec.rb
+++ b/spec/controllers/dashboard/projects_controller_spec.rb
@@ -2,4 +2,30 @@ require 'spec_helper'
describe Dashboard::ProjectsController do
it_behaves_like 'authenticates sessionless user', :index, :atom
+
+ context 'json requests' do
+ render_views
+
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'GET /projects.json' do
+ before do
+ get :index, format: :json
+ end
+
+ it { is_expected.to respond_with(:success) }
+ end
+
+ describe 'GET /starred.json' do
+ before do
+ get :starred, format: :json
+ end
+
+ it { is_expected.to respond_with(:success) }
+ end
+ end
end
diff --git a/spec/controllers/explore/projects_controller_spec.rb b/spec/controllers/explore/projects_controller_spec.rb
index d57367e931e..7e20ddca249 100644
--- a/spec/controllers/explore/projects_controller_spec.rb
+++ b/spec/controllers/explore/projects_controller_spec.rb
@@ -1,6 +1,36 @@
require 'spec_helper'
describe Explore::ProjectsController do
+ describe 'GET #index.json' do
+ render_views
+
+ before do
+ get :index, format: :json
+ end
+
+ it { is_expected.to respond_with(:success) }
+ end
+
+ describe 'GET #trending.json' do
+ render_views
+
+ before do
+ get :trending, format: :json
+ end
+
+ it { is_expected.to respond_with(:success) }
+ end
+
+ describe 'GET #starred.json' do
+ render_views
+
+ before do
+ get :starred, format: :json
+ end
+
+ it { is_expected.to respond_with(:success) }
+ end
+
describe 'GET #trending' do
context 'sorting by update date' do
let(:project1) { create(:project, :public, updated_at: 3.days.ago) }
diff --git a/spec/controllers/groups/boards_controller_spec.rb b/spec/controllers/groups/boards_controller_spec.rb
index 4228e727b52..27ee37b3817 100644
--- a/spec/controllers/groups/boards_controller_spec.rb
+++ b/spec/controllers/groups/boards_controller_spec.rb
@@ -22,28 +22,6 @@ describe Groups::BoardsController do
expect(response.content_type).to eq 'text/html'
end
- it 'redirects to latest visited board' do
- board = create(:board, group: group)
- create(:board_group_recent_visit, group: board.group, board: board, user: user)
-
- list_boards
-
- expect(response).to redirect_to(group_board_path(id: board.id))
- end
-
- it 'renders template if visited board is not found' do
- temporary_board = create(:board, group: group)
- visited = create(:board_group_recent_visit, group: temporary_board.group, board: temporary_board, user: user)
- temporary_board.delete
-
- allow_any_instance_of(Boards::Visits::LatestService).to receive(:execute).and_return(visited)
-
- list_boards
-
- expect(response).to render_template :index
- expect(response.content_type).to eq 'text/html'
- end
-
context 'with unauthorized user' do
before do
allow(Ability).to receive(:allowed?).with(user, :read_cross_project, :global).and_return(true)
diff --git a/spec/controllers/groups/settings/ci_cd_controller_spec.rb b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
index 40673d10b91..15eb0a442a6 100644
--- a/spec/controllers/groups/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
@@ -66,4 +66,77 @@ describe Groups::Settings::CiCdController do
end
end
end
+
+ describe 'PATCH #update_auto_devops' do
+ let(:auto_devops_param) { '1' }
+
+ subject do
+ patch :update_auto_devops, params: {
+ group_id: group,
+ group: { auto_devops_enabled: auto_devops_param }
+ }
+ end
+
+ context 'when user does not have enough permission' do
+ before do
+ group.add_maintainer(user)
+ end
+
+ it { is_expected.to have_gitlab_http_status(404) }
+ end
+
+ context 'when user has enough privileges' do
+ before do
+ group.add_owner(user)
+ end
+
+ it { is_expected.to redirect_to(group_settings_ci_cd_path) }
+
+ context 'when service execution went wrong' do
+ before do
+ allow_any_instance_of(Groups::AutoDevopsService).to receive(:execute).and_return(false)
+ allow_any_instance_of(Group).to receive_message_chain(:errors, :full_messages)
+ .and_return(['Error 1'])
+
+ subject
+ end
+
+ it 'returns a flash alert' do
+ expect(response).to set_flash[:alert]
+ .to eq("There was a problem updating Auto DevOps pipeline: [\"Error 1\"].")
+ end
+ end
+
+ context 'when service execution was successful' do
+ it 'returns a flash notice' do
+ subject
+
+ expect(response).to set_flash[:notice]
+ .to eq('Auto DevOps pipeline was updated for the group')
+ end
+ end
+
+ context 'when changing auto devops value' do
+ before do
+ subject
+
+ group.reload
+ end
+
+ context 'when explicitly enabling auto devops' do
+ it 'should update group attribute' do
+ expect(group.auto_devops_enabled).to eq(true)
+ end
+ end
+
+ context 'when explicitly disabling auto devops' do
+ let(:auto_devops_param) { '0' }
+
+ it 'should update group attribute' do
+ expect(group.auto_devops_enabled).to eq(false)
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 21e5122c06b..b2e6df6060a 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -32,21 +32,46 @@ describe GroupsController do
end
end
+ shared_examples 'details view' do
+ it { is_expected.to render_template('groups/show') }
+
+ context 'as atom' do
+ let!(:event) { create(:event, project: project) }
+ let(:format) { :atom }
+
+ it { is_expected.to render_template('groups/show') }
+
+ it 'assigns events for all the projects in the group' do
+ subject
+ expect(assigns(:events)).to contain_exactly(event)
+ end
+ end
+ end
+
describe 'GET #show' do
before do
sign_in(user)
project
end
- context 'as atom' do
- it 'assigns events for all the projects in the group' do
- create(:event, project: project)
+ let(:format) { :html }
- get :show, params: { id: group.to_param }, format: :atom
+ subject { get :show, params: { id: group.to_param }, format: format }
- expect(assigns(:events)).not_to be_empty
- end
+ it_behaves_like 'details view'
+ end
+
+ describe 'GET #details' do
+ before do
+ sign_in(user)
+ project
end
+
+ let(:format) { :html }
+
+ subject { get :details, params: { id: group.to_param }, format: format }
+
+ it_behaves_like 'details view'
end
describe 'GET edit' do
diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb
index e0da23ca0b8..06c6f49f7cc 100644
--- a/spec/controllers/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/omniauth_callbacks_controller_spec.rb
@@ -113,6 +113,33 @@ describe OmniauthCallbacksController, type: :controller do
expect(request.env['warden']).to be_authenticated
end
+ context 'when user has no linked provider' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in user
+ end
+
+ it 'links identity' do
+ expect do
+ post provider
+ user.reload
+ end.to change { user.identities.count }.by(1)
+ end
+
+ context 'and is not allowed to link the provider' do
+ before do
+ allow_any_instance_of(IdentityProviderPolicy).to receive(:can?).with(:link).and_return(false)
+ end
+
+ it 'returns 403' do
+ post provider
+
+ expect(response).to have_gitlab_http_status(403)
+ end
+ end
+ end
+
shared_context 'sign_up' do
let(:user) { double(email: 'new@example.com') }
diff --git a/spec/controllers/projects/boards_controller_spec.rb b/spec/controllers/projects/boards_controller_spec.rb
index 09199067024..1eeded06459 100644
--- a/spec/controllers/projects/boards_controller_spec.rb
+++ b/spec/controllers/projects/boards_controller_spec.rb
@@ -28,28 +28,6 @@ describe Projects::BoardsController do
expect(response.content_type).to eq 'text/html'
end
- it 'redirects to latest visited board' do
- board = create(:board, project: project)
- create(:board_project_recent_visit, project: board.project, board: board, user: user)
-
- list_boards
-
- expect(response).to redirect_to(namespace_project_board_path(id: board.id))
- end
-
- it 'renders template if visited board is not found' do
- temporary_board = create(:board, project: project)
- visited = create(:board_project_recent_visit, project: temporary_board.project, board: temporary_board, user: user)
- temporary_board.delete
-
- allow_any_instance_of(Boards::Visits::LatestService).to receive(:execute).and_return(visited)
-
- list_boards
-
- expect(response).to render_template :index
- expect(response.content_type).to eq 'text/html'
- end
-
context 'with unauthorized user' do
before do
allow(Ability).to receive(:allowed?).with(user, :read_project, project).and_return(true)
diff --git a/spec/controllers/projects/git_http_controller_spec.rb b/spec/controllers/projects/git_http_controller_spec.rb
new file mode 100644
index 00000000000..bf099e8deeb
--- /dev/null
+++ b/spec/controllers/projects/git_http_controller_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::GitHttpController do
+ describe 'HEAD #info_refs' do
+ it 'returns 403' do
+ project = create(:project, :public, :repository)
+
+ head :info_refs, params: { namespace_id: project.namespace.to_param, project_id: project.path + '.git' }
+
+ expect(response.status).to eq(403)
+ end
+ end
+end
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 0b0f5117784..deecb7fefe9 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -413,6 +413,37 @@ describe Projects::NotesController do
end
end
end
+
+ context 'when creating a note with quick actions' do
+ context 'with commands that return changes' do
+ let(:note_text) { "/award :thumbsup:\n/estimate 1d\n/spend 3h" }
+
+ it 'includes changes in commands_changes' do
+ post :create, params: request_params.merge(note: { note: note_text }, format: :json)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['commands_changes']).to include('emoji_award', 'time_estimate', 'spend_time')
+ expect(json_response['commands_changes']).not_to include('target_project', 'title')
+ end
+ end
+
+ context 'with commands that do not return changes' do
+ let(:issue) { create(:issue, project: project) }
+ let(:other_project) { create(:project) }
+ let(:note_text) { "/move #{other_project.full_path}\n/title AAA" }
+
+ before do
+ other_project.add_developer(user)
+ end
+
+ it 'does not include changes in commands_changes' do
+ post :create, params: request_params.merge(note: { note: note_text }, target_type: 'issue', target_id: issue.id, format: :json)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response['commands_changes']).not_to include('target_project', 'title')
+ end
+ end
+ end
end
describe 'PUT update' do
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index a1662658ade..356d606d5c5 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -369,6 +369,23 @@ describe ProjectsController do
end
end
+ it 'does not update namespace' do
+ controller.instance_variable_set(:@project, project)
+
+ params = {
+ namespace_id: 'test'
+ }
+
+ expect do
+ put :update,
+ params: {
+ namespace_id: project.namespace,
+ id: project.id,
+ project: params
+ }
+ end.not_to change { project.namespace.reload }
+ end
+
def update_project(**parameters)
put :update,
params: {
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index fd151e8a298..c1baf88778d 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -15,7 +15,7 @@ describe RegistrationsController do
context 'when send_user_confirmation_email is false' do
it 'signs the user in' do
- allow_any_instance_of(ApplicationSetting).to receive(:send_user_confirmation_email).and_return(false)
+ stub_application_setting(send_user_confirmation_email: false)
expect { post(:create, params: user_params) }.not_to change { ActionMailer::Base.deliveries.size }
expect(subject.current_user).not_to be_nil
@@ -24,7 +24,7 @@ describe RegistrationsController do
context 'when send_user_confirmation_email is true' do
it 'does not authenticate user and sends confirmation email' do
- allow_any_instance_of(ApplicationSetting).to receive(:send_user_confirmation_email).and_return(true)
+ stub_application_setting(send_user_confirmation_email: true)
post(:create, params: user_params)
@@ -35,7 +35,7 @@ describe RegistrationsController do
context 'when signup_enabled? is false' do
it 'redirects to sign_in' do
- allow_any_instance_of(ApplicationSetting).to receive(:signup_enabled?).and_return(false)
+ stub_application_setting(signup_enabled: false)
expect { post(:create, params: user_params) }.not_to change(User, :count)
expect(response).to redirect_to(new_user_session_path)
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 0b3e67b4987..067391c1179 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -75,6 +75,10 @@ FactoryBot.define do
status 'created'
end
+ trait :preparing do
+ status 'preparing'
+ end
+
trait :scheduled do
schedulable
status 'scheduled'
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index 8a44ce52849..aa5ccbda6cd 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -50,6 +50,14 @@ FactoryBot.define do
failure_reason :config_error
end
+ trait :created do
+ status :created
+ end
+
+ trait :preparing do
+ status :preparing
+ end
+
trait :blocked do
status :manual
end
@@ -82,6 +90,12 @@ FactoryBot.define do
end
end
+ trait :with_job do
+ after(:build) do |pipeline, evaluator|
+ pipeline.builds << build(:ci_build, pipeline: pipeline, project: pipeline.project)
+ end
+ end
+
trait :auto_devops_source do
config_source { Ci::Pipeline.config_sources[:auto_devops_source] }
end
diff --git a/spec/factories/clusters/clusters.rb b/spec/factories/clusters/clusters.rb
index a2e5f4862db..1cc3c0e03d8 100644
--- a/spec/factories/clusters/clusters.rb
+++ b/spec/factories/clusters/clusters.rb
@@ -12,7 +12,7 @@ FactoryBot.define do
cluster_type { Clusters::Cluster.cluster_types[:project_type] }
before(:create) do |cluster, evaluator|
- cluster.projects << create(:project, :repository)
+ cluster.projects << create(:project, :repository) unless cluster.projects.present?
end
end
@@ -20,7 +20,7 @@ FactoryBot.define do
cluster_type { Clusters::Cluster.cluster_types[:group_type] }
before(:create) do |cluster, evalutor|
- cluster.groups << create(:group)
+ cluster.groups << create(:group) unless cluster.groups.present?
end
end
diff --git a/spec/factories/clusters/providers/gcp.rb b/spec/factories/clusters/providers/gcp.rb
index a002ab28519..186c7c8027c 100644
--- a/spec/factories/clusters/providers/gcp.rb
+++ b/spec/factories/clusters/providers/gcp.rb
@@ -28,5 +28,9 @@ FactoryBot.define do
gcp.make_errored('Something wrong')
end
end
+
+ trait :abac_enabled do
+ legacy_abac true
+ end
end
end
diff --git a/spec/factories/commit_statuses.rb b/spec/factories/commit_statuses.rb
index 381bf07f6a0..848a31e96c1 100644
--- a/spec/factories/commit_statuses.rb
+++ b/spec/factories/commit_statuses.rb
@@ -33,6 +33,10 @@ FactoryBot.define do
status 'pending'
end
+ trait :preparing do
+ status 'preparing'
+ end
+
trait :created do
status 'created'
end
diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb
index 3b354c0d96b..dcef8571f41 100644
--- a/spec/factories/groups.rb
+++ b/spec/factories/groups.rb
@@ -36,5 +36,13 @@ FactoryBot.define do
trait :nested do
parent factory: :group
end
+
+ trait :auto_devops_enabled do
+ auto_devops_enabled true
+ end
+
+ trait :auto_devops_disabled do
+ auto_devops_enabled false
+ end
end
end
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index 18f724770b5..a73f330a7a9 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -101,12 +101,33 @@ FactoryBot.define do
end
end
- trait :with_merge_request_pipeline do
+ trait :with_detached_merge_request_pipeline do
after(:build) do |merge_request|
merge_request.merge_request_pipelines << build(:ci_pipeline,
source: :merge_request_event,
merge_request: merge_request,
- project: merge_request.source_project)
+ project: merge_request.source_project,
+ ref: merge_request.ref_path,
+ sha: merge_request.source_branch_sha)
+ end
+ end
+
+ trait :with_merge_request_pipeline do
+ transient do
+ merge_sha { 'test-merge-sha' }
+ source_sha { source_branch_sha }
+ target_sha { target_branch_sha }
+ end
+
+ after(:build) do |merge_request, evaluator|
+ merge_request.merge_request_pipelines << create(:ci_pipeline,
+ source: :merge_request_event,
+ merge_request: merge_request,
+ project: merge_request.source_project,
+ ref: merge_request.merge_ref_path,
+ sha: evaluator.merge_sha,
+ source_sha: evaluator.source_sha,
+ target_sha: evaluator.target_sha)
end
end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 30d3b22d868..ab185ab3972 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -271,6 +271,10 @@ FactoryBot.define do
trait :auto_devops do
association :auto_devops, factory: :project_auto_devops
end
+
+ trait :auto_devops_disabled do
+ association :auto_devops, factory: [:project_auto_devops, :disabled]
+ end
end
# Project with empty repository
diff --git a/spec/features/clusters/cluster_detail_page_spec.rb b/spec/features/clusters/cluster_detail_page_spec.rb
index 0a9c4bcaf12..b9fc52d0dce 100644
--- a/spec/features/clusters/cluster_detail_page_spec.rb
+++ b/spec/features/clusters/cluster_detail_page_spec.rb
@@ -4,6 +4,8 @@ require 'spec_helper'
describe 'Clusterable > Show page' do
let(:current_user) { create(:user) }
+ let(:cluster_ingress_help_text_selector) { '.js-ingress-domain-help-text' }
+ let(:hide_modifier_selector) { '.hide' }
before do
sign_in(current_user)
@@ -35,7 +37,7 @@ describe 'Clusterable > Show page' do
it 'shows help text with the domain as an alternative to custom domain' do
within '#cluster-integration' do
- expect(page).to have_content('Alternatively 192.168.1.100.nip.io can be used instead of a custom domain')
+ expect(find(cluster_ingress_help_text_selector)).not_to match_css(hide_modifier_selector)
end
end
end
@@ -45,7 +47,7 @@ describe 'Clusterable > Show page' do
visit cluster_path
within '#cluster-integration' do
- expect(page).not_to have_content('can be used instead of a custom domain.')
+ expect(find(cluster_ingress_help_text_selector)).to match_css(hide_modifier_selector)
end
end
end
diff --git a/spec/features/groups/settings/ci_cd_spec.rb b/spec/features/groups/settings/ci_cd_spec.rb
index d422fd18346..0f793dbab6e 100644
--- a/spec/features/groups/settings/ci_cd_spec.rb
+++ b/spec/features/groups/settings/ci_cd_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
describe 'Group CI/CD settings' do
include WaitForRequests
- let(:user) {create(:user)}
- let(:group) {create(:group)}
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
before do
group.add_owner(user)
@@ -36,4 +36,45 @@ describe 'Group CI/CD settings' do
end
end
end
+
+ describe 'Auto DevOps form' do
+ before do
+ stub_application_setting(auto_devops_enabled: true)
+ end
+
+ context 'as owner first visiting group settings' do
+ it 'should see instance enabled badge' do
+ visit group_settings_ci_cd_path(group)
+
+ page.within '#auto-devops-settings' do
+ expect(page).to have_content('instance enabled')
+ end
+ end
+ end
+
+ context 'when Auto DevOps group has been enabled' do
+ it 'should see group enabled badge' do
+ group.update!(auto_devops_enabled: true)
+
+ visit group_settings_ci_cd_path(group)
+
+ page.within '#auto-devops-settings' do
+ expect(page).to have_content('group enabled')
+ end
+ end
+ end
+
+ context 'when Auto DevOps group has been disabled' do
+ it 'should not see a badge' do
+ group.update!(auto_devops_enabled: false)
+
+ visit group_settings_ci_cd_path(group)
+
+ page.within '#auto-devops-settings' do
+ expect(page).not_to have_content('instance enabled')
+ expect(page).not_to have_content('group enabled')
+ end
+ end
+ end
+ end
end
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index 986f3823275..8eb413bdd8d 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -278,12 +278,7 @@ describe 'GFM autocomplete', :js do
end
end
- # This context has just one example in each contexts in order to improve spec performance.
- context 'labels', :quarantine do
- let!(:backend) { create(:label, project: project, title: 'backend') }
- let!(:bug) { create(:label, project: project, title: 'bug') }
- let!(:feature_proposal) { create(:label, project: project, title: 'feature proposal') }
-
+ context 'labels' do
it 'opens autocomplete menu for Labels when field starts with text with item escaping HTML characters' do
create(:label, project: project, title: label_xss_title)
@@ -298,83 +293,6 @@ describe 'GFM autocomplete', :js do
expect(find('.atwho-view-ul').text).to have_content('alert label')
end
end
-
- context 'when no labels are assigned' do
- it 'shows labels' do
- note = find('#note-body')
-
- # It should show all the labels on "~".
- type(note, '~')
- wait_for_requests
- expect_labels(shown: [backend, bug, feature_proposal])
-
- # It should show all the labels on "/label ~".
- type(note, '/label ~')
- expect_labels(shown: [backend, bug, feature_proposal])
-
- # It should show all the labels on "/relabel ~".
- type(note, '/relabel ~')
- expect_labels(shown: [backend, bug, feature_proposal])
-
- # It should show no labels on "/unlabel ~".
- type(note, '/unlabel ~')
- expect_labels(not_shown: [backend, bug, feature_proposal])
- end
- end
-
- context 'when some labels are assigned' do
- before do
- issue.labels << [backend]
- end
-
- it 'shows labels' do
- note = find('#note-body')
-
- # It should show all the labels on "~".
- type(note, '~')
- wait_for_requests
- expect_labels(shown: [backend, bug, feature_proposal])
-
- # It should show only unset labels on "/label ~".
- type(note, '/label ~')
- expect_labels(shown: [bug, feature_proposal], not_shown: [backend])
-
- # It should show all the labels on "/relabel ~".
- type(note, '/relabel ~')
- expect_labels(shown: [backend, bug, feature_proposal])
-
- # It should show only set labels on "/unlabel ~".
- type(note, '/unlabel ~')
- expect_labels(shown: [backend], not_shown: [bug, feature_proposal])
- end
- end
-
- context 'when all labels are assigned' do
- before do
- issue.labels << [backend, bug, feature_proposal]
- end
-
- it 'shows labels' do
- note = find('#note-body')
-
- # It should show all the labels on "~".
- type(note, '~')
- wait_for_requests
- expect_labels(shown: [backend, bug, feature_proposal])
-
- # It should show no labels on "/label ~".
- type(note, '/label ~')
- expect_labels(not_shown: [backend, bug, feature_proposal])
-
- # It should show all the labels on "/relabel ~".
- type(note, '/relabel ~')
- expect_labels(shown: [backend, bug, feature_proposal])
-
- # It should show all the labels on "/unlabel ~".
- type(note, '/unlabel ~')
- expect_labels(shown: [backend, bug, feature_proposal])
- end
- end
end
shared_examples 'autocomplete suggestions' do
diff --git a/spec/features/issues_spec.rb b/spec/features/issues_spec.rb
index 9bc340ed4bb..51508b78649 100644
--- a/spec/features/issues_spec.rb
+++ b/spec/features/issues_spec.rb
@@ -497,12 +497,21 @@ describe 'Issues' do
it 'allows user to unselect themselves', :js do
issue2 = create(:issue, project: project, author: user)
+
visit project_issue_path(project, issue2)
+ def close_dropdown_menu_if_visible
+ find('.dropdown-menu-toggle', visible: :all).tap do |toggle|
+ toggle.click if toggle.visible?
+ end
+ end
+
page.within '.assignee' do
click_link 'Edit'
click_link user.name
+ close_dropdown_menu_if_visible
+
page.within '.value .author' do
expect(page).to have_content user.name
end
@@ -510,6 +519,8 @@ describe 'Issues' do
click_link 'Edit'
click_link user.name
+ close_dropdown_menu_if_visible
+
page.within '.value .assign-yourself' do
expect(page).to have_content "No assignee"
end
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index 97b2aa82fce..28f88718ec1 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -2,7 +2,7 @@
require 'rails_helper'
-describe 'Merge request > User sees merge request pipelines', :js do
+describe 'Merge request > User sees pipelines triggered by merge request', :js do
include ProjectForksHelper
include TestReportsHelper
@@ -47,7 +47,7 @@ describe 'Merge request > User sees merge request pipelines', :js do
.execute(:push)
end
- let!(:merge_request_pipeline) do
+ let!(:detached_merge_request_pipeline) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
.execute(:merge_request_event, merge_request: merge_request)
end
@@ -60,16 +60,16 @@ describe 'Merge request > User sees merge request pipelines', :js do
end
end
- it 'sees branch pipelines and merge request pipelines in correct order' do
+ it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
expect(page).to have_selector('.ci-pending', count: 2)
- expect(first('.js-pipeline-url-link')).to have_content("##{merge_request_pipeline.id}")
+ expect(first('.js-pipeline-url-link')).to have_content("##{detached_merge_request_pipeline.id}")
end
end
- it 'sees the latest merge request pipeline as the head pipeline' do
+ it 'sees the latest detached merge request pipeline as the head pipeline' do
page.within('.ci-widget-content') do
- expect(page).to have_content("##{merge_request_pipeline.id}")
+ expect(page).to have_content("##{detached_merge_request_pipeline.id}")
end
end
@@ -79,7 +79,7 @@ describe 'Merge request > User sees merge request pipelines', :js do
.execute(:push)
end
- let!(:merge_request_pipeline_2) do
+ let!(:detached_merge_request_pipeline_2) do
Ci::CreatePipelineService.new(project, user, ref: 'feature')
.execute(:merge_request_event, merge_request: merge_request)
end
@@ -92,15 +92,15 @@ describe 'Merge request > User sees merge request pipelines', :js do
end
end
- it 'sees branch pipelines and merge request pipelines in correct order' do
+ it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
expect(page).to have_selector('.ci-pending', count: 4)
expect(all('.js-pipeline-url-link')[0])
- .to have_content("##{merge_request_pipeline_2.id}")
+ .to have_content("##{detached_merge_request_pipeline_2.id}")
expect(all('.js-pipeline-url-link')[1])
- .to have_content("##{merge_request_pipeline.id}")
+ .to have_content("##{detached_merge_request_pipeline.id}")
expect(all('.js-pipeline-url-link')[2])
.to have_content("##{push_pipeline_2.id}")
@@ -110,25 +110,25 @@ describe 'Merge request > User sees merge request pipelines', :js do
end
end
- it 'sees merge request tag for merge request pipelines' do
+ it 'sees detached tag for detached merge request pipelines' do
page.within('.ci-table') do
expect(all('.pipeline-tags')[0])
- .to have_content("merge request")
+ .to have_content("detached")
expect(all('.pipeline-tags')[1])
- .to have_content("merge request")
+ .to have_content("detached")
expect(all('.pipeline-tags')[2])
- .not_to have_content("merge request")
+ .not_to have_content("detached")
expect(all('.pipeline-tags')[3])
- .not_to have_content("merge request")
+ .not_to have_content("detached")
end
end
- it 'sees the latest merge request pipeline as the head pipeline' do
+ it 'sees the latest detached merge request pipeline as the head pipeline' do
page.within('.ci-widget-content') do
- expect(page).to have_content("##{merge_request_pipeline_2.id}")
+ expect(page).to have_content("##{detached_merge_request_pipeline_2.id}")
end
end
end
@@ -140,16 +140,16 @@ describe 'Merge request > User sees merge request pipelines', :js do
wait_for_requests
end
- context 'when merge request pipeline is pending' do
+ context 'when detached merge request pipeline is pending' do
it 'waits the head pipeline' do
expect(page).to have_content('to be merged automatically when the pipeline succeeds')
expect(page).to have_link('Cancel automatic merge')
end
end
- context 'when merge request pipeline succeeds' do
+ context 'when detached merge request pipeline succeeds' do
before do
- merge_request_pipeline.succeed!
+ detached_merge_request_pipeline.succeed!
wait_for_requests
end
@@ -218,7 +218,7 @@ describe 'Merge request > User sees merge request pipelines', :js do
.execute(:push)
end
- let!(:merge_request_pipeline) do
+ let!(:detached_merge_request_pipeline) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
.execute(:merge_request_event, merge_request: merge_request)
end
@@ -236,16 +236,16 @@ describe 'Merge request > User sees merge request pipelines', :js do
end
end
- it 'sees branch pipelines and merge request pipelines in correct order' do
+ it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
expect(page).to have_selector('.ci-pending', count: 2)
- expect(first('.js-pipeline-url-link')).to have_content("##{merge_request_pipeline.id}")
+ expect(first('.js-pipeline-url-link')).to have_content("##{detached_merge_request_pipeline.id}")
end
end
- it 'sees the latest merge request pipeline as the head pipeline' do
+ it 'sees the latest detached merge request pipeline as the head pipeline' do
page.within('.ci-widget-content') do
- expect(page).to have_content("##{merge_request_pipeline.id}")
+ expect(page).to have_content("##{detached_merge_request_pipeline.id}")
end
end
@@ -261,7 +261,7 @@ describe 'Merge request > User sees merge request pipelines', :js do
.execute(:push)
end
- let!(:merge_request_pipeline_2) do
+ let!(:detached_merge_request_pipeline_2) do
Ci::CreatePipelineService.new(forked_project, user2, ref: 'feature')
.execute(:merge_request_event, merge_request: merge_request)
end
@@ -274,15 +274,15 @@ describe 'Merge request > User sees merge request pipelines', :js do
end
end
- it 'sees branch pipelines and merge request pipelines in correct order' do
+ it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
expect(page).to have_selector('.ci-pending', count: 4)
expect(all('.js-pipeline-url-link')[0])
- .to have_content("##{merge_request_pipeline_2.id}")
+ .to have_content("##{detached_merge_request_pipeline_2.id}")
expect(all('.js-pipeline-url-link')[1])
- .to have_content("##{merge_request_pipeline.id}")
+ .to have_content("##{detached_merge_request_pipeline.id}")
expect(all('.js-pipeline-url-link')[2])
.to have_content("##{push_pipeline_2.id}")
@@ -292,25 +292,25 @@ describe 'Merge request > User sees merge request pipelines', :js do
end
end
- it 'sees merge request tag for merge request pipelines' do
+ it 'sees detached tag for detached merge request pipelines' do
page.within('.ci-table') do
expect(all('.pipeline-tags')[0])
- .to have_content("merge request")
+ .to have_content("detached")
expect(all('.pipeline-tags')[1])
- .to have_content("merge request")
+ .to have_content("detached")
expect(all('.pipeline-tags')[2])
- .not_to have_content("merge request")
+ .not_to have_content("detached")
expect(all('.pipeline-tags')[3])
- .not_to have_content("merge request")
+ .not_to have_content("detached")
end
end
- it 'sees the latest merge request pipeline as the head pipeline' do
+ it 'sees the latest detached merge request pipeline as the head pipeline' do
page.within('.ci-widget-content') do
- expect(page).to have_content("##{merge_request_pipeline_2.id}")
+ expect(page).to have_content("##{detached_merge_request_pipeline_2.id}")
end
end
@@ -328,16 +328,16 @@ describe 'Merge request > User sees merge request pipelines', :js do
wait_for_requests
end
- context 'when merge request pipeline is pending' do
+ context 'when detached merge request pipeline is pending' do
it 'waits the head pipeline' do
expect(page).to have_content('to be merged automatically when the pipeline succeeds')
expect(page).to have_link('Cancel automatic merge')
end
end
- context 'when merge request pipeline succeeds' do
+ context 'when detached merge request pipeline succeeds' do
before do
- merge_request_pipeline.succeed!
+ detached_merge_request_pipeline.succeed!
wait_for_requests
end
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index afb978d7c45..2609546990d 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -145,6 +145,119 @@ describe 'Merge request > User sees merge widget', :js do
end
end
+ context 'when merge request has a branch pipeline as the head pipeline' do
+ let!(:pipeline) do
+ create(:ci_pipeline,
+ ref: merge_request.source_branch,
+ sha: merge_request.source_branch_sha,
+ project: merge_request.source_project)
+ end
+
+ before do
+ merge_request.update_head_pipeline
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'shows head pipeline information' do
+ within '.ci-widget-content' do
+ expect(page).to have_content("Pipeline ##{pipeline.id} pending " \
+ "for #{pipeline.short_sha} " \
+ "on #{pipeline.ref}")
+ end
+ end
+ end
+
+ context 'when merge request has a detached merge request pipeline as the head pipeline' do
+ let(:merge_request) do
+ create(:merge_request,
+ :with_detached_merge_request_pipeline,
+ source_project: source_project,
+ target_project: target_project)
+ end
+
+ let!(:pipeline) do
+ merge_request.all_pipelines.last
+ end
+
+ let(:source_project) { project }
+ let(:target_project) { project }
+
+ before do
+ merge_request.update_head_pipeline
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'shows head pipeline information' do
+ within '.ci-widget-content' do
+ expect(page).to have_content("Pipeline ##{pipeline.id} pending " \
+ "for #{pipeline.short_sha} " \
+ "on #{merge_request.to_reference} " \
+ "with #{merge_request.source_branch}")
+ end
+ end
+
+ context 'when source project is a forked project' do
+ let(:source_project) { fork_project(project, user, repository: true) }
+
+ it 'shows head pipeline information' do
+ within '.ci-widget-content' do
+ expect(page).to have_content("Pipeline ##{pipeline.id} pending " \
+ "for #{pipeline.short_sha} " \
+ "on #{merge_request.to_reference} " \
+ "with #{merge_request.source_branch}")
+ end
+ end
+ end
+ end
+
+ context 'when merge request has a merge request pipeline as the head pipeline' do
+ let(:merge_request) do
+ create(:merge_request,
+ :with_merge_request_pipeline,
+ source_project: source_project,
+ target_project: target_project,
+ merge_sha: merge_sha)
+ end
+
+ let!(:pipeline) do
+ merge_request.all_pipelines.last
+ end
+
+ let(:source_project) { project }
+ let(:target_project) { project }
+ let(:merge_sha) { project.commit.sha }
+
+ before do
+ merge_request.update_head_pipeline
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'shows head pipeline information' do
+ within '.ci-widget-content' do
+ expect(page).to have_content("Pipeline ##{pipeline.id} pending " \
+ "for #{pipeline.short_sha} " \
+ "on #{merge_request.to_reference} " \
+ "with #{merge_request.source_branch} " \
+ "into #{merge_request.target_branch}")
+ end
+ end
+
+ context 'when source project is a forked project' do
+ let(:source_project) { fork_project(project, user, repository: true) }
+ let(:merge_sha) { source_project.commit.sha }
+
+ it 'shows head pipeline information' do
+ within '.ci-widget-content' do
+ expect(page).to have_content("Pipeline ##{pipeline.id} pending " \
+ "for #{pipeline.short_sha} " \
+ "on #{merge_request.to_reference} " \
+ "with #{merge_request.source_branch} " \
+ "into #{merge_request.target_branch}")
+ end
+ end
+ end
+ end
+
context 'view merge request with MWBS button' do
before do
commit_status = create(:commit_status, project: project, status: 'pending')
diff --git a/spec/features/merge_request/user_views_diffs_spec.rb b/spec/features/merge_request/user_views_diffs_spec.rb
index 0434db04113..74342b16cb2 100644
--- a/spec/features/merge_request/user_views_diffs_spec.rb
+++ b/spec/features/merge_request/user_views_diffs_spec.rb
@@ -34,6 +34,16 @@ describe 'User views diffs', :js do
expect(page).not_to have_selector('.mr-loading-status .loading', visible: true)
end
+ it 'expands all diffs' do
+ first('#a5cc2925ca8258af241be7e5b0381edf30266302 .js-file-title').click
+
+ expect(page).to have_button('Expand all')
+
+ click_button 'Expand all'
+
+ expect(page).not_to have_button('Expand all')
+ end
+
context 'when in the inline view' do
include_examples 'unfold diffs'
end
diff --git a/spec/features/merge_requests/user_filters_by_target_branch_spec.rb b/spec/features/merge_requests/user_filters_by_target_branch_spec.rb
new file mode 100644
index 00000000000..ffbdacc68f6
--- /dev/null
+++ b/spec/features/merge_requests/user_filters_by_target_branch_spec.rb
@@ -0,0 +1,45 @@
+require 'rails_helper'
+
+describe 'Merge Requests > User filters by target branch', :js do
+ include FilteredSearchHelpers
+
+ let!(:project) { create(:project, :public, :repository) }
+ let!(:user) { project.creator }
+ let!(:mr1) { create(:merge_request, source_project: project, target_project: project, source_branch: 'feature', target_branch: 'master') }
+ let!(:mr2) { create(:merge_request, source_project: project, target_project: project, source_branch: 'feature', target_branch: 'merged-target') }
+
+ before do
+ sign_in(user)
+ visit project_merge_requests_path(project)
+ end
+
+ context 'filtering by target-branch:master' do
+ it 'applies the filter' do
+ input_filtered_search('target-branch:master')
+
+ expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
+ expect(page).to have_content mr1.title
+ expect(page).not_to have_content mr2.title
+ end
+ end
+
+ context 'filtering by target-branch:merged-target' do
+ it 'applies the filter' do
+ input_filtered_search('target-branch:merged-target')
+
+ expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
+ expect(page).not_to have_content mr1.title
+ expect(page).to have_content mr2.title
+ end
+ end
+
+ context 'filtering by target-branch:feature' do
+ it 'applies the filter' do
+ input_filtered_search('target-branch:feature')
+
+ expect(page).to have_issuable_counts(open: 0, closed: 0, all: 0)
+ expect(page).not_to have_content mr1.title
+ expect(page).not_to have_content mr2.title
+ end
+ end
+end
diff --git a/spec/features/projects/badges/pipeline_badge_spec.rb b/spec/features/projects/badges/pipeline_badge_spec.rb
index dee81898928..4ac4e8f0fcb 100644
--- a/spec/features/projects/badges/pipeline_badge_spec.rb
+++ b/spec/features/projects/badges/pipeline_badge_spec.rb
@@ -41,6 +41,25 @@ describe 'Pipeline Badge' do
end
end
+ context 'when the pipeline is preparing' do
+ let!(:job) { create(:ci_build, status: 'created', pipeline: pipeline) }
+
+ before do
+ # Prevent skipping directly to 'pending'
+ allow(Ci::BuildPrepareWorker).to receive(:perform_async)
+ allow(job).to receive(:prerequisites).and_return([double])
+ end
+
+ it 'displays the preparing badge' do
+ job.enqueue
+
+ visit pipeline_project_badges_path(project, ref: ref, format: :svg)
+
+ expect(page.status_code).to eq(200)
+ expect_badge('preparing')
+ end
+ end
+
context 'when the pipeline is running' do
it 'shows displays so on the badge' do
create(:ci_build, pipeline: pipeline, name: 'second build', status_event: 'run')
diff --git a/spec/features/projects/clusters/applications_spec.rb b/spec/features/projects/clusters/applications_spec.rb
index 4981bf794d9..aa1c3902f0f 100644
--- a/spec/features/projects/clusters/applications_spec.rb
+++ b/spec/features/projects/clusters/applications_spec.rb
@@ -227,7 +227,7 @@ describe 'Clusters Applications', :js do
expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Installed')
expect(page).to have_css('.js-cluster-application-install-button[disabled]')
expect(page).to have_selector('.js-no-endpoint-message')
- expect(page.find('.js-endpoint').value).to eq('?')
+ expect(page).to have_selector('.js-ingress-ip-loading-icon')
# We receive the external IP address and display
Clusters::Cluster.last.application_ingress.update!(external_ip: '192.168.1.100')
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index 65ce872363f..224375daf71 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -2,6 +2,9 @@ require 'spec_helper'
require 'tempfile'
describe 'Jobs', :clean_gitlab_redis_shared_state do
+ include Gitlab::Routing
+ include ProjectForksHelper
+
let(:user) { create(:user) }
let(:user_access_level) { :developer }
let(:project) { create(:project, :repository) }
@@ -121,6 +124,112 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do
end
end
+ context 'pipeline info block', :js do
+ it 'shows pipeline id and source branch' do
+ visit project_job_path(project, job)
+
+ within '.js-pipeline-info' do
+ expect(page).to have_content("Pipeline ##{pipeline.id} for #{pipeline.ref}")
+ end
+ end
+
+ context 'when pipeline is detached merge request pipeline' do
+ let(:merge_request) do
+ create(:merge_request,
+ :with_detached_merge_request_pipeline,
+ target_project: target_project,
+ source_project: source_project)
+ end
+
+ let(:source_project) { project }
+ let(:target_project) { project }
+ let(:pipeline) { merge_request.all_pipelines.last }
+ let(:job) { create(:ci_build, pipeline: pipeline) }
+
+ it 'shows merge request iid and source branch' do
+ visit project_job_path(project, job)
+
+ within '.js-pipeline-info' do
+ expect(page).to have_content("for !#{pipeline.merge_request.iid} " \
+ "with #{pipeline.merge_request.source_branch}")
+ expect(page).to have_link("!#{pipeline.merge_request.iid}",
+ href: project_merge_request_path(project, merge_request))
+ expect(page).to have_link(pipeline.merge_request.source_branch,
+ href: project_commits_path(project, merge_request.source_branch))
+ end
+ end
+
+ context 'when source project is a forked project' do
+ let(:source_project) { fork_project(project, user, repository: true) }
+ let(:target_project) { project }
+
+ it 'shows merge request iid and source branch' do
+ visit project_job_path(source_project, job)
+
+ within '.js-pipeline-info' do
+ expect(page).to have_content("for !#{pipeline.merge_request.iid} " \
+ "with #{pipeline.merge_request.source_branch}")
+ expect(page).to have_link("!#{pipeline.merge_request.iid}",
+ href: project_merge_request_path(project, merge_request))
+ expect(page).to have_link(pipeline.merge_request.source_branch,
+ href: project_commits_path(source_project, merge_request.source_branch))
+ end
+ end
+ end
+ end
+
+ context 'when pipeline is merge request pipeline' do
+ let(:merge_request) do
+ create(:merge_request,
+ :with_merge_request_pipeline,
+ target_project: target_project,
+ source_project: source_project)
+ end
+
+ let(:source_project) { project }
+ let(:target_project) { project }
+ let(:pipeline) { merge_request.all_pipelines.last }
+ let(:job) { create(:ci_build, pipeline: pipeline) }
+
+ it 'shows merge request iid, source branch and target branch' do
+ visit project_job_path(project, job)
+
+ within '.js-pipeline-info' do
+ expect(page).to have_content("for !#{pipeline.merge_request.iid} " \
+ "with #{pipeline.merge_request.source_branch} " \
+ "into #{pipeline.merge_request.target_branch}")
+ expect(page).to have_link("!#{pipeline.merge_request.iid}",
+ href: project_merge_request_path(project, merge_request))
+ expect(page).to have_link(pipeline.merge_request.source_branch,
+ href: project_commits_path(project, merge_request.source_branch))
+ expect(page).to have_link(pipeline.merge_request.target_branch,
+ href: project_commits_path(project, merge_request.target_branch))
+ end
+ end
+
+ context 'when source project is a forked project' do
+ let(:source_project) { fork_project(project, user, repository: true) }
+ let(:target_project) { project }
+
+ it 'shows merge request iid, source branch and target branch' do
+ visit project_job_path(source_project, job)
+
+ within '.js-pipeline-info' do
+ expect(page).to have_content("for !#{pipeline.merge_request.iid} " \
+ "with #{pipeline.merge_request.source_branch} " \
+ "into #{pipeline.merge_request.target_branch}")
+ expect(page).to have_link("!#{pipeline.merge_request.iid}",
+ href: project_merge_request_path(project, merge_request))
+ expect(page).to have_link(pipeline.merge_request.source_branch,
+ href: project_commits_path(source_project, merge_request.source_branch))
+ expect(page).to have_link(pipeline.merge_request.target_branch,
+ href: project_commits_path(project, merge_request.target_branch))
+ end
+ end
+ end
+ end
+ end
+
context 'sidebar', :js do
let(:job) { create(:ci_build, :success, :trace_live, pipeline: pipeline, name: '<img src=x onerror=alert(document.domain)>') }
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 36b8c15b8b6..9fdf78baa1e 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -1,6 +1,9 @@
require 'spec_helper'
describe 'Pipeline', :js do
+ include RoutesHelpers
+ include ProjectForksHelper
+
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:role) { :developer }
@@ -21,6 +24,11 @@ describe 'Pipeline', :js do
pipeline: pipeline, stage: 'test', name: 'test')
end
+ let!(:build_preparing) do
+ create(:ci_build, :preparing,
+ pipeline: pipeline, stage: 'deploy', name: 'prepare')
+ end
+
let!(:build_running) do
create(:ci_build, :running,
pipeline: pipeline, stage: 'deploy', name: 'deploy')
@@ -72,6 +80,15 @@ describe 'Pipeline', :js do
expect(page).to have_link(pipeline.ref)
end
+ it 'shows the pipeline information' do
+ within '.pipeline-info' do
+ expect(page).to have_content("#{pipeline.statuses.count} jobs " \
+ "for #{pipeline.ref} ")
+ expect(page).to have_link(pipeline.ref,
+ href: project_commits_path(pipeline.project, pipeline.ref))
+ end
+ end
+
it_behaves_like 'showing user status' do
let(:user_with_status) { pipeline.user }
@@ -97,6 +114,24 @@ describe 'Pipeline', :js do
end
end
+ context 'when pipeline has preparing builds' do
+ it 'shows a preparing icon and a cancel action' do
+ page.within('#ci-badge-prepare') do
+ expect(page).to have_selector('.js-ci-status-icon-preparing')
+ expect(page).to have_selector('.js-icon-cancel')
+ expect(page).to have_content('prepare')
+ end
+ end
+
+ it 'cancels the preparing build and shows retry button' do
+ find('#ci-badge-prepare .ci-action-icon-container').click
+
+ page.within('#ci-badge-prepare') do
+ expect(page).to have_css('.js-icon-retry')
+ end
+ end
+ end
+
context 'when pipeline has successful builds' do
it 'shows the success icon and a retry action for the successful build' do
page.within('#ci-badge-build') do
@@ -254,6 +289,113 @@ describe 'Pipeline', :js do
expect(page).to have_content(pipeline.ref)
end
end
+
+ context 'when pipeline is detached merge request pipeline' do
+ let(:source_project) { project }
+ let(:target_project) { project }
+
+ let(:merge_request) do
+ create(:merge_request,
+ :with_detached_merge_request_pipeline,
+ source_project: source_project,
+ target_project: target_project)
+ end
+
+ let(:pipeline) do
+ merge_request.all_pipelines.last
+ end
+
+ it 'shows the pipeline information' do
+ within '.pipeline-info' do
+ expect(page).to have_content("#{pipeline.statuses.count} jobs " \
+ "for !#{merge_request.iid} " \
+ "with #{merge_request.source_branch}")
+ expect(page).to have_link("!#{merge_request.iid}",
+ href: project_merge_request_path(project, merge_request))
+ expect(page).to have_link(merge_request.source_branch,
+ href: project_commits_path(merge_request.source_project, merge_request.source_branch))
+ end
+ end
+
+ context 'when source project is a forked project' do
+ let(:source_project) { fork_project(project, user, repository: true) }
+
+ before do
+ visit project_pipeline_path(source_project, pipeline)
+ end
+
+ it 'shows the pipeline information' do
+ within '.pipeline-info' do
+ expect(page).to have_content("#{pipeline.statuses.count} jobs " \
+ "for !#{merge_request.iid} " \
+ "with #{merge_request.source_branch}")
+ expect(page).to have_link("!#{merge_request.iid}",
+ href: project_merge_request_path(project, merge_request))
+ expect(page).to have_link(merge_request.source_branch,
+ href: project_commits_path(merge_request.source_project, merge_request.source_branch))
+ end
+ end
+ end
+ end
+
+ context 'when pipeline is merge request pipeline' do
+ let(:source_project) { project }
+ let(:target_project) { project }
+
+ let(:merge_request) do
+ create(:merge_request,
+ :with_merge_request_pipeline,
+ source_project: source_project,
+ target_project: target_project,
+ merge_sha: project.commit.id)
+ end
+
+ let(:pipeline) do
+ merge_request.all_pipelines.last
+ end
+
+ before do
+ pipeline.update(user: user)
+ end
+
+ it 'shows the pipeline information' do
+ within '.pipeline-info' do
+ expect(page).to have_content("#{pipeline.statuses.count} jobs " \
+ "for !#{merge_request.iid} " \
+ "with #{merge_request.source_branch} " \
+ "into #{merge_request.target_branch}")
+ expect(page).to have_link("!#{merge_request.iid}",
+ href: project_merge_request_path(project, merge_request))
+ expect(page).to have_link(merge_request.source_branch,
+ href: project_commits_path(merge_request.source_project, merge_request.source_branch))
+ expect(page).to have_link(merge_request.target_branch,
+ href: project_commits_path(merge_request.target_project, merge_request.target_branch))
+ end
+ end
+
+ context 'when source project is a forked project' do
+ let(:source_project) { fork_project(project, user, repository: true) }
+
+ before do
+ visit project_pipeline_path(source_project, pipeline)
+ end
+
+ it 'shows the pipeline information' do
+ within '.pipeline-info' do
+ expect(page).to have_content("#{pipeline.statuses.count} jobs " \
+ "for !#{merge_request.iid} " \
+ "with #{merge_request.source_branch} " \
+ "into #{merge_request.target_branch}")
+ expect(page).to have_link("!#{merge_request.iid}",
+ href: project_merge_request_path(project, merge_request))
+ expect(page).to have_link(merge_request.source_branch,
+ href: project_commits_path(merge_request.source_project, merge_request.source_branch))
+ expect(page).to have_link(merge_request.target_branch,
+ href: project_commits_path(merge_request.target_project, merge_request.target_branch))
+ end
+ end
+ end
+ end
end
context 'when user does not have access to read jobs' do
@@ -686,9 +828,9 @@ describe 'Pipeline', :js do
visit project_pipeline_path(project, pipeline)
end
- it 'contains badge that indicates merge request pipeline' do
+ it 'contains badge that indicates detached merge request pipeline' do
page.within(all('.well-segment')[1]) do
- expect(page).to have_content 'merge request'
+ expect(page).to have_content 'detached'
end
end
end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 88d7c9ef8bd..7ca3b3d8edd 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe 'Pipelines', :js do
+ include ProjectForksHelper
+
let(:project) { create(:project) }
context 'when user is logged in' do
@@ -165,6 +167,99 @@ describe 'Pipelines', :js do
end
end
+ context 'when pipeline is detached merge request pipeline' do
+ let(:merge_request) do
+ create(:merge_request,
+ :with_detached_merge_request_pipeline,
+ source_project: source_project,
+ target_project: target_project)
+ end
+
+ let!(:pipeline) { merge_request.all_pipelines.first }
+ let(:source_project) { project }
+ let(:target_project) { project }
+
+ before do
+ visit project_pipelines_path(source_project)
+ end
+
+ shared_examples_for 'showing detached merge request pipeline information' do
+ it 'shows detached tag for the pipeline' do
+ within '.pipeline-tags' do
+ expect(page).to have_content('detached')
+ end
+ end
+
+ it 'shows the link of the merge request' do
+ within '.branch-commit' do
+ expect(page).to have_link(merge_request.iid,
+ href: project_merge_request_path(project, merge_request))
+ end
+ end
+
+ it 'does not show the ref of the pipeline' do
+ within '.branch-commit' do
+ expect(page).not_to have_link(pipeline.ref)
+ end
+ end
+ end
+
+ it_behaves_like 'showing detached merge request pipeline information'
+
+ context 'when source project is a forked project' do
+ let(:source_project) { fork_project(project, user, repository: true) }
+
+ it_behaves_like 'showing detached merge request pipeline information'
+ end
+ end
+
+ context 'when pipeline is merge request pipeline' do
+ let(:merge_request) do
+ create(:merge_request,
+ :with_merge_request_pipeline,
+ source_project: source_project,
+ target_project: target_project,
+ merge_sha: target_project.commit.sha)
+ end
+
+ let!(:pipeline) { merge_request.all_pipelines.first }
+ let(:source_project) { project }
+ let(:target_project) { project }
+
+ before do
+ visit project_pipelines_path(source_project)
+ end
+
+ shared_examples_for 'Correct merge request pipeline information' do
+ it 'does not show detached tag for the pipeline' do
+ within '.pipeline-tags' do
+ expect(page).not_to have_content('detached')
+ end
+ end
+
+ it 'shows the link of the merge request' do
+ within '.branch-commit' do
+ expect(page).to have_link(merge_request.iid,
+ href: project_merge_request_path(project, merge_request))
+ end
+ end
+
+ it 'does not show the ref of the pipeline' do
+ within '.branch-commit' do
+ expect(page).not_to have_link(pipeline.ref)
+ end
+ end
+ end
+
+ it_behaves_like 'Correct merge request pipeline information'
+
+ context 'when source project is a forked project' do
+ let(:source_project) { fork_project(project, user, repository: true) }
+
+ it_behaves_like 'Correct merge request pipeline information'
+ end
+ end
+
context 'when pipeline has configuration errors' do
let(:pipeline) do
create(:ci_pipeline, :invalid, project: project)
@@ -282,6 +377,30 @@ describe 'Pipelines', :js do
end
context 'for generic statuses' do
+ context 'when preparing' do
+ let!(:pipeline) do
+ create(:ci_empty_pipeline,
+ status: 'preparing', project: project)
+ end
+
+ let!(:status) do
+ create(:generic_commit_status,
+ :preparing, pipeline: pipeline)
+ end
+
+ before do
+ visit_project_pipelines
+ end
+
+ it 'is cancelable' do
+ expect(page).to have_selector('.js-pipelines-cancel-button')
+ end
+
+ it 'shows the pipeline as preparing' do
+ expect(page).to have_selector('.ci-preparing')
+ end
+ end
+
context 'when running' do
let!(:running) do
create(:generic_commit_status,
diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb
index 4c85abe9971..bf0c0de89b2 100644
--- a/spec/features/projects/settings/pipelines_settings_spec.rb
+++ b/spec/features/projects/settings/pipelines_settings_spec.rb
@@ -110,6 +110,37 @@ describe "Projects > Settings > Pipelines settings" do
expect(page).not_to have_content('instance enabled')
end
end
+
+ context 'when auto devops is turned on at the group level' do
+ before do
+ project.update!(namespace: create(:group, :auto_devops_enabled))
+ end
+
+ it 'renders group enabled badge' do
+ visit project_settings_ci_cd_path(project)
+
+ page.within '#autodevops-settings' do
+ expect(page).to have_content('group enabled')
+ expect(find_field('project_auto_devops_attributes_enabled')).to be_checked
+ end
+ end
+ end
+
+ context 'when auto devops is turned on at the parent group level', :nested_groups do
+ before do
+ group = create(:group, parent: create(:group, :auto_devops_enabled))
+ project.update!(namespace: group)
+ end
+
+ it 'renders group enabled badge' do
+ visit project_settings_ci_cd_path(project)
+
+ page.within '#autodevops-settings' do
+ expect(page).to have_content('group enabled')
+ expect(find_field('project_auto_devops_attributes_enabled')).to be_checked
+ end
+ end
+ end
end
end
diff --git a/spec/features/projects/show/user_sees_git_instructions_spec.rb b/spec/features/projects/show/user_sees_git_instructions_spec.rb
index ffa80235083..0d59ef4a727 100644
--- a/spec/features/projects/show/user_sees_git_instructions_spec.rb
+++ b/spec/features/projects/show/user_sees_git_instructions_spec.rb
@@ -13,7 +13,7 @@ describe 'Projects > Show > User sees Git instructions' do
it 'shows Git command line instructions' do
click_link 'Create empty repository'
- page.within '.empty_wrapper' do
+ page.within '.empty-wrapper' do
expect(page).to have_content('Command line instructions')
end
end
diff --git a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
index dcca1d388c7..58bd20d7551 100644
--- a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
+++ b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
@@ -20,18 +20,18 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
visit project_path(project)
end
- it 'no Auto DevOps button if can not manage pipelines' do
- page.within('.project-buttons') do
- expect(page).not_to have_link('Enable Auto DevOps')
- expect(page).not_to have_link('Auto DevOps enabled')
- end
- end
-
- it '"Auto DevOps enabled" button not linked' do
+ it 'Project buttons are not visible' do
visit project_path(project)
page.within('.project-buttons') do
- expect(page).to have_text('Auto DevOps enabled')
+ expect(page).not_to have_link('New file')
+ expect(page).not_to have_link('Add README')
+ expect(page).not_to have_link('Add CHANGELOG')
+ expect(page).not_to have_link('Add CONTRIBUTING')
+ expect(page).not_to have_link('Enable Auto DevOps')
+ expect(page).not_to have_link('Auto DevOps enabled')
+ expect(page).not_to have_link('Add Kubernetes cluster')
+ expect(page).not_to have_link('Kubernetes configured')
end
end
end
@@ -61,46 +61,6 @@ describe 'Projects > Show > User sees setup shortcut buttons' do
expect(page).to have_link('Add license', href: presenter.add_license_path)
end
end
-
- describe 'Auto DevOps button' do
- context 'when Auto DevOps is enabled' do
- it '"Auto DevOps enabled" anchor linked to settings page' do
- visit project_path(project)
-
- page.within('.project-buttons') do
- expect(page).to have_link('Auto DevOps enabled', href: project_settings_ci_cd_path(project, anchor: 'autodevops-settings'))
- end
- end
- end
-
- context 'when Auto DevOps is not enabled' do
- let(:project) { create(:project, :public, :empty_repo, auto_devops_attributes: { enabled: false }) }
-
- it '"Enable Auto DevOps" button linked to settings page' do
- page.within('.project-buttons') do
- expect(page).to have_link('Enable Auto DevOps', href: project_settings_ci_cd_path(project, anchor: 'autodevops-settings'))
- end
- end
- end
- end
-
- describe 'Kubernetes cluster button' do
- it '"Add Kubernetes cluster" button linked to clusters page' do
- page.within('.project-buttons') do
- expect(page).to have_link('Add Kubernetes cluster', href: new_project_cluster_path(project))
- end
- end
-
- it '"Kubernetes cluster" anchor linked to cluster page' do
- cluster = create(:cluster, :provided_by_gcp, projects: [project])
-
- visit project_path(project)
-
- page.within('.project-buttons') do
- expect(page).to have_link('Kubernetes configured', href: project_cluster_path(project, cluster))
- end
- end
- end
end
end
diff --git a/spec/features/search/user_searches_for_users_spec.rb b/spec/features/search/user_searches_for_users_spec.rb
new file mode 100644
index 00000000000..3725143291d
--- /dev/null
+++ b/spec/features/search/user_searches_for_users_spec.rb
@@ -0,0 +1,83 @@
+require 'spec_helper'
+
+describe 'User searches for users' do
+ context 'when on the dashboard' do
+ it 'finds the user' do
+ create(:user, username: 'gob_bluth', name: 'Gob Bluth')
+
+ sign_in(create(:user))
+
+ visit dashboard_projects_path
+
+ fill_in 'search', with: 'gob'
+ click_button 'Go'
+
+ expect(page).to have_content('Users 1')
+
+ click_on('Users 1')
+
+ expect(page).to have_content('Gob Bluth')
+ expect(page).to have_content('@gob_bluth')
+ end
+ end
+
+ context 'when on the project page' do
+ it 'finds the user belonging to the project' do
+ project = create(:project)
+
+ user1 = create(:user, username: 'gob_bluth', name: 'Gob Bluth')
+ create(:project_member, :developer, user: user1, project: project)
+
+ user2 = create(:user, username: 'michael_bluth', name: 'Michael Bluth')
+ create(:project_member, :developer, user: user2, project: project)
+
+ create(:user, username: 'gob_2018', name: 'George Oscar Bluth')
+
+ sign_in(user1)
+
+ visit project_path(project)
+
+ fill_in 'search', with: 'gob'
+ click_button 'Go'
+
+ expect(page).to have_content('Gob Bluth')
+ expect(page).to have_content('@gob_bluth')
+
+ expect(page).not_to have_content('Michael Bluth')
+ expect(page).not_to have_content('@michael_bluth')
+
+ expect(page).not_to have_content('George Oscar Bluth')
+ expect(page).not_to have_content('@gob_2018')
+ end
+ end
+
+ context 'when on the group page' do
+ it 'finds the user belonging to the group' do
+ group = create(:group)
+
+ user1 = create(:user, username: 'gob_bluth', name: 'Gob Bluth')
+ create(:group_member, :developer, user: user1, group: group)
+
+ user2 = create(:user, username: 'michael_bluth', name: 'Michael Bluth')
+ create(:group_member, :developer, user: user2, group: group)
+
+ create(:user, username: 'gob_2018', name: 'George Oscar Bluth')
+
+ sign_in(user1)
+
+ visit group_path(group)
+
+ fill_in 'search', with: 'gob'
+ click_button 'Go'
+
+ expect(page).to have_content('Gob Bluth')
+ expect(page).to have_content('@gob_bluth')
+
+ expect(page).not_to have_content('Michael Bluth')
+ expect(page).not_to have_content('@michael_bluth')
+
+ expect(page).not_to have_content('George Oscar Bluth')
+ expect(page).not_to have_content('@gob_2018')
+ end
+ end
+end
diff --git a/spec/features/security/group/private_access_spec.rb b/spec/features/security/group/private_access_spec.rb
index 3238e07fe15..a776169a8e5 100644
--- a/spec/features/security/group/private_access_spec.rb
+++ b/spec/features/security/group/private_access_spec.rb
@@ -27,7 +27,7 @@ describe 'Private Group access' do
it { is_expected.to be_allowed_for(:developer).of(group) }
it { is_expected.to be_allowed_for(:reporter).of(group) }
it { is_expected.to be_allowed_for(:guest).of(group) }
- it { is_expected.to be_denied_for(project_guest) }
+ it { is_expected.to be_allowed_for(project_guest) }
it { is_expected.to be_denied_for(:user) }
it { is_expected.to be_denied_for(:external) }
it { is_expected.to be_denied_for(:visitor) }
@@ -42,7 +42,7 @@ describe 'Private Group access' do
it { is_expected.to be_allowed_for(:developer).of(group) }
it { is_expected.to be_allowed_for(:reporter).of(group) }
it { is_expected.to be_allowed_for(:guest).of(group) }
- it { is_expected.to be_denied_for(project_guest) }
+ it { is_expected.to be_allowed_for(project_guest) }
it { is_expected.to be_denied_for(:user) }
it { is_expected.to be_denied_for(:external) }
it { is_expected.to be_denied_for(:visitor) }
@@ -58,7 +58,7 @@ describe 'Private Group access' do
it { is_expected.to be_allowed_for(:developer).of(group) }
it { is_expected.to be_allowed_for(:reporter).of(group) }
it { is_expected.to be_allowed_for(:guest).of(group) }
- it { is_expected.to be_denied_for(project_guest) }
+ it { is_expected.to be_allowed_for(project_guest) }
it { is_expected.to be_denied_for(:user) }
it { is_expected.to be_denied_for(:external) }
it { is_expected.to be_denied_for(:visitor) }
@@ -73,7 +73,7 @@ describe 'Private Group access' do
it { is_expected.to be_allowed_for(:developer).of(group) }
it { is_expected.to be_allowed_for(:reporter).of(group) }
it { is_expected.to be_allowed_for(:guest).of(group) }
- it { is_expected.to be_denied_for(project_guest) }
+ it { is_expected.to be_allowed_for(project_guest) }
it { is_expected.to be_denied_for(:user) }
it { is_expected.to be_denied_for(:external) }
it { is_expected.to be_denied_for(:visitor) }
@@ -96,12 +96,9 @@ describe 'Private Group access' do
describe 'GET /groups/:path for shared projects' do
let(:project) { create(:project, :public) }
+
before do
- Projects::GroupLinks::CreateService.new(
- project,
- create(:user),
- link_group_access: ProjectGroupLink::DEVELOPER
- ).execute(group)
+ create(:project_group_link, project: project, group: group)
end
subject { group_path(group) }
diff --git a/spec/features/user_opens_link_to_comment.rb b/spec/features/user_opens_link_to_comment.rb
new file mode 100644
index 00000000000..f1e07e55799
--- /dev/null
+++ b/spec/features/user_opens_link_to_comment.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'User opens link to comment', :js do
+ let(:project) { create(:project, :public) }
+ let(:note) { create(:note_on_issue, project: project) }
+
+ context 'authenticated user' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'switches to all activity and does not show error message' do
+ create(:user_preference, user: user, issue_notes_filter: UserPreference::NOTES_FILTERS[:only_activity])
+
+ visit Gitlab::UrlBuilder.build(note)
+
+ expect(page.find('#discussion-filter-dropdown')).to have_content('Show all activity')
+ expect(page).not_to have_content('Something went wrong while fetching comments')
+ end
+ end
+
+ context 'anonymous user' do
+ it 'does not show error message' do
+ visit Gitlab::UrlBuilder.build(note)
+
+ expect(page).not_to have_content('Something went wrong while fetching comments')
+ end
+ end
+end
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index ad856bd062e..368a814874f 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -434,16 +434,22 @@ describe 'Login' do
context 'within the grace period' do
it 'redirects to two-factor configuration page' do
- expect(authentication_metrics)
- .to increment(:user_authenticated_counter)
-
- gitlab_sign_in(user)
-
- expect(current_path).to eq profile_two_factor_auth_path
- expect(page).to have_content(
- 'The group settings for Group 1 and Group 2 require you to enable ' \
- 'Two-Factor Authentication for your account. You need to do this ' \
- 'before ')
+ Timecop.freeze do
+ expect(authentication_metrics)
+ .to increment(:user_authenticated_counter)
+
+ gitlab_sign_in(user)
+
+ expect(current_path).to eq profile_two_factor_auth_path
+ expect(page).to have_content(
+ 'The group settings for Group 1 and Group 2 require you to enable '\
+ 'Two-Factor Authentication for your account. '\
+ 'You can leave Group 1 and leave Group 2. '\
+ 'You need to do this '\
+ 'before '\
+ "#{(Time.zone.now + 2.days).strftime("%a, %-d %b %Y %H:%M:%S %z")}"
+ )
+ end
end
it 'allows skipping two-factor configuration', :js do
@@ -500,7 +506,8 @@ describe 'Login' do
expect(current_path).to eq profile_two_factor_auth_path
expect(page).to have_content(
'The group settings for Group 1 and Group 2 require you to enable ' \
- 'Two-Factor Authentication for your account.'
+ 'Two-Factor Authentication for your account. '\
+ 'You can leave Group 1 and leave Group 2.'
)
end
end
diff --git a/spec/finders/group_projects_finder_spec.rb b/spec/finders/group_projects_finder_spec.rb
index d6d95906f5e..f8fcc2d0e40 100644
--- a/spec/finders/group_projects_finder_spec.rb
+++ b/spec/finders/group_projects_finder_spec.rb
@@ -1,26 +1,7 @@
require 'spec_helper'
describe GroupProjectsFinder do
- let(:group) { create(:group) }
- let(:subgroup) { create(:group, parent: group) }
- let(:current_user) { create(:user) }
- let(:options) { {} }
-
- let(:finder) { described_class.new(group: group, current_user: current_user, options: options) }
-
- let!(:public_project) { create(:project, :public, group: group, path: '1') }
- let!(:private_project) { create(:project, :private, group: group, path: '2') }
- let!(:shared_project_1) { create(:project, :public, path: '3') }
- let!(:shared_project_2) { create(:project, :private, path: '4') }
- let!(:shared_project_3) { create(:project, :internal, path: '5') }
- let!(:subgroup_project) { create(:project, :public, path: '6', group: subgroup) }
- let!(:subgroup_private_project) { create(:project, :private, path: '7', group: subgroup) }
-
- before do
- shared_project_1.project_group_links.create(group_access: Gitlab::Access::MAINTAINER, group: group)
- shared_project_2.project_group_links.create(group_access: Gitlab::Access::MAINTAINER, group: group)
- shared_project_3.project_group_links.create(group_access: Gitlab::Access::MAINTAINER, group: group)
- end
+ include_context 'GroupProjectsFinder context'
subject { finder.execute }
@@ -144,6 +125,24 @@ describe GroupProjectsFinder do
end
end
+ describe 'with an admin current user' do
+ let(:current_user) { create(:admin) }
+
+ context "only shared" do
+ let(:options) { { only_shared: true } }
+ it { is_expected.to eq([shared_project_3, shared_project_2, shared_project_1]) }
+ end
+
+ context "only owned" do
+ let(:options) { { only_owned: true } }
+ it { is_expected.to eq([private_project, public_project]) }
+ end
+
+ context "all" do
+ it { is_expected.to eq([shared_project_3, shared_project_2, shared_project_1, private_project, public_project]) }
+ end
+ end
+
describe "no user" do
context "only shared" do
let(:options) { { only_shared: true } }
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index 55efab7dec3..00b6cad1a66 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -1,45 +1,10 @@
require 'spec_helper'
describe IssuesFinder do
- set(:user) { create(:user) }
- set(:user2) { create(:user) }
- set(:group) { create(:group) }
- set(:subgroup) { create(:group, parent: group) }
- set(:project1) { create(:project, group: group) }
- set(:project2) { create(:project) }
- set(:project3) { create(:project, group: subgroup) }
- set(:milestone) { create(:milestone, project: project1) }
- set(:label) { create(:label, project: project2) }
- set(:issue1) { create(:issue, author: user, assignees: [user], project: project1, milestone: milestone, title: 'gitlab', created_at: 1.week.ago, updated_at: 1.week.ago) }
- set(:issue2) { create(:issue, author: user, assignees: [user], project: project2, description: 'gitlab', created_at: 1.week.from_now, updated_at: 1.week.from_now) }
- set(:issue3) { create(:issue, author: user2, assignees: [user2], project: project2, title: 'tanuki', description: 'tanuki', created_at: 2.weeks.from_now, updated_at: 2.weeks.from_now) }
- set(:issue4) { create(:issue, project: project3) }
- set(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: issue1) }
- set(:award_emoji2) { create(:award_emoji, name: 'thumbsup', user: user2, awardable: issue2) }
- set(:award_emoji3) { create(:award_emoji, name: 'thumbsdown', user: user, awardable: issue3) }
+ include_context 'IssuesFinder context'
describe '#execute' do
- let!(:closed_issue) { create(:issue, author: user2, assignees: [user2], project: project2, state: 'closed') }
- let!(:label_link) { create(:label_link, label: label, target: issue2) }
- let(:search_user) { user }
- let(:params) { {} }
- let(:issues) { described_class.new(search_user, params.reverse_merge(scope: scope, state: 'opened')).execute }
-
- before(:context) do
- project1.add_maintainer(user)
- project2.add_developer(user)
- project2.add_developer(user2)
- project3.add_developer(user)
-
- issue1
- issue2
- issue3
- issue4
-
- award_emoji1
- award_emoji2
- award_emoji3
- end
+ include_context 'IssuesFinder#execute context'
context 'scope: all' do
let(:scope) { 'all' }
@@ -56,6 +21,21 @@ describe IssuesFinder do
end
end
+ context 'filtering by assignee usernames' do
+ set(:user3) { create(:user) }
+ let(:params) { { assignee_username: [user2.username, user3.username] } }
+
+ before do
+ project2.add_developer(user3)
+
+ issue3.assignees = [user2, user3]
+ end
+
+ it 'returns issues assigned to those users' do
+ expect(issues).to contain_exactly(issue3)
+ end
+ end
+
context 'filtering by no assignee' do
let(:params) { { assignee_id: 'None' } }
@@ -220,6 +200,7 @@ describe IssuesFinder do
let(:yesterday) { Date.today - 1.day }
let(:tomorrow) { Date.today + 1.day }
let(:two_days_ago) { Date.today - 2.days }
+ let(:three_days_ago) { Date.today - 3.days }
let(:milestones) do
[
@@ -227,6 +208,8 @@ describe IssuesFinder do
create(:milestone, project: project_started_1_and_2, title: '1.0', start_date: two_days_ago),
create(:milestone, project: project_started_1_and_2, title: '2.0', start_date: yesterday),
create(:milestone, project: project_started_1_and_2, title: '3.0', start_date: tomorrow),
+ create(:milestone, :closed, project: project_started_1_and_2, title: '4.0', start_date: three_days_ago),
+ create(:milestone, :closed, project: project_started_8, title: '6.0', start_date: three_days_ago),
create(:milestone, project: project_started_8, title: '7.0'),
create(:milestone, project: project_started_8, title: '8.0', start_date: yesterday),
create(:milestone, project: project_started_8, title: '9.0', start_date: tomorrow)
@@ -640,6 +623,16 @@ describe IssuesFinder do
expect(subject).to include(public_issue, confidential_issue)
end
end
+
+ context 'for an admin' do
+ let(:admin_user) { create(:user, :admin) }
+
+ subject { described_class.new(admin_user, params).with_confidentiality_access_check }
+
+ it 'returns all issues' do
+ expect(subject).to include(public_issue, confidential_issue)
+ end
+ end
end
context 'when searching within a specific project' do
@@ -707,6 +700,22 @@ describe IssuesFinder do
subject
end
end
+
+ context 'for an admin' do
+ let(:admin_user) { create(:user, :admin) }
+
+ subject { described_class.new(admin_user, params).with_confidentiality_access_check }
+
+ it 'returns all issues' do
+ expect(subject).to include(public_issue, confidential_issue)
+ end
+
+ it 'does not filter by confidentiality' do
+ expect(Issue).not_to receive(:where).with(a_string_matching('confidential'), anything)
+
+ subject
+ end
+ end
end
end
diff --git a/spec/finders/labels_finder_spec.rb b/spec/finders/labels_finder_spec.rb
index 9abc52aa664..3f060ba0553 100644
--- a/spec/finders/labels_finder_spec.rb
+++ b/spec/finders/labels_finder_spec.rb
@@ -209,6 +209,12 @@ describe LabelsFinder do
expect(finder.execute).to eq [project_label_1]
end
+
+ it 'returns labels matching a single character' do
+ finder = described_class.new(user, search: '(')
+
+ expect(finder.execute).to eq [group_label_1]
+ end
end
context 'filter by subscription' do
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 503b88fcbad..56136eb84bc 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -1,72 +1,24 @@
require 'spec_helper'
describe MergeRequestsFinder do
- include ProjectForksHelper
-
- # We need to explicitly permit Gitaly N+1s because of the specs that use
- # :request_store. Gitaly N+1 detection is only enabled when :request_store is,
- # but we don't care about potential N+1s when we're just creating several
- # projects in the setup phase.
- def create_project_without_n_plus_1(*args)
- Gitlab::GitalyClient.allow_n_plus_1_calls do
- create(:project, :public, *args)
- end
- end
-
context "multiple projects with merge requests" do
- let(:user) { create :user }
- let(:user2) { create :user }
-
- let(:group) { create(:group) }
- let(:subgroup) { create(:group, parent: group) }
- let(:project1) { create_project_without_n_plus_1(group: group) }
- let(:project2) do
- Gitlab::GitalyClient.allow_n_plus_1_calls do
- fork_project(project1, user)
- end
- end
- let(:project3) do
- Gitlab::GitalyClient.allow_n_plus_1_calls do
- p = fork_project(project1, user)
- p.update!(archived: true)
- p
- end
- end
- let(:project4) { create_project_without_n_plus_1(:repository, group: subgroup) }
- let(:project5) { create_project_without_n_plus_1(group: subgroup) }
- let(:project6) { create_project_without_n_plus_1(group: subgroup) }
-
- let!(:merge_request1) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project1) }
- let!(:merge_request2) { create(:merge_request, :conflict, author: user, source_project: project2, target_project: project1, state: 'closed') }
- let!(:merge_request3) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project2, state: 'locked', title: 'thing WIP thing') }
- let!(:merge_request4) { create(:merge_request, :simple, author: user, source_project: project3, target_project: project3, title: 'WIP thing') }
- let!(:merge_request5) { create(:merge_request, :simple, author: user, source_project: project4, target_project: project4, title: '[WIP]') }
- let!(:merge_request6) { create(:merge_request, :simple, author: user, source_project: project5, target_project: project5, title: 'WIP: thing') }
- let!(:merge_request7) { create(:merge_request, :simple, author: user, source_project: project6, target_project: project6, title: 'wip thing') }
- let!(:merge_request8) { create(:merge_request, :simple, author: user, source_project: project1, target_project: project1, title: '[wip] thing') }
- let!(:merge_request9) { create(:merge_request, :simple, author: user, source_project: project1, target_project: project2, title: 'wip: thing') }
-
- before do
- project1.add_maintainer(user)
- project2.add_developer(user)
- project3.add_developer(user)
- project2.add_developer(user2)
- project4.add_developer(user)
- project5.add_developer(user)
- project6.add_developer(user)
- end
+ include_context 'MergeRequestsFinder multiple projects with merge requests context'
describe '#execute' do
it 'filters by scope' do
params = { scope: 'authored', state: 'opened' }
+
merge_requests = described_class.new(user, params).execute
- expect(merge_requests.size).to eq(7)
+
+ expect(merge_requests).to contain_exactly(merge_request1, merge_request4, merge_request5)
end
it 'filters by project' do
params = { project_id: project1.id, scope: 'authored', state: 'opened' }
+
merge_requests = described_class.new(user, params).execute
- expect(merge_requests.size).to eq(2)
+
+ expect(merge_requests).to contain_exactly(merge_request1)
end
it 'filters by commit sha' do
@@ -79,24 +31,15 @@ describe MergeRequestsFinder do
end
context 'filtering by group' do
- it 'includes all merge requests when user has access' do
- params = { group_id: group.id }
-
- merge_requests = described_class.new(user, params).execute
-
- expect(merge_requests.size).to eq(3)
- end
-
- it 'excludes merge requests from projects the user does not have access to' do
- private_project = create_project_without_n_plus_1(:private, group: group)
- private_mr = create(:merge_request, :simple, author: user, source_project: private_project, target_project: private_project)
+ it 'includes all merge requests when user has access, excluding merge requests from projects the user does not have access to' do
+ private_project = allow_gitaly_n_plus_1 { create(:project, :private, group: group) }
+ private_project.add_guest(user)
+ create(:merge_request, :simple, author: user, source_project: private_project, target_project: private_project)
params = { group_id: group.id }
- private_project.add_guest(user)
merge_requests = described_class.new(user, params).execute
- expect(merge_requests.size).to eq(3)
- expect(merge_requests).not_to include(private_mr)
+ expect(merge_requests).to contain_exactly(merge_request1, merge_request2)
end
it 'filters by group including subgroups', :nested_groups do
@@ -104,14 +47,16 @@ describe MergeRequestsFinder do
merge_requests = described_class.new(user, params).execute
- expect(merge_requests.size).to eq(6)
+ expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request5)
end
end
it 'filters by non_archived' do
params = { non_archived: true }
+
merge_requests = described_class.new(user, params).execute
- expect(merge_requests.size).to eq(8)
+
+ expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request3, merge_request5)
end
it 'filters by iid' do
@@ -146,41 +91,45 @@ describe MergeRequestsFinder do
expect(merge_requests).to contain_exactly(merge_request3)
end
- it 'filters by wip' do
- params = { wip: 'yes' }
+ describe 'WIP state' do
+ let!(:wip_merge_request1) { create(:merge_request, :simple, author: user, source_project: project5, target_project: project5, title: 'WIP: thing') }
+ let!(:wip_merge_request2) { create(:merge_request, :simple, author: user, source_project: project6, target_project: project6, title: 'wip thing') }
+ let!(:wip_merge_request3) { create(:merge_request, :simple, author: user, source_project: project1, target_project: project1, title: '[wip] thing') }
+ let!(:wip_merge_request4) { create(:merge_request, :simple, author: user, source_project: project1, target_project: project2, title: 'wip: thing') }
- merge_requests = described_class.new(user, params).execute
+ it 'filters by wip' do
+ params = { wip: 'yes' }
- expect(merge_requests).to contain_exactly(merge_request4, merge_request5, merge_request6, merge_request7, merge_request8, merge_request9)
- end
+ merge_requests = described_class.new(user, params).execute
- it 'filters by not wip' do
- params = { wip: 'no' }
+ expect(merge_requests).to contain_exactly(merge_request4, merge_request5, wip_merge_request1, wip_merge_request2, wip_merge_request3, wip_merge_request4)
+ end
- merge_requests = described_class.new(user, params).execute
+ it 'filters by not wip' do
+ params = { wip: 'no' }
- expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request3)
- end
+ merge_requests = described_class.new(user, params).execute
- it 'returns all items if no valid wip param exists' do
- params = { wip: '' }
+ expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request3)
+ end
- merge_requests = described_class.new(user, params).execute
+ it 'returns all items if no valid wip param exists' do
+ params = { wip: '' }
- expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request3, merge_request4, merge_request5, merge_request6, merge_request7, merge_request8, merge_request9)
- end
+ merge_requests = described_class.new(user, params).execute
- it 'adds wip to scalar params' do
- scalar_params = described_class.scalar_params
+ expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request3, merge_request4, merge_request5, wip_merge_request1, wip_merge_request2, wip_merge_request3, wip_merge_request4)
+ end
+
+ it 'adds wip to scalar params' do
+ scalar_params = described_class.scalar_params
- expect(scalar_params).to include(:wip, :assignee_id)
+ expect(scalar_params).to include(:wip, :assignee_id)
+ end
end
context 'filtering by group milestone' do
- let!(:group) { create(:group, :public) }
let(:group_milestone) { create(:milestone, group: group) }
- let!(:group_member) { create(:group_member, group: group, user: user) }
- let(:params) { { milestone_title: group_milestone.title } }
before do
project2.update(namespace: group)
@@ -188,7 +137,9 @@ describe MergeRequestsFinder do
merge_request3.update(milestone: group_milestone)
end
- it 'returns issues assigned to that group milestone' do
+ it 'returns merge requests assigned to that group milestone' do
+ params = { milestone_title: group_milestone.title }
+
merge_requests = described_class.new(user, params).execute
expect(merge_requests).to contain_exactly(merge_request2, merge_request3)
@@ -285,7 +236,7 @@ describe MergeRequestsFinder do
it 'returns the number of rows for the default state' do
finder = described_class.new(user)
- expect(finder.row_count).to eq(7)
+ expect(finder.row_count).to eq(3)
end
it 'returns the number of rows for a given state' do
diff --git a/spec/finders/snippets_finder_spec.rb b/spec/finders/snippets_finder_spec.rb
index 134fb5f2c04..93287f3e9b8 100644
--- a/spec/finders/snippets_finder_spec.rb
+++ b/spec/finders/snippets_finder_spec.rb
@@ -2,7 +2,6 @@ require 'spec_helper'
describe SnippetsFinder do
include Gitlab::Allowable
- using RSpec::Parameterized::TableSyntax
describe '#initialize' do
it 'raises ArgumentError when a project and author are given' do
@@ -14,174 +13,142 @@ describe SnippetsFinder do
end
end
- context 'filter by scope' do
- let(:user) { create :user }
- let!(:snippet1) { create(:personal_snippet, :private, author: user) }
- let!(:snippet2) { create(:personal_snippet, :internal, author: user) }
- let!(:snippet3) { create(:personal_snippet, :public, author: user) }
-
- it "returns all snippets for 'all' scope" do
- snippets = described_class.new(user, scope: :all).execute
-
- expect(snippets).to include(snippet1, snippet2, snippet3)
- end
-
- it "returns all snippets for 'are_private' scope" do
- snippets = described_class.new(user, scope: :are_private).execute
+ describe '#execute' do
+ set(:user) { create(:user) }
+ set(:private_personal_snippet) { create(:personal_snippet, :private, author: user) }
+ set(:internal_personal_snippet) { create(:personal_snippet, :internal, author: user) }
+ set(:public_personal_snippet) { create(:personal_snippet, :public, author: user) }
- expect(snippets).to include(snippet1)
- expect(snippets).not_to include(snippet2, snippet3)
- end
+ context 'filter by scope' do
+ it "returns all snippets for 'all' scope" do
+ snippets = described_class.new(user, scope: :all).execute
- it "returns all snippets for 'are_internal' scope" do
- snippets = described_class.new(user, scope: :are_internal).execute
+ expect(snippets).to contain_exactly(private_personal_snippet, internal_personal_snippet, public_personal_snippet)
+ end
- expect(snippets).to include(snippet2)
- expect(snippets).not_to include(snippet1, snippet3)
- end
+ it "returns all snippets for 'are_private' scope" do
+ snippets = described_class.new(user, scope: :are_private).execute
- it "returns all snippets for 'are_private' scope" do
- snippets = described_class.new(user, scope: :are_public).execute
+ expect(snippets).to contain_exactly(private_personal_snippet)
+ end
- expect(snippets).to include(snippet3)
- expect(snippets).not_to include(snippet1, snippet2)
- end
- end
+ it "returns all snippets for 'are_internal' scope" do
+ snippets = described_class.new(user, scope: :are_internal).execute
- context 'filter by author' do
- let(:user) { create :user }
- let(:user1) { create :user }
- let!(:snippet1) { create(:personal_snippet, :private, author: user) }
- let!(:snippet2) { create(:personal_snippet, :internal, author: user) }
- let!(:snippet3) { create(:personal_snippet, :public, author: user) }
+ expect(snippets).to contain_exactly(internal_personal_snippet)
+ end
- it "returns all public and internal snippets" do
- snippets = described_class.new(user1, author: user).execute
+ it "returns all snippets for 'are_private' scope" do
+ snippets = described_class.new(user, scope: :are_public).execute
- expect(snippets).to include(snippet2, snippet3)
- expect(snippets).not_to include(snippet1)
+ expect(snippets).to contain_exactly(public_personal_snippet)
+ end
end
- it "returns internal snippets" do
- snippets = described_class.new(user, author: user, scope: :are_internal).execute
+ context 'filter by author' do
+ it 'returns all public and internal snippets' do
+ snippets = described_class.new(create(:user), author: user).execute
- expect(snippets).to include(snippet2)
- expect(snippets).not_to include(snippet1, snippet3)
- end
+ expect(snippets).to contain_exactly(internal_personal_snippet, public_personal_snippet)
+ end
- it "returns private snippets" do
- snippets = described_class.new(user, author: user, scope: :are_private).execute
+ it 'returns internal snippets' do
+ snippets = described_class.new(user, author: user, scope: :are_internal).execute
- expect(snippets).to include(snippet1)
- expect(snippets).not_to include(snippet2, snippet3)
- end
+ expect(snippets).to contain_exactly(internal_personal_snippet)
+ end
- it "returns public snippets" do
- snippets = described_class.new(user, author: user, scope: :are_public).execute
+ it 'returns private snippets' do
+ snippets = described_class.new(user, author: user, scope: :are_private).execute
- expect(snippets).to include(snippet3)
- expect(snippets).not_to include(snippet1, snippet2)
- end
+ expect(snippets).to contain_exactly(private_personal_snippet)
+ end
- it "returns all snippets" do
- snippets = described_class.new(user, author: user).execute
+ it 'returns public snippets' do
+ snippets = described_class.new(user, author: user, scope: :are_public).execute
- expect(snippets).to include(snippet1, snippet2, snippet3)
- end
+ expect(snippets).to contain_exactly(public_personal_snippet)
+ end
- it "returns only public snippets if unauthenticated user" do
- snippets = described_class.new(nil, author: user).execute
+ it 'returns all snippets' do
+ snippets = described_class.new(user, author: user).execute
- expect(snippets).to include(snippet3)
- expect(snippets).not_to include(snippet2, snippet1)
- end
+ expect(snippets).to contain_exactly(private_personal_snippet, internal_personal_snippet, public_personal_snippet)
+ end
- it 'returns all snippets for an admin' do
- admin = create(:user, :admin)
- snippets = described_class.new(admin, author: user).execute
+ it 'returns only public snippets if unauthenticated user' do
+ snippets = described_class.new(nil, author: user).execute
- expect(snippets).to include(snippet1, snippet2, snippet3)
- end
- end
+ expect(snippets).to contain_exactly(public_personal_snippet)
+ end
- context 'filter by project' do
- let(:user) { create :user }
- let(:group) { create :group, :public }
- let(:project1) { create(:project, :public, group: group) }
+ it 'returns all snippets for an admin' do
+ admin = create(:user, :admin)
+ snippets = described_class.new(admin, author: user).execute
- before do
- @snippet1 = create(:project_snippet, :private, project: project1)
- @snippet2 = create(:project_snippet, :internal, project: project1)
- @snippet3 = create(:project_snippet, :public, project: project1)
+ expect(snippets).to contain_exactly(private_personal_snippet, internal_personal_snippet, public_personal_snippet)
+ end
end
- it "returns public snippets for unauthorized user" do
- snippets = described_class.new(nil, project: project1).execute
+ context 'project snippets' do
+ let(:group) { create(:group, :public) }
+ let(:project) { create(:project, :public, group: group) }
+ let!(:private_project_snippet) { create(:project_snippet, :private, project: project) }
+ let!(:internal_project_snippet) { create(:project_snippet, :internal, project: project) }
+ let!(:public_project_snippet) { create(:project_snippet, :public, project: project) }
- expect(snippets).to include(@snippet3)
- expect(snippets).not_to include(@snippet1, @snippet2)
- end
+      it 'returns public project snippets for unauthorized user' do
+ snippets = described_class.new(nil, project: project).execute
- it "returns public and internal snippets for non project members" do
- snippets = described_class.new(user, project: project1).execute
+ expect(snippets).to contain_exactly(public_project_snippet)
+ end
- expect(snippets).to include(@snippet2, @snippet3)
- expect(snippets).not_to include(@snippet1)
- end
+ it 'returns public and internal snippets for non project members' do
+ snippets = described_class.new(user, project: project).execute
- it "returns public snippets for non project members" do
- snippets = described_class.new(user, project: project1, scope: :are_public).execute
+ expect(snippets).to contain_exactly(internal_project_snippet, public_project_snippet)
+ end
- expect(snippets).to include(@snippet3)
- expect(snippets).not_to include(@snippet1, @snippet2)
- end
+ it 'returns public snippets for non project members' do
+ snippets = described_class.new(user, project: project, scope: :are_public).execute
- it "returns internal snippets for non project members" do
- snippets = described_class.new(user, project: project1, scope: :are_internal).execute
+ expect(snippets).to contain_exactly(public_project_snippet)
+ end
- expect(snippets).to include(@snippet2)
- expect(snippets).not_to include(@snippet1, @snippet3)
- end
+ it 'returns internal snippets for non project members' do
+ snippets = described_class.new(user, project: project, scope: :are_internal).execute
- it "does not return private snippets for non project members" do
- snippets = described_class.new(user, project: project1, scope: :are_private).execute
+ expect(snippets).to contain_exactly(internal_project_snippet)
+ end
- expect(snippets).not_to include(@snippet1, @snippet2, @snippet3)
- end
+ it 'does not return private snippets for non project members' do
+ snippets = described_class.new(user, project: project, scope: :are_private).execute
- it "returns all snippets for project members" do
- project1.add_developer(user)
+ expect(snippets).to be_empty
+ end
- snippets = described_class.new(user, project: project1).execute
+ it 'returns all snippets for project members' do
+ project.add_developer(user)
- expect(snippets).to include(@snippet1, @snippet2, @snippet3)
- end
+ snippets = described_class.new(user, project: project).execute
- it "returns private snippets for project members" do
- project1.add_developer(user)
+ expect(snippets).to contain_exactly(private_project_snippet, internal_project_snippet, public_project_snippet)
+ end
- snippets = described_class.new(user, project: project1, scope: :are_private).execute
+ it 'returns private snippets for project members' do
+ project.add_developer(user)
- expect(snippets).to include(@snippet1)
- end
+ snippets = described_class.new(user, project: project, scope: :are_private).execute
- it 'returns all snippets for an admin' do
- admin = create(:user, :admin)
- snippets = described_class.new(admin, project: project1).execute
+ expect(snippets).to contain_exactly(private_project_snippet)
+ end
- expect(snippets).to include(@snippet1, @snippet2, @snippet3)
- end
- end
+ it 'returns all snippets for an admin' do
+ admin = create(:user, :admin)
+ snippets = described_class.new(admin, project: project).execute
- describe '#execute' do
- let(:project) { create(:project, :public) }
- let!(:project_snippet) { create(:project_snippet, :public, project: project) }
- let!(:personal_snippet) { create(:personal_snippet, :public) }
- let(:user) { create(:user) }
- subject(:finder) { described_class.new(user) }
-
- it 'returns project- and personal snippets' do
- expect(finder.execute).to contain_exactly(project_snippet, personal_snippet)
+ expect(snippets).to contain_exactly(private_project_snippet, internal_project_snippet, public_project_snippet)
+ end
end
context 'when the user cannot read cross project' do
@@ -191,7 +158,7 @@ describe SnippetsFinder do
end
it 'returns only personal snippets when the user cannot read cross project' do
- expect(finder.execute).to contain_exactly(personal_snippet)
+ expect(described_class.new(user).execute).to contain_exactly(private_personal_snippet, internal_personal_snippet, public_personal_snippet)
end
end
end
diff --git a/spec/finders/users_finder_spec.rb b/spec/finders/users_finder_spec.rb
index fecf97dc641..d71d3c99272 100644
--- a/spec/finders/users_finder_spec.rb
+++ b/spec/finders/users_finder_spec.rb
@@ -2,10 +2,7 @@ require 'spec_helper'
describe UsersFinder do
describe '#execute' do
- let!(:user1) { create(:user, username: 'johndoe') }
- let!(:user2) { create(:user, :blocked, username: 'notsorandom') }
- let!(:external_user) { create(:user, :external) }
- let!(:omniauth_user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
+ include_context 'UsersFinder#execute filter by project context'
context 'with a normal user' do
let(:user) { create(:user) }
@@ -13,43 +10,43 @@ describe UsersFinder do
it 'returns all users' do
users = described_class.new(user).execute
- expect(users).to contain_exactly(user, user1, user2, omniauth_user)
+ expect(users).to contain_exactly(user, normal_user, blocked_user, omniauth_user)
end
it 'filters by username' do
users = described_class.new(user, username: 'johndoe').execute
- expect(users).to contain_exactly(user1)
+ expect(users).to contain_exactly(normal_user)
end
it 'filters by username (case insensitive)' do
users = described_class.new(user, username: 'joHNdoE').execute
- expect(users).to contain_exactly(user1)
+ expect(users).to contain_exactly(normal_user)
end
it 'filters by search' do
users = described_class.new(user, search: 'orando').execute
- expect(users).to contain_exactly(user2)
+ expect(users).to contain_exactly(blocked_user)
end
it 'filters by blocked users' do
users = described_class.new(user, blocked: true).execute
- expect(users).to contain_exactly(user2)
+ expect(users).to contain_exactly(blocked_user)
end
it 'filters by active users' do
users = described_class.new(user, active: true).execute
- expect(users).to contain_exactly(user, user1, omniauth_user)
+ expect(users).to contain_exactly(user, normal_user, omniauth_user)
end
it 'returns no external users' do
users = described_class.new(user, external: true).execute
- expect(users).to contain_exactly(user, user1, user2, omniauth_user)
+ expect(users).to contain_exactly(user, normal_user, blocked_user, omniauth_user)
end
it 'filters by created_at' do
@@ -69,7 +66,7 @@ describe UsersFinder do
custom_attributes: { foo: 'bar' }
).execute
- expect(users).to contain_exactly(user, user1, user2, omniauth_user)
+ expect(users).to contain_exactly(user, normal_user, blocked_user, omniauth_user)
end
end
@@ -85,20 +82,20 @@ describe UsersFinder do
it 'returns all users' do
users = described_class.new(admin).execute
- expect(users).to contain_exactly(admin, user1, user2, external_user, omniauth_user)
+ expect(users).to contain_exactly(admin, normal_user, blocked_user, external_user, omniauth_user)
end
it 'filters by custom attributes' do
- create :user_custom_attribute, user: user1, key: 'foo', value: 'foo'
- create :user_custom_attribute, user: user1, key: 'bar', value: 'bar'
- create :user_custom_attribute, user: user2, key: 'foo', value: 'foo'
+ create :user_custom_attribute, user: normal_user, key: 'foo', value: 'foo'
+ create :user_custom_attribute, user: normal_user, key: 'bar', value: 'bar'
+ create :user_custom_attribute, user: blocked_user, key: 'foo', value: 'foo'
users = described_class.new(
admin,
custom_attributes: { foo: 'foo', bar: 'bar' }
).execute
- expect(users).to contain_exactly(user1)
+ expect(users).to contain_exactly(normal_user)
end
end
end
diff --git a/spec/fixtures/api/schemas/board.json b/spec/fixtures/api/schemas/board.json
index 03aca4a3cc0..7c146647948 100644
--- a/spec/fixtures/api/schemas/board.json
+++ b/spec/fixtures/api/schemas/board.json
@@ -6,6 +6,5 @@
"properties" : {
"id": { "type": "integer" },
"name": { "type": "string" }
- },
- "additionalProperties": false
+ }
}
diff --git a/spec/fixtures/api/schemas/entities/issue.json b/spec/fixtures/api/schemas/entities/issue.json
index 00abe73ec8a..162fb9c8108 100644
--- a/spec/fixtures/api/schemas/entities/issue.json
+++ b/spec/fixtures/api/schemas/entities/issue.json
@@ -38,6 +38,5 @@
"items": { "$ref": "label.json" }
},
"assignees": { "type": ["array", "null"] }
- },
- "additionalProperties": false
+ }
}
diff --git a/spec/fixtures/api/schemas/entities/issue_boards.json b/spec/fixtures/api/schemas/entities/issue_boards.json
index 0ac1d9468c8..742f7be5485 100644
--- a/spec/fixtures/api/schemas/entities/issue_boards.json
+++ b/spec/fixtures/api/schemas/entities/issue_boards.json
@@ -10,6 +10,5 @@
"items": { "$ref": "issue_board.json" }
},
"size": { "type": "integer" }
- },
- "additionalProperties": false
+ }
}
diff --git a/spec/fixtures/api/schemas/entities/merge_request_widget.json b/spec/fixtures/api/schemas/entities/merge_request_widget.json
index 67c209f3fc3..6b1cd60c25d 100644
--- a/spec/fixtures/api/schemas/entities/merge_request_widget.json
+++ b/spec/fixtures/api/schemas/entities/merge_request_widget.json
@@ -52,6 +52,7 @@
"mergeable_discussions_state": { "type": "boolean" },
"conflicts_can_be_resolved_in_ui": { "type": "boolean" },
"branch_missing": { "type": "boolean" },
+ "commits_count": { "type": ["integer", "null"] },
"has_conflicts": { "type": "boolean" },
"can_be_merged": { "type": "boolean" },
"mergeable": { "type": "boolean" },
@@ -125,6 +126,5 @@
"can_receive_suggestion": { "type": "boolean" },
"source_branch_protected": { "type": "boolean" },
"conflicts_docs_path": { "type": ["string", "null"] }
- },
- "additionalProperties": false
+ }
}
diff --git a/spec/fixtures/api/schemas/issue.json b/spec/fixtures/api/schemas/issue.json
index a83ec55cede..77de9ae4f9f 100644
--- a/spec/fixtures/api/schemas/issue.json
+++ b/spec/fixtures/api/schemas/issue.json
@@ -28,7 +28,7 @@
"items": { "$ref": "entities/label.json" }
},
"assignee": {
- "id": { "type": "integet" },
+ "id": { "type": "integer" },
"name": { "type": "string" },
"username": { "type": "string" },
"avatar_url": { "type": "uri" }
@@ -52,6 +52,5 @@
}
},
"subscribed": { "type": ["boolean", "null"] }
- },
- "additionalProperties": false
+ }
}
diff --git a/spec/fixtures/api/schemas/issues.json b/spec/fixtures/api/schemas/issues.json
index 70771b21c96..fbcd9eea389 100644
--- a/spec/fixtures/api/schemas/issues.json
+++ b/spec/fixtures/api/schemas/issues.json
@@ -10,6 +10,5 @@
"items": { "$ref": "issue.json" }
},
"size": { "type": "integer" }
- },
- "additionalProperties": false
+ }
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/merge_request.json b/spec/fixtures/api/schemas/public_api/v4/merge_request.json
new file mode 100644
index 00000000000..cd50be00418
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/merge_request.json
@@ -0,0 +1,124 @@
+{
+ "type": "object",
+ "properties" : {
+ "properties" : {
+ "id": { "type": "integer" },
+ "iid": { "type": "integer" },
+ "project_id": { "type": "integer" },
+ "title": { "type": "string" },
+ "description": { "type": ["string", "null"] },
+ "state": { "type": "string" },
+ "merged_by": {
+ "type": ["object", "null"],
+ "properties": {
+ "name": { "type": "string" },
+ "username": { "type": "string" },
+ "id": { "type": "integer" },
+ "state": { "type": "string" },
+ "avatar_url": { "type": "uri" },
+ "web_url": { "type": "uri" }
+ },
+ "additionalProperties": false
+ },
+ "merged_at": { "type": ["date", "null"] },
+ "closed_by": {
+ "type": ["object", "null"],
+ "properties": {
+ "name": { "type": "string" },
+ "username": { "type": "string" },
+ "id": { "type": "integer" },
+ "state": { "type": "string" },
+ "avatar_url": { "type": "uri" },
+ "web_url": { "type": "uri" }
+ },
+ "additionalProperties": false
+ },
+ "closed_at": { "type": ["date", "null"] },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "target_branch": { "type": "string" },
+ "source_branch": { "type": "string" },
+ "upvotes": { "type": "integer" },
+ "downvotes": { "type": "integer" },
+ "author": {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "username": { "type": "string" },
+ "id": { "type": "integer" },
+ "state": { "type": "string" },
+ "avatar_url": { "type": "uri" },
+ "web_url": { "type": "uri" }
+ },
+ "additionalProperties": false
+ },
+ "assignee": {
+ "type": ["object", "null"],
+ "properties": {
+ "name": { "type": "string" },
+ "username": { "type": "string" },
+ "id": { "type": "integer" },
+ "state": { "type": "string" },
+ "avatar_url": { "type": "uri" },
+ "web_url": { "type": "uri" }
+ },
+ "additionalProperties": false
+ },
+ "source_project_id": { "type": "integer" },
+ "target_project_id": { "type": "integer" },
+ "labels": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "work_in_progress": { "type": "boolean" },
+ "milestone": {
+ "type": ["object", "null"],
+ "properties": {
+ "id": { "type": "integer" },
+ "iid": { "type": "integer" },
+ "project_id": { "type": ["integer", "null"] },
+ "group_id": { "type": ["integer", "null"] },
+ "title": { "type": "string" },
+ "description": { "type": ["string", "null"] },
+ "state": { "type": "string" },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "due_date": { "type": "date" },
+ "start_date": { "type": "date" }
+ },
+ "additionalProperties": false
+ },
+ "merge_when_pipeline_succeeds": { "type": "boolean" },
+ "merge_status": { "type": "string" },
+ "sha": { "type": "string" },
+ "merge_commit_sha": { "type": ["string", "null"] },
+ "user_notes_count": { "type": "integer" },
+ "changes_count": { "type": "string" },
+ "should_remove_source_branch": { "type": ["boolean", "null"] },
+ "force_remove_source_branch": { "type": ["boolean", "null"] },
+ "discussion_locked": { "type": ["boolean", "null"] },
+ "web_url": { "type": "uri" },
+ "squash": { "type": "boolean" },
+ "time_stats": {
+ "time_estimate": { "type": "integer" },
+ "total_time_spent": { "type": "integer" },
+ "human_time_estimate": { "type": ["string", "null"] },
+ "human_total_time_spent": { "type": ["string", "null"] }
+ },
+ "allow_collaboration": { "type": ["boolean", "null"] },
+ "allow_maintainer_to_push": { "type": ["boolean", "null"] }
+ },
+ "required": [
+ "id", "iid", "project_id", "title", "description",
+ "state", "created_at", "updated_at", "target_branch",
+ "source_branch", "upvotes", "downvotes", "author",
+ "assignee", "source_project_id", "target_project_id",
+ "labels", "work_in_progress", "milestone", "merge_when_pipeline_succeeds",
+ "merge_status", "sha", "merge_commit_sha", "user_notes_count",
+ "should_remove_source_branch", "force_remove_source_branch",
+ "web_url", "squash"
+ ]
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/merge_requests.json b/spec/fixtures/api/schemas/public_api/v4/merge_requests.json
index 6df27bf32b9..b35c83950e8 100644
--- a/spec/fixtures/api/schemas/public_api/v4/merge_requests.json
+++ b/spec/fixtures/api/schemas/public_api/v4/merge_requests.json
@@ -1,126 +1,6 @@
{
"type": "array",
"items": {
- "type": "object",
- "properties" : {
- "id": { "type": "integer" },
- "iid": { "type": "integer" },
- "project_id": { "type": "integer" },
- "title": { "type": "string" },
- "description": { "type": ["string", "null"] },
- "state": { "type": "string" },
- "merged_by": {
- "type": ["object", "null"],
- "properties": {
- "name": { "type": "string" },
- "username": { "type": "string" },
- "id": { "type": "integer" },
- "state": { "type": "string" },
- "avatar_url": { "type": "uri" },
- "web_url": { "type": "uri" }
- },
- "additionalProperties": false
- },
- "merged_at": { "type": ["date", "null"] },
- "closed_by": {
- "type": ["object", "null"],
- "properties": {
- "name": { "type": "string" },
- "username": { "type": "string" },
- "id": { "type": "integer" },
- "state": { "type": "string" },
- "avatar_url": { "type": "uri" },
- "web_url": { "type": "uri" }
- },
- "additionalProperties": false
- },
- "closed_at": { "type": ["date", "null"] },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
- "target_branch": { "type": "string" },
- "source_branch": { "type": "string" },
- "upvotes": { "type": "integer" },
- "downvotes": { "type": "integer" },
- "author": {
- "type": "object",
- "properties": {
- "name": { "type": "string" },
- "username": { "type": "string" },
- "id": { "type": "integer" },
- "state": { "type": "string" },
- "avatar_url": { "type": "uri" },
- "web_url": { "type": "uri" }
- },
- "additionalProperties": false
- },
- "assignee": {
- "type": ["object", "null"],
- "properties": {
- "name": { "type": "string" },
- "username": { "type": "string" },
- "id": { "type": "integer" },
- "state": { "type": "string" },
- "avatar_url": { "type": "uri" },
- "web_url": { "type": "uri" }
- },
- "additionalProperties": false
- },
- "source_project_id": { "type": "integer" },
- "target_project_id": { "type": "integer" },
- "labels": {
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "work_in_progress": { "type": "boolean" },
- "milestone": {
- "type": ["object", "null"],
- "properties": {
- "id": { "type": "integer" },
- "iid": { "type": "integer" },
- "project_id": { "type": ["integer", "null"] },
- "group_id": { "type": ["integer", "null"] },
- "title": { "type": "string" },
- "description": { "type": ["string", "null"] },
- "state": { "type": "string" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
- "due_date": { "type": "date" },
- "start_date": { "type": "date" }
- },
- "additionalProperties": false
- },
- "merge_when_pipeline_succeeds": { "type": "boolean" },
- "merge_status": { "type": "string" },
- "sha": { "type": "string" },
- "merge_commit_sha": { "type": ["string", "null"] },
- "user_notes_count": { "type": "integer" },
- "changes_count": { "type": "string" },
- "should_remove_source_branch": { "type": ["boolean", "null"] },
- "force_remove_source_branch": { "type": ["boolean", "null"] },
- "discussion_locked": { "type": ["boolean", "null"] },
- "web_url": { "type": "uri" },
- "squash": { "type": "boolean" },
- "time_stats": {
- "time_estimate": { "type": "integer" },
- "total_time_spent": { "type": "integer" },
- "human_time_estimate": { "type": ["string", "null"] },
- "human_total_time_spent": { "type": ["string", "null"] }
- },
- "allow_collaboration": { "type": ["boolean", "null"] },
- "allow_maintainer_to_push": { "type": ["boolean", "null"] }
- },
- "required": [
- "id", "iid", "project_id", "title", "description",
- "state", "created_at", "updated_at", "target_branch",
- "source_branch", "upvotes", "downvotes", "author",
- "assignee", "source_project_id", "target_project_id",
- "labels", "work_in_progress", "milestone", "merge_when_pipeline_succeeds",
- "merge_status", "sha", "merge_commit_sha", "user_notes_count",
- "should_remove_source_branch", "force_remove_source_branch",
- "web_url", "squash"
- ],
- "additionalProperties": false
+ "$ref": "./merge_request.json"
}
}
diff --git a/spec/javascripts/behaviors/secret_values_spec.js b/spec/frontend/behaviors/secret_values_spec.js
index 5aaab093c0c..5aaab093c0c 100644
--- a/spec/javascripts/behaviors/secret_values_spec.js
+++ b/spec/frontend/behaviors/secret_values_spec.js
diff --git a/spec/javascripts/blob/blob_fork_suggestion_spec.js b/spec/frontend/blob/blob_fork_suggestion_spec.js
index 9b81b7e6f92..9b81b7e6f92 100644
--- a/spec/javascripts/blob/blob_fork_suggestion_spec.js
+++ b/spec/frontend/blob/blob_fork_suggestion_spec.js
diff --git a/spec/javascripts/boards/modal_store_spec.js b/spec/frontend/boards/modal_store_spec.js
index 3257a3fb8a3..3257a3fb8a3 100644
--- a/spec/javascripts/boards/modal_store_spec.js
+++ b/spec/frontend/boards/modal_store_spec.js
diff --git a/spec/javascripts/cycle_analytics/limit_warning_component_spec.js b/spec/frontend/cycle_analytics/limit_warning_component_spec.js
index 13e9fe00a00..13e9fe00a00 100644
--- a/spec/javascripts/cycle_analytics/limit_warning_component_spec.js
+++ b/spec/frontend/cycle_analytics/limit_warning_component_spec.js
diff --git a/spec/javascripts/diffs/components/diff_stats_spec.js b/spec/frontend/diffs/components/diff_stats_spec.js
index 984b3026209..984b3026209 100644
--- a/spec/javascripts/diffs/components/diff_stats_spec.js
+++ b/spec/frontend/diffs/components/diff_stats_spec.js
diff --git a/spec/javascripts/diffs/components/edit_button_spec.js b/spec/frontend/diffs/components/edit_button_spec.js
index ccdae4cb312..ccdae4cb312 100644
--- a/spec/javascripts/diffs/components/edit_button_spec.js
+++ b/spec/frontend/diffs/components/edit_button_spec.js
diff --git a/spec/javascripts/diffs/components/hidden_files_warning_spec.js b/spec/frontend/diffs/components/hidden_files_warning_spec.js
index 5bf5ddd27bd..5bf5ddd27bd 100644
--- a/spec/javascripts/diffs/components/hidden_files_warning_spec.js
+++ b/spec/frontend/diffs/components/hidden_files_warning_spec.js
diff --git a/spec/javascripts/diffs/components/no_changes_spec.js b/spec/frontend/diffs/components/no_changes_spec.js
index e45d34bf9d5..e45d34bf9d5 100644
--- a/spec/javascripts/diffs/components/no_changes_spec.js
+++ b/spec/frontend/diffs/components/no_changes_spec.js
diff --git a/spec/javascripts/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index 503af3920a8..503af3920a8 100644
--- a/spec/javascripts/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
diff --git a/spec/javascripts/error_tracking/store/mutation_spec.js b/spec/frontend/error_tracking/store/mutation_spec.js
index 8117104bdbc..8117104bdbc 100644
--- a/spec/javascripts/error_tracking/store/mutation_spec.js
+++ b/spec/frontend/error_tracking/store/mutation_spec.js
diff --git a/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js b/spec/frontend/filtered_search/filtered_search_token_keys_spec.js
index d1fea18dea8..d1fea18dea8 100644
--- a/spec/javascripts/filtered_search/filtered_search_token_keys_spec.js
+++ b/spec/frontend/filtered_search/filtered_search_token_keys_spec.js
diff --git a/spec/javascripts/filtered_search/services/recent_searches_service_error_spec.js b/spec/frontend/filtered_search/services/recent_searches_service_error_spec.js
index ea7c146fa4f..ea7c146fa4f 100644
--- a/spec/javascripts/filtered_search/services/recent_searches_service_error_spec.js
+++ b/spec/frontend/filtered_search/services/recent_searches_service_error_spec.js
diff --git a/spec/javascripts/filtered_search/stores/recent_searches_store_spec.js b/spec/frontend/filtered_search/stores/recent_searches_store_spec.js
index 56bb82ae941..56bb82ae941 100644
--- a/spec/javascripts/filtered_search/stores/recent_searches_store_spec.js
+++ b/spec/frontend/filtered_search/stores/recent_searches_store_spec.js
diff --git a/spec/javascripts/frequent_items/store/getters_spec.js b/spec/frontend/frequent_items/store/getters_spec.js
index 1cd12eb6832..1cd12eb6832 100644
--- a/spec/javascripts/frequent_items/store/getters_spec.js
+++ b/spec/frontend/frequent_items/store/getters_spec.js
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index c7008c780d6..ed12af925f1 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -3,8 +3,16 @@
import $ from 'jquery';
import GfmAutoComplete from '~/gfm_auto_complete';
-import 'vendor/jquery.caret';
-import 'vendor/jquery.atwho';
+import 'jquery.caret';
+import 'at.js';
+
+import { TEST_HOST } from 'helpers/test_constants';
+import { setTestTimeout } from 'helpers/timeout';
+import { getJSONFixture } from 'helpers/fixtures';
+
+setTestTimeout(500);
+
+const labelsFixture = getJSONFixture('autocomplete_sources/labels.json');
describe('GfmAutoComplete', () => {
const gfmAutoCompleteCallbacks = GfmAutoComplete.prototype.getDefaultCallbacks.call({
@@ -12,11 +20,12 @@ describe('GfmAutoComplete', () => {
});
let atwhoInstance;
- let items;
let sorterValue;
describe('DefaultOptions.sorter', () => {
describe('assets loading', () => {
+ let items;
+
beforeEach(() => {
jest.spyOn(GfmAutoComplete, 'isLoading').mockReturnValue(true);
@@ -61,7 +70,7 @@ describe('GfmAutoComplete', () => {
atwhoInstance = { setting: {} };
const query = 'query';
- items = [];
+ const items = [];
const searchKey = 'searchKey';
gfmAutoCompleteCallbacks.sorter.call(atwhoInstance, query, items, searchKey);
@@ -250,4 +259,90 @@ describe('GfmAutoComplete', () => {
).toBe('<li><small>grp/proj#5</small> Some Issue</li>');
});
});
+
+ describe('labels', () => {
+ const dataSources = {
+ labels: `${TEST_HOST}/autocomplete_sources/labels`,
+ };
+
+ const allLabels = labelsFixture;
+ const assignedLabels = allLabels.filter(label => label.set);
+ const unassignedLabels = allLabels.filter(label => !label.set);
+
+ let autocomplete;
+ let $textarea;
+
+ beforeEach(() => {
+ autocomplete = new GfmAutoComplete(dataSources);
+ $textarea = $('<textarea></textarea>');
+ autocomplete.setup($textarea, { labels: true });
+ });
+
+ afterEach(() => {
+ autocomplete.destroy();
+ });
+
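+    // Focuses the textarea, types the given text, and waits one animation frame for at.js to render its dropdown.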
+ const triggerDropdown = text => {
+ $textarea
+ .trigger('focus')
+ .val(text)
+ .caret('pos', -1);
+ $textarea.trigger('keyup');
+
+ return new Promise(window.requestAnimationFrame);
+ };
+
+ const getDropdownItems = () => {
+ const dropdown = document.getElementById('at-view-labels');
+ const items = dropdown.getElementsByTagName('li');
+ return [].map.call(items, item => item.textContent.trim());
+ };
+
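+    // Asserts that typing the given input renders exactly the titles of the expected labels in the dropdown.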
+ const expectLabels = ({ input, output }) =>
+ triggerDropdown(input).then(() => {
+ expect(getDropdownItems()).toEqual(output.map(label => label.title));
+ });
+
+ describe('with no labels assigned', () => {
+ beforeEach(() => {
+ autocomplete.cachedData['~'] = [...unassignedLabels];
+ });
+
+ it.each`
+ input | output
+ ${'~'} | ${unassignedLabels}
+ ${'/label ~'} | ${unassignedLabels}
+ ${'/relabel ~'} | ${unassignedLabels}
+ ${'/unlabel ~'} | ${[]}
+ `('$input shows $output.length labels', expectLabels);
+ });
+
+ describe('with some labels assigned', () => {
+ beforeEach(() => {
+ autocomplete.cachedData['~'] = allLabels;
+ });
+
+ it.each`
+ input | output
+ ${'~'} | ${allLabels}
+ ${'/label ~'} | ${unassignedLabels}
+ ${'/relabel ~'} | ${allLabels}
+ ${'/unlabel ~'} | ${assignedLabels}
+ `('$input shows $output.length labels', expectLabels);
+ });
+
+ describe('with all labels assigned', () => {
+ beforeEach(() => {
+ autocomplete.cachedData['~'] = [...assignedLabels];
+ });
+
+ it.each`
+ input | output
+ ${'~'} | ${assignedLabels}
+ ${'/label ~'} | ${[]}
+ ${'/relabel ~'} | ${assignedLabels}
+ ${'/unlabel ~'} | ${assignedLabels}
+ `('$input shows $output.length labels', expectLabels);
+ });
+ });
});
diff --git a/spec/frontend/helpers/fixtures.js b/spec/frontend/helpers/fixtures.js
new file mode 100644
index 00000000000..f96f27c4d80
--- /dev/null
+++ b/spec/frontend/helpers/fixtures.js
@@ -0,0 +1,24 @@
+/* eslint-disable import/prefer-default-export, global-require, import/no-dynamic-require */
+
+import fs from 'fs';
+import path from 'path';
+
+// jest-util is part of Jest
+// eslint-disable-next-line import/no-extraneous-dependencies
+import { ErrorWithStack } from 'jest-util';
+
+const fixturesBasePath = path.join(process.cwd(), 'spec', 'javascripts', 'fixtures');
+
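+// Loads a JSON fixture from spec/javascripts/fixtures (or its ee/ subfolder) and raises a descriptive error when it is missing.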
+export function getJSONFixture(relativePath, ee = false) {
+ const absolutePath = path.join(fixturesBasePath, ee ? 'ee' : '', relativePath);
+ if (!fs.existsSync(absolutePath)) {
+ throw new ErrorWithStack(
+ `Fixture file ${relativePath} does not exist.
+
+Did you run bin/rake karma:fixtures?`,
+ getJSONFixture,
+ );
+ }
+
+ return require(absolutePath);
+}
diff --git a/spec/frontend/helpers/timeout.js b/spec/frontend/helpers/timeout.js
new file mode 100644
index 00000000000..318593a48a4
--- /dev/null
+++ b/spec/frontend/helpers/timeout.js
@@ -0,0 +1,24 @@
+let testTimeoutInMs;
+
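+// Overrides Jest's timeout for the current spec file and records the value for the elapsed-time check below.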
+export const setTestTimeout = newTimeoutInMs => {
+ testTimeoutInMs = newTimeoutInMs;
+ jest.setTimeout(newTimeoutInMs);
+};
+
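+// Applies the default timeout and registers hooks that fail any spec whose wall-clock duration exceeds it.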
+export const initializeTestTimeout = defaultTimeoutInMs => {
+ setTestTimeout(defaultTimeoutInMs);
+
+ let testStartTime;
+
+ // https://github.com/facebook/jest/issues/6947
+ beforeEach(() => {
+ testStartTime = Date.now();
+ });
+
+ afterEach(() => {
+ const elapsedTimeInMs = Date.now() - testStartTime;
+ if (elapsedTimeInMs > testTimeoutInMs) {
+ throw new Error(`Test took too long (${elapsedTimeInMs}ms > ${testTimeoutInMs}ms)!`);
+ }
+ });
+};
diff --git a/spec/frontend/helpers/vue_mount_component_helper.js b/spec/frontend/helpers/vue_mount_component_helper.js
new file mode 100644
index 00000000000..6848c95d95d
--- /dev/null
+++ b/spec/frontend/helpers/vue_mount_component_helper.js
@@ -0,0 +1,38 @@
+import Vue from 'vue';
+
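+// Instantiates the given component constructor with the provided props and mounts it, optionally onto an existing element.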
+const mountComponent = (Component, props = {}, el = null) =>
+ new Component({
+ propsData: props,
+ }).$mount(el);
+
+export const createComponentWithStore = (Component, store, propsData = {}) =>
+ new Component({
+ store,
+ propsData,
+ });
+
+export const mountComponentWithStore = (Component, { el, props, store }) =>
+ new Component({
+ store,
+ propsData: props || {},
+ }).$mount(el);
+
+export const mountComponentWithSlots = (Component, { props, slots }) => {
+ const component = new Component({
+ propsData: props || {},
+ });
+
+ component.$slots = slots;
+
+ return component.$mount();
+};
+
+/**
+ * Mount a component with the given render method.
+ *
+ * This helps with inserting slots that need to be compiled.
+ */
+export const mountComponentWithRender = (render, el = null) =>
+ mountComponent(Vue.extend({ render }), {}, el);
+
+export default mountComponent;
diff --git a/spec/javascripts/ide/lib/common/disposable_spec.js b/spec/frontend/ide/lib/common/disposable_spec.js
index af12ca15369..af12ca15369 100644
--- a/spec/javascripts/ide/lib/common/disposable_spec.js
+++ b/spec/frontend/ide/lib/common/disposable_spec.js
diff --git a/spec/javascripts/ide/lib/diff/diff_spec.js b/spec/frontend/ide/lib/diff/diff_spec.js
index 57f3ac3d365..57f3ac3d365 100644
--- a/spec/javascripts/ide/lib/diff/diff_spec.js
+++ b/spec/frontend/ide/lib/diff/diff_spec.js
diff --git a/spec/javascripts/ide/lib/editor_options_spec.js b/spec/frontend/ide/lib/editor_options_spec.js
index d149a883166..d149a883166 100644
--- a/spec/javascripts/ide/lib/editor_options_spec.js
+++ b/spec/frontend/ide/lib/editor_options_spec.js
diff --git a/spec/javascripts/ide/lib/files_spec.js b/spec/frontend/ide/lib/files_spec.js
index fe791aa2b74..fe791aa2b74 100644
--- a/spec/javascripts/ide/lib/files_spec.js
+++ b/spec/frontend/ide/lib/files_spec.js
diff --git a/spec/javascripts/ide/stores/modules/commit/mutations_spec.js b/spec/frontend/ide/stores/modules/commit/mutations_spec.js
index 5de7a281d34..5de7a281d34 100644
--- a/spec/javascripts/ide/stores/modules/commit/mutations_spec.js
+++ b/spec/frontend/ide/stores/modules/commit/mutations_spec.js
diff --git a/spec/javascripts/ide/stores/modules/file_templates/getters_spec.js b/spec/frontend/ide/stores/modules/file_templates/getters_spec.js
index 17cb457881f..17cb457881f 100644
--- a/spec/javascripts/ide/stores/modules/file_templates/getters_spec.js
+++ b/spec/frontend/ide/stores/modules/file_templates/getters_spec.js
diff --git a/spec/javascripts/ide/stores/modules/file_templates/mutations_spec.js b/spec/frontend/ide/stores/modules/file_templates/mutations_spec.js
index 8e0e3ae99a1..8e0e3ae99a1 100644
--- a/spec/javascripts/ide/stores/modules/file_templates/mutations_spec.js
+++ b/spec/frontend/ide/stores/modules/file_templates/mutations_spec.js
diff --git a/spec/javascripts/ide/stores/modules/pane/getters_spec.js b/spec/frontend/ide/stores/modules/pane/getters_spec.js
index 8a213323de0..8a213323de0 100644
--- a/spec/javascripts/ide/stores/modules/pane/getters_spec.js
+++ b/spec/frontend/ide/stores/modules/pane/getters_spec.js
diff --git a/spec/javascripts/ide/stores/modules/pane/mutations_spec.js b/spec/frontend/ide/stores/modules/pane/mutations_spec.js
index b5fcd35912e..b5fcd35912e 100644
--- a/spec/javascripts/ide/stores/modules/pane/mutations_spec.js
+++ b/spec/frontend/ide/stores/modules/pane/mutations_spec.js
diff --git a/spec/javascripts/ide/stores/modules/pipelines/getters_spec.js b/spec/frontend/ide/stores/modules/pipelines/getters_spec.js
index 4514896b5ea..4514896b5ea 100644
--- a/spec/javascripts/ide/stores/modules/pipelines/getters_spec.js
+++ b/spec/frontend/ide/stores/modules/pipelines/getters_spec.js
diff --git a/spec/javascripts/ide/stores/mutations/branch_spec.js b/spec/frontend/ide/stores/mutations/branch_spec.js
index 29eb859ddaf..29eb859ddaf 100644
--- a/spec/javascripts/ide/stores/mutations/branch_spec.js
+++ b/spec/frontend/ide/stores/mutations/branch_spec.js
diff --git a/spec/javascripts/ide/stores/mutations/merge_request_spec.js b/spec/frontend/ide/stores/mutations/merge_request_spec.js
index e30ca22022f..e30ca22022f 100644
--- a/spec/javascripts/ide/stores/mutations/merge_request_spec.js
+++ b/spec/frontend/ide/stores/mutations/merge_request_spec.js
diff --git a/spec/javascripts/image_diff/view_types_spec.js b/spec/frontend/image_diff/view_types_spec.js
index e9639f46497..e9639f46497 100644
--- a/spec/javascripts/image_diff/view_types_spec.js
+++ b/spec/frontend/image_diff/view_types_spec.js
diff --git a/spec/javascripts/import_projects/store/getters_spec.js b/spec/frontend/import_projects/store/getters_spec.js
index e5e4a95f473..e5e4a95f473 100644
--- a/spec/javascripts/import_projects/store/getters_spec.js
+++ b/spec/frontend/import_projects/store/getters_spec.js
diff --git a/spec/javascripts/import_projects/store/mutations_spec.js b/spec/frontend/import_projects/store/mutations_spec.js
index 8db8e9819ba..8db8e9819ba 100644
--- a/spec/javascripts/import_projects/store/mutations_spec.js
+++ b/spec/frontend/import_projects/store/mutations_spec.js
diff --git a/spec/javascripts/jobs/components/empty_state_spec.js b/spec/frontend/jobs/components/empty_state_spec.js
index a2df79bdda0..a2df79bdda0 100644
--- a/spec/javascripts/jobs/components/empty_state_spec.js
+++ b/spec/frontend/jobs/components/empty_state_spec.js
diff --git a/spec/javascripts/jobs/components/erased_block_spec.js b/spec/frontend/jobs/components/erased_block_spec.js
index 8e0433d3fb7..8e0433d3fb7 100644
--- a/spec/javascripts/jobs/components/erased_block_spec.js
+++ b/spec/frontend/jobs/components/erased_block_spec.js
diff --git a/spec/javascripts/jobs/components/sidebar_detail_row_spec.js b/spec/frontend/jobs/components/sidebar_detail_row_spec.js
index 42d11266dad..42d11266dad 100644
--- a/spec/javascripts/jobs/components/sidebar_detail_row_spec.js
+++ b/spec/frontend/jobs/components/sidebar_detail_row_spec.js
diff --git a/spec/javascripts/jobs/components/stuck_block_spec.js b/spec/frontend/jobs/components/stuck_block_spec.js
index c320793b2be..c320793b2be 100644
--- a/spec/javascripts/jobs/components/stuck_block_spec.js
+++ b/spec/frontend/jobs/components/stuck_block_spec.js
diff --git a/spec/javascripts/jobs/store/getters_spec.js b/spec/frontend/jobs/store/getters_spec.js
index 379114c3737..379114c3737 100644
--- a/spec/javascripts/jobs/store/getters_spec.js
+++ b/spec/frontend/jobs/store/getters_spec.js
diff --git a/spec/javascripts/jobs/store/mutations_spec.js b/spec/frontend/jobs/store/mutations_spec.js
index d7908efcf13..d7908efcf13 100644
--- a/spec/javascripts/jobs/store/mutations_spec.js
+++ b/spec/frontend/jobs/store/mutations_spec.js
diff --git a/spec/javascripts/labels_select_spec.js b/spec/frontend/labels_select_spec.js
index acfdc885032..acfdc885032 100644
--- a/spec/javascripts/labels_select_spec.js
+++ b/spec/frontend/labels_select_spec.js
diff --git a/spec/frontend/lib/utils/autosave_spec.js b/spec/frontend/lib/utils/autosave_spec.js
new file mode 100644
index 00000000000..12e97f6cdec
--- /dev/null
+++ b/spec/frontend/lib/utils/autosave_spec.js
@@ -0,0 +1,64 @@
+import { clearDraft, getDraft, updateDraft } from '~/lib/utils/autosave';
+
+describe('autosave utils', () => {
+ const autosaveKey = 'dummy-autosave-key';
+ const text = 'some dummy text';
+
+ describe('clearDraft', () => {
+ beforeEach(() => {
+ localStorage.setItem(`autosave/${autosaveKey}`, text);
+ });
+
+ afterEach(() => {
+ localStorage.removeItem(`autosave/${autosaveKey}`);
+ });
+
+ it('removes the draft from localStorage', () => {
+ clearDraft(autosaveKey);
+
+ expect(localStorage.getItem(`autosave/${autosaveKey}`)).toBe(null);
+ });
+ });
+
+ describe('getDraft', () => {
+ beforeEach(() => {
+ localStorage.setItem(`autosave/${autosaveKey}`, text);
+ });
+
+ afterEach(() => {
+ localStorage.removeItem(`autosave/${autosaveKey}`);
+ });
+
+ it('returns the draft from localStorage', () => {
+ const result = getDraft(autosaveKey);
+
+ expect(result).toBe(text);
+ });
+
+ it('returns null if no entry exists in localStorage', () => {
+ localStorage.removeItem(`autosave/${autosaveKey}`);
+
+ const result = getDraft(autosaveKey);
+
+ expect(result).toBe(null);
+ });
+ });
+
+ describe('updateDraft', () => {
+ beforeEach(() => {
+ localStorage.setItem(`autosave/${autosaveKey}`, text);
+ });
+
+ afterEach(() => {
+ localStorage.removeItem(`autosave/${autosaveKey}`);
+ });
+
+    it('updates the draft in localStorage', () => {
+ const newText = 'new text';
+
+ updateDraft(autosaveKey, newText);
+
+ expect(localStorage.getItem(`autosave/${autosaveKey}`)).toBe(newText);
+ });
+ });
+});
diff --git a/spec/javascripts/lib/utils/cache_spec.js b/spec/frontend/lib/utils/cache_spec.js
index 2fe02a7592c..2fe02a7592c 100644
--- a/spec/javascripts/lib/utils/cache_spec.js
+++ b/spec/frontend/lib/utils/cache_spec.js
diff --git a/spec/javascripts/lib/utils/grammar_spec.js b/spec/frontend/lib/utils/grammar_spec.js
index 377b2ffb48c..377b2ffb48c 100644
--- a/spec/javascripts/lib/utils/grammar_spec.js
+++ b/spec/frontend/lib/utils/grammar_spec.js
diff --git a/spec/javascripts/lib/utils/image_utility_spec.js b/spec/frontend/lib/utils/image_utility_spec.js
index a7eff419fba..a7eff419fba 100644
--- a/spec/javascripts/lib/utils/image_utility_spec.js
+++ b/spec/frontend/lib/utils/image_utility_spec.js
diff --git a/spec/javascripts/lib/utils/number_utility_spec.js b/spec/frontend/lib/utils/number_utility_spec.js
index 818404bad81..818404bad81 100644
--- a/spec/javascripts/lib/utils/number_utility_spec.js
+++ b/spec/frontend/lib/utils/number_utility_spec.js
diff --git a/spec/javascripts/lib/utils/text_utility_spec.js b/spec/frontend/lib/utils/text_utility_spec.js
index 0a266b19ea5..0a266b19ea5 100644
--- a/spec/javascripts/lib/utils/text_utility_spec.js
+++ b/spec/frontend/lib/utils/text_utility_spec.js
diff --git a/spec/javascripts/locale/ensure_single_line_spec.js b/spec/frontend/locale/ensure_single_line_spec.js
index 20b04cab9c8..20b04cab9c8 100644
--- a/spec/javascripts/locale/ensure_single_line_spec.js
+++ b/spec/frontend/locale/ensure_single_line_spec.js
diff --git a/spec/javascripts/locale/sprintf_spec.js b/spec/frontend/locale/sprintf_spec.js
index 52e903b819f..52e903b819f 100644
--- a/spec/javascripts/locale/sprintf_spec.js
+++ b/spec/frontend/locale/sprintf_spec.js
diff --git a/spec/javascripts/notebook/lib/highlight_spec.js b/spec/frontend/notebook/lib/highlight_spec.js
index d71c5718858..d71c5718858 100644
--- a/spec/javascripts/notebook/lib/highlight_spec.js
+++ b/spec/frontend/notebook/lib/highlight_spec.js
diff --git a/spec/javascripts/notes/components/discussion_reply_placeholder_spec.js b/spec/frontend/notes/components/discussion_reply_placeholder_spec.js
index 07a366cf339..07a366cf339 100644
--- a/spec/javascripts/notes/components/discussion_reply_placeholder_spec.js
+++ b/spec/frontend/notes/components/discussion_reply_placeholder_spec.js
diff --git a/spec/javascripts/notes/components/discussion_resolve_button_spec.js b/spec/frontend/notes/components/discussion_resolve_button_spec.js
index 5024f40ec5d..5024f40ec5d 100644
--- a/spec/javascripts/notes/components/discussion_resolve_button_spec.js
+++ b/spec/frontend/notes/components/discussion_resolve_button_spec.js
diff --git a/spec/javascripts/notes/components/note_attachment_spec.js b/spec/frontend/notes/components/note_attachment_spec.js
index b14a518b622..b14a518b622 100644
--- a/spec/javascripts/notes/components/note_attachment_spec.js
+++ b/spec/frontend/notes/components/note_attachment_spec.js
diff --git a/spec/javascripts/notes/components/note_edited_text_spec.js b/spec/frontend/notes/components/note_edited_text_spec.js
index e4c8d954d50..e4c8d954d50 100644
--- a/spec/javascripts/notes/components/note_edited_text_spec.js
+++ b/spec/frontend/notes/components/note_edited_text_spec.js
diff --git a/spec/javascripts/performance_bar/services/performance_bar_service_spec.js b/spec/frontend/performance_bar/services/performance_bar_service_spec.js
index cfec4b779e4..cfec4b779e4 100644
--- a/spec/javascripts/performance_bar/services/performance_bar_service_spec.js
+++ b/spec/frontend/performance_bar/services/performance_bar_service_spec.js
diff --git a/spec/javascripts/pipelines/blank_state_spec.js b/spec/frontend/pipelines/blank_state_spec.js
index 033bd5ccb73..033bd5ccb73 100644
--- a/spec/javascripts/pipelines/blank_state_spec.js
+++ b/spec/frontend/pipelines/blank_state_spec.js
diff --git a/spec/javascripts/pipelines/empty_state_spec.js b/spec/frontend/pipelines/empty_state_spec.js
index f12950b8fce..f12950b8fce 100644
--- a/spec/javascripts/pipelines/empty_state_spec.js
+++ b/spec/frontend/pipelines/empty_state_spec.js
diff --git a/spec/javascripts/pipelines/pipeline_store_spec.js b/spec/frontend/pipelines/pipeline_store_spec.js
index 1d5754d1f05..1d5754d1f05 100644
--- a/spec/javascripts/pipelines/pipeline_store_spec.js
+++ b/spec/frontend/pipelines/pipeline_store_spec.js
diff --git a/spec/javascripts/pipelines/pipelines_store_spec.js b/spec/frontend/pipelines/pipelines_store_spec.js
index ce21f788ed5..ce21f788ed5 100644
--- a/spec/javascripts/pipelines/pipelines_store_spec.js
+++ b/spec/frontend/pipelines/pipelines_store_spec.js
diff --git a/spec/javascripts/registry/getters_spec.js b/spec/frontend/registry/getters_spec.js
index 839aa718997..839aa718997 100644
--- a/spec/javascripts/registry/getters_spec.js
+++ b/spec/frontend/registry/getters_spec.js
diff --git a/spec/javascripts/reports/components/report_link_spec.js b/spec/frontend/reports/components/report_link_spec.js
index f879899e9c5..f879899e9c5 100644
--- a/spec/javascripts/reports/components/report_link_spec.js
+++ b/spec/frontend/reports/components/report_link_spec.js
diff --git a/spec/javascripts/reports/store/utils_spec.js b/spec/frontend/reports/store/utils_spec.js
index 1679d120db2..1679d120db2 100644
--- a/spec/javascripts/reports/store/utils_spec.js
+++ b/spec/frontend/reports/store/utils_spec.js
diff --git a/spec/javascripts/sidebar/confidential_edit_buttons_spec.js b/spec/frontend/sidebar/confidential_edit_buttons_spec.js
index 32da9f83112..32da9f83112 100644
--- a/spec/javascripts/sidebar/confidential_edit_buttons_spec.js
+++ b/spec/frontend/sidebar/confidential_edit_buttons_spec.js
diff --git a/spec/javascripts/sidebar/confidential_edit_form_buttons_spec.js b/spec/frontend/sidebar/confidential_edit_form_buttons_spec.js
index 369088cb258..369088cb258 100644
--- a/spec/javascripts/sidebar/confidential_edit_form_buttons_spec.js
+++ b/spec/frontend/sidebar/confidential_edit_form_buttons_spec.js
diff --git a/spec/javascripts/sidebar/lock/edit_form_spec.js b/spec/frontend/sidebar/lock/edit_form_spec.js
index ec10a999a40..ec10a999a40 100644
--- a/spec/javascripts/sidebar/lock/edit_form_spec.js
+++ b/spec/frontend/sidebar/lock/edit_form_spec.js
diff --git a/spec/frontend/test_setup.js b/spec/frontend/test_setup.js
index d892889b98d..006fc60ef57 100644
--- a/spec/frontend/test_setup.js
+++ b/spec/frontend/test_setup.js
@@ -1,24 +1,15 @@
import Vue from 'vue';
import Translate from '~/vue_shared/translate';
import axios from '~/lib/utils/axios_utils';
+import { initializeTestTimeout } from './helpers/timeout';
-const testTimeoutInMs = 300;
-jest.setTimeout(testTimeoutInMs);
-
-let testStartTime;
-
-// https://github.com/facebook/jest/issues/6947
-beforeEach(() => {
- testStartTime = Date.now();
-});
-
+// wait for pending setTimeout()s
afterEach(() => {
- const elapsedTimeInMs = Date.now() - testStartTime;
- if (elapsedTimeInMs > testTimeoutInMs) {
- throw new Error(`Test took too long (${elapsedTimeInMs}ms > ${testTimeoutInMs}ms)!`);
- }
+ jest.runAllTimers();
});
+initializeTestTimeout(300);
+
// fail tests for unmocked requests
beforeEach(done => {
axios.defaults.adapter = config => {
diff --git a/spec/javascripts/u2f/util_spec.js b/spec/frontend/u2f/util_spec.js
index 32cd6891384..32cd6891384 100644
--- a/spec/javascripts/u2f/util_spec.js
+++ b/spec/frontend/u2f/util_spec.js
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_container_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_container_spec.js
index 16c8c939a6f..16c8c939a6f 100644
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_container_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_container_spec.js
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_icon_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js
index f7c2376eebf..f7c2376eebf 100644
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_icon_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js
diff --git a/spec/javascripts/vue_mr_widget/components/states/commit_edit_spec.js b/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js
index 994d6255324..994d6255324 100644
--- a/spec/javascripts/vue_mr_widget/components/states/commit_edit_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js
index daf1cc2d98b..daf1cc2d98b 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_commits_header_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
index 5cf6408cf34..9ee2f88c78d 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
@@ -15,6 +15,7 @@ describe('Commits header component', () => {
isSquashEnabled: false,
targetBranch: 'master',
commitsCount: 5,
+ isFastForwardEnabled: false,
...props,
},
});
@@ -31,6 +32,27 @@ describe('Commits header component', () => {
const findTargetBranchMessage = () => wrapper.find('.label-branch');
const findModifyButton = () => wrapper.find('.modify-message-button');
+ describe('when fast-forward is enabled', () => {
+ beforeEach(() => {
+ createComponent({
+ isFastForwardEnabled: true,
+ isSquashEnabled: true,
+ });
+ });
+
+ it('has commits count message showing 1 commit', () => {
+ expect(findCommitsCountMessage().text()).toBe('1 commit');
+ });
+
+ it('has button with modify commit message', () => {
+ expect(findModifyButton().text()).toBe('Modify commit message');
+ });
+
+ it('does not have merge commit part of the message', () => {
+ expect(findHeaderWrapper().text()).not.toContain('1 merge commit');
+ });
+ });
+
describe('when collapsed', () => {
it('toggle has aria-label equal to Expand', () => {
createComponent();
@@ -78,6 +100,10 @@ describe('Commits header component', () => {
expect(findTargetBranchMessage().text()).toBe('master');
});
+
+    it('has merge commit part of the message', () => {
+ expect(findHeaderWrapper().text()).toContain('1 merge commit');
+ });
});
describe('when expanded', () => {
diff --git a/spec/javascripts/vue_mr_widget/stores/get_state_key_spec.js b/spec/frontend/vue_mr_widget/stores/get_state_key_spec.js
index b356ea85cad..b356ea85cad 100644
--- a/spec/javascripts/vue_mr_widget/stores/get_state_key_spec.js
+++ b/spec/frontend/vue_mr_widget/stores/get_state_key_spec.js
diff --git a/spec/javascripts/vue_shared/components/callout_spec.js b/spec/frontend/vue_shared/components/callout_spec.js
index 91208dfb31a..91208dfb31a 100644
--- a/spec/javascripts/vue_shared/components/callout_spec.js
+++ b/spec/frontend/vue_shared/components/callout_spec.js
diff --git a/spec/javascripts/vue_shared/components/code_block_spec.js b/spec/frontend/vue_shared/components/code_block_spec.js
index 6b91a20ff76..6b91a20ff76 100644
--- a/spec/javascripts/vue_shared/components/code_block_spec.js
+++ b/spec/frontend/vue_shared/components/code_block_spec.js
diff --git a/spec/javascripts/vue_shared/components/diff_viewer/viewers/mode_changed_spec.js b/spec/frontend/vue_shared/components/diff_viewer/viewers/mode_changed_spec.js
index c4358f0d9cb..c4358f0d9cb 100644
--- a/spec/javascripts/vue_shared/components/diff_viewer/viewers/mode_changed_spec.js
+++ b/spec/frontend/vue_shared/components/diff_viewer/viewers/mode_changed_spec.js
diff --git a/spec/javascripts/vue_shared/components/identicon_spec.js b/spec/frontend/vue_shared/components/identicon_spec.js
index 0b3dbb61c96..0b3dbb61c96 100644
--- a/spec/javascripts/vue_shared/components/identicon_spec.js
+++ b/spec/frontend/vue_shared/components/identicon_spec.js
diff --git a/spec/javascripts/vue_shared/components/lib/utils/dom_utils_spec.js b/spec/frontend/vue_shared/components/lib/utils/dom_utils_spec.js
index 2388660b0c2..2388660b0c2 100644
--- a/spec/javascripts/vue_shared/components/lib/utils/dom_utils_spec.js
+++ b/spec/frontend/vue_shared/components/lib/utils/dom_utils_spec.js
diff --git a/spec/javascripts/vue_shared/components/pagination_links_spec.js b/spec/frontend/vue_shared/components/pagination_links_spec.js
index d0cb3731050..d0cb3731050 100644
--- a/spec/javascripts/vue_shared/components/pagination_links_spec.js
+++ b/spec/frontend/vue_shared/components/pagination_links_spec.js
diff --git a/spec/javascripts/vue_shared/components/time_ago_tooltip_spec.js b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
index 536bb57b946..536bb57b946 100644
--- a/spec/javascripts/vue_shared/components/time_ago_tooltip_spec.js
+++ b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
diff --git a/spec/javascripts/vuex_shared/modules/modal/mutations_spec.js b/spec/frontend/vuex_shared/modules/modal/mutations_spec.js
index d07f8ba1e65..d07f8ba1e65 100644
--- a/spec/javascripts/vuex_shared/modules/modal/mutations_spec.js
+++ b/spec/frontend/vuex_shared/modules/modal/mutations_spec.js
diff --git a/spec/helpers/auth_helper_spec.rb b/spec/helpers/auth_helper_spec.rb
index f0c2e4768ec..2ba8b3dbf22 100644
--- a/spec/helpers/auth_helper_spec.rb
+++ b/spec/helpers/auth_helper_spec.rb
@@ -97,17 +97,37 @@ describe AuthHelper do
end
end
- describe 'unlink_allowed?' do
- [:saml, :cas3].each do |provider|
- it "returns true if the provider is #{provider}" do
- expect(helper.unlink_allowed?(provider)).to be false
- end
+ describe '#link_provider_allowed?' do
+ let(:policy) { instance_double('IdentityProviderPolicy') }
+ let(:current_user) { instance_double('User') }
+ let(:provider) { double }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(current_user)
+ allow(IdentityProviderPolicy).to receive(:new).with(current_user, provider).and_return(policy)
end
- [:twitter, :facebook, :google_oauth2, :gitlab, :github, :bitbucket, :crowd, :auth0, :authentiq].each do |provider|
- it "returns false if the provider is #{provider}" do
- expect(helper.unlink_allowed?(provider)).to be true
- end
+ it 'delegates to identity provider policy' do
+ allow(policy).to receive(:can?).with(:link).and_return('policy_link_result')
+
+ expect(helper.link_provider_allowed?(provider)).to eq 'policy_link_result'
+ end
+ end
+
+ describe '#unlink_provider_allowed?' do
+ let(:policy) { instance_double('IdentityProviderPolicy') }
+ let(:current_user) { instance_double('User') }
+ let(:provider) { double }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(current_user)
+ allow(IdentityProviderPolicy).to receive(:new).with(current_user, provider).and_return(policy)
+ end
+
+ it 'delegates to identity provider policy' do
+ allow(policy).to receive(:can?).with(:unlink).and_return('policy_unlink_result')
+
+ expect(helper.unlink_provider_allowed?(provider)).to eq 'policy_unlink_result'
end
end
end
diff --git a/spec/helpers/auto_devops_helper_spec.rb b/spec/helpers/auto_devops_helper_spec.rb
index 223e562238d..d2540696b17 100644
--- a/spec/helpers/auto_devops_helper_spec.rb
+++ b/spec/helpers/auto_devops_helper_spec.rb
@@ -29,11 +29,11 @@ describe AutoDevopsHelper do
end
context 'when the banner is disabled by feature flag' do
- it 'allows the feature flag to disable' do
+ before do
Feature.get(:auto_devops_banner_disabled).enable
-
- expect(subject).to be(false)
end
+
+ it { is_expected.to be_falsy }
end
context 'when dismissed' do
@@ -90,4 +90,136 @@ describe AutoDevopsHelper do
it { is_expected.to eq(false) }
end
end
+
+ describe '#badge_for_auto_devops_scope' do
+ subject { helper.badge_for_auto_devops_scope(receiver) }
+
+ context 'when receiver is a group' do
+ context 'when explicitly enabled' do
+ let(:receiver) { create(:group, :auto_devops_enabled) }
+
+ it { is_expected.to eq('group enabled') }
+ end
+
+ context 'when explicitly disabled' do
+ let(:receiver) { create(:group, :auto_devops_disabled) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when auto devops is implicitly enabled' do
+ let(:receiver) { create(:group) }
+
+ context 'by instance' do
+ before do
+ stub_application_setting(auto_devops_enabled: true)
+ end
+
+ it { is_expected.to eq('instance enabled') }
+ end
+
+ context 'with groups', :nested_groups do
+ before do
+ receiver.update(parent: parent)
+ end
+
+ context 'when auto devops is enabled on parent' do
+ let(:parent) { create(:group, :auto_devops_enabled) }
+
+ it { is_expected.to eq('group enabled') }
+ end
+
+ context 'when auto devops is enabled on parent group' do
+ let(:root_parent) { create(:group, :auto_devops_enabled) }
+ let(:parent) { create(:group, parent: root_parent) }
+
+ it { is_expected.to eq('group enabled') }
+ end
+
+ context 'when auto devops is disabled on parent group' do
+ let(:root_parent) { create(:group, :auto_devops_disabled) }
+ let(:parent) { create(:group, parent: root_parent) }
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+ end
+
+ context 'when receiver is a project' do
+ context 'when auto devops is enabled at project level' do
+ let(:receiver) { create(:project, :auto_devops) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when auto devops is disabled at project level' do
+ let(:receiver) { create(:project, :auto_devops_disabled) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when auto devops is implicitly enabled' do
+ let(:receiver) { create(:project) }
+
+ context 'by instance' do
+ before do
+ stub_application_setting(auto_devops_enabled: true)
+ end
+
+ it { is_expected.to eq('instance enabled') }
+ end
+
+ context 'with groups', :nested_groups do
+ let(:receiver) { create(:project, :repository, namespace: group) }
+
+ before do
+ stub_application_setting(auto_devops_enabled: false)
+ end
+
+ context 'when auto devops is enabled on group level' do
+ let(:group) { create(:group, :auto_devops_enabled) }
+
+ it { is_expected.to eq('group enabled') }
+ end
+
+ context 'when auto devops is enabled on root group' do
+ let(:root_parent) { create(:group, :auto_devops_enabled) }
+ let(:group) { create(:group, parent: root_parent) }
+
+ it { is_expected.to eq('group enabled') }
+ end
+ end
+ end
+
+ context 'when auto devops is implicitly disabled' do
+ let(:receiver) { create(:project) }
+
+ context 'by instance' do
+ before do
+ stub_application_setting(auto_devops_enabled: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with groups', :nested_groups do
+ let(:receiver) { create(:project, :repository, namespace: group) }
+
+ context 'when auto devops is disabled on group level' do
+ let(:group) { create(:group, :auto_devops_disabled) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when root group is enabled and parent disabled' do
+ let(:root_parent) { create(:group, :auto_devops_enabled) }
+ let(:group) { create(:group, :auto_devops_disabled, parent: root_parent) }
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/helpers/clusters_helper_spec.rb b/spec/helpers/clusters_helper_spec.rb
new file mode 100644
index 00000000000..4ea0f76fc28
--- /dev/null
+++ b/spec/helpers/clusters_helper_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ClustersHelper do
+ describe '#has_rbac_enabled?' do
+ context 'when kubernetes platform has been created' do
+ let(:platform_kubernetes) { build_stubbed(:cluster_platform_kubernetes) }
+ let(:cluster) { build_stubbed(:cluster, :provided_by_gcp, platform_kubernetes: platform_kubernetes) }
+
+ it 'returns kubernetes platform value' do
+ expect(helper.has_rbac_enabled?(cluster)).to be_truthy
+ end
+ end
+
+ context 'when kubernetes platform has not been created yet' do
+ let(:cluster) { build_stubbed(:cluster, :providing_by_gcp) }
+
+ it 'delegates to cluster provider' do
+ expect(helper.has_rbac_enabled?(cluster)).to be_truthy
+ end
+
+ context 'when ABAC cluster is created' do
+ let(:provider) { build_stubbed(:cluster_provider_gcp, :abac_enabled) }
+ let(:cluster) { build_stubbed(:cluster, :providing_by_gcp, provider_gcp: provider) }
+
+ it 'delegates to cluster provider' do
+ expect(helper.has_rbac_enabled?(cluster)).to be_falsy
+ end
+ end
+ end
+ end
+end
diff --git a/spec/javascripts/badges/components/badge_list_spec.js b/spec/javascripts/badges/components/badge_list_spec.js
index 536671db377..2f72c9ed89d 100644
--- a/spec/javascripts/badges/components/badge_list_spec.js
+++ b/spec/javascripts/badges/components/badge_list_spec.js
@@ -60,7 +60,7 @@ describe('BadgeList component', () => {
Vue.nextTick()
.then(() => {
- const loadingIcon = vm.$el.querySelector('.fa-spinner');
+ const loadingIcon = vm.$el.querySelector('.spinner');
expect(loadingIcon).toBeVisible();
})
diff --git a/spec/javascripts/badges/components/badge_spec.js b/spec/javascripts/badges/components/badge_spec.js
index 29805408bcf..4e4d1ae2e99 100644
--- a/spec/javascripts/badges/components/badge_spec.js
+++ b/spec/javascripts/badges/components/badge_spec.js
@@ -15,7 +15,7 @@ describe('Badge component', () => {
const buttons = vm.$el.querySelectorAll('button');
return {
badgeImage: vm.$el.querySelector('img.project-badge'),
- loadingIcon: vm.$el.querySelector('.fa-spinner'),
+ loadingIcon: vm.$el.querySelector('.spinner'),
reloadButton: buttons[buttons.length - 1],
};
};
diff --git a/spec/javascripts/boards/board_list_spec.js b/spec/javascripts/boards/board_list_spec.js
index 2642c8b1bdb..396fc823ef5 100644
--- a/spec/javascripts/boards/board_list_spec.js
+++ b/spec/javascripts/boards/board_list_spec.js
@@ -195,7 +195,7 @@ describe('Board list component', () => {
component.list.loadingMore = true;
Vue.nextTick(() => {
- expect(component.$el.querySelector('.board-list-count .fa-spinner')).not.toBeNull();
+ expect(component.$el.querySelector('.board-list-count .spinner')).not.toBeNull();
done();
});
diff --git a/spec/javascripts/clusters/clusters_bundle_spec.js b/spec/javascripts/clusters/clusters_bundle_spec.js
index 7928feeadfa..71f16dc259e 100644
--- a/spec/javascripts/clusters/clusters_bundle_spec.js
+++ b/spec/javascripts/clusters/clusters_bundle_spec.js
@@ -1,5 +1,10 @@
import Clusters from '~/clusters/clusters_bundle';
-import { REQUEST_SUBMITTED, REQUEST_FAILURE, APPLICATION_STATUS } from '~/clusters/constants';
+import {
+ REQUEST_SUBMITTED,
+ REQUEST_FAILURE,
+ APPLICATION_STATUS,
+ INGRESS_DOMAIN_SUFFIX,
+} from '~/clusters/constants';
import getSetTimeoutPromise from 'spec/helpers/set_timeout_promise_helper';
describe('Clusters', () => {
@@ -265,4 +270,77 @@ describe('Clusters', () => {
.catch(done.fail);
});
});
+
+ describe('handleSuccess', () => {
+ beforeEach(() => {
+ spyOn(cluster.store, 'updateStateFromServer');
+ spyOn(cluster, 'toggleIngressDomainHelpText');
+ spyOn(cluster, 'checkForNewInstalls');
+ spyOn(cluster, 'updateContainer');
+
+ cluster.handleSuccess({ data: {} });
+ });
+
+ it('updates clusters store', () => {
+ expect(cluster.store.updateStateFromServer).toHaveBeenCalled();
+ });
+
+ it('checks for new installable apps', () => {
+ expect(cluster.checkForNewInstalls).toHaveBeenCalled();
+ });
+
+ it('toggles ingress domain help text', () => {
+ expect(cluster.toggleIngressDomainHelpText).toHaveBeenCalled();
+ });
+
+ it('updates message containers', () => {
+ expect(cluster.updateContainer).toHaveBeenCalled();
+ });
+ });
+
+ describe('toggleIngressDomainHelpText', () => {
+ const { INSTALLED, INSTALLABLE, NOT_INSTALLABLE } = APPLICATION_STATUS;
+
+ const ingressPreviousState = { status: INSTALLABLE };
+ const ingressNewState = { status: INSTALLED, externalIp: '127.0.0.1' };
+
+ describe(`when ingress application new status is ${INSTALLED}`, () => {
+ beforeEach(() => {
+ ingressNewState.status = INSTALLED;
+ cluster.toggleIngressDomainHelpText(ingressPreviousState, ingressNewState);
+ });
+
+ it('displays custom domain help text', () => {
+ expect(cluster.ingressDomainHelpText.classList.contains('hide')).toEqual(false);
+ });
+
+ it('updates ingress external ip address', () => {
+ expect(cluster.ingressDomainSnippet.textContent).toEqual(
+ `${ingressNewState.externalIp}${INGRESS_DOMAIN_SUFFIX}`,
+ );
+ });
+ });
+
+ describe(`when ingress application new status is different from ${INSTALLED}`, () => {
+ it('hides custom domain help text', () => {
+ ingressNewState.status = NOT_INSTALLABLE;
+ cluster.ingressDomainHelpText.classList.remove('hide');
+
+ cluster.toggleIngressDomainHelpText(ingressPreviousState, ingressNewState);
+
+ expect(cluster.ingressDomainHelpText.classList.contains('hide')).toEqual(true);
+ });
+ });
+
+ describe('when ingress application new status and old status are the same', () => {
+ it('does not modify custom domain help text', () => {
+ ingressPreviousState.status = INSTALLED;
+ ingressNewState.status = ingressPreviousState.status;
+
+ cluster.toggleIngressDomainHelpText(ingressPreviousState, ingressNewState);
+
+ expect(cluster.ingressDomainHelpText.classList.contains('hide')).toEqual(true);
+ });
+ });
+ });
});
diff --git a/spec/javascripts/clusters/components/applications_spec.js b/spec/javascripts/clusters/components/applications_spec.js
index e2466bf326c..790e4b9602c 100644
--- a/spec/javascripts/clusters/components/applications_spec.js
+++ b/spec/javascripts/clusters/components/applications_spec.js
@@ -141,7 +141,7 @@ describe('Applications', () => {
});
describe('without ip address', () => {
- it('renders an input text with a question mark and an alert text', () => {
+ it('renders an input text with a loading icon and an alert text', () => {
vm = mountComponent(Applications, {
applications: {
...APPLICATIONS_MOCK_STATE,
@@ -152,8 +152,7 @@ describe('Applications', () => {
},
});
- expect(vm.$el.querySelector('.js-endpoint').value).toEqual('?');
-
+ expect(vm.$el.querySelector('.js-ingress-ip-loading-icon')).not.toBe(null);
expect(vm.$el.querySelector('.js-no-endpoint-message')).not.toBe(null);
});
});
@@ -330,7 +329,7 @@ describe('Applications', () => {
});
describe('without ip address', () => {
- it('renders an input text with a question mark and an alert text', () => {
+ it('renders an input text with a loading icon and an alert text', () => {
vm = mountComponent(Applications, {
applications: {
...APPLICATIONS_MOCK_STATE,
@@ -342,8 +341,7 @@ describe('Applications', () => {
},
});
- expect(vm.$el.querySelector('.js-knative-endpoint').value).toEqual('?');
-
+ expect(vm.$el.querySelector('.js-knative-ip-loading-icon')).not.toBe(null);
expect(vm.$el.querySelector('.js-no-knative-endpoint-message')).not.toBe(null);
});
});
diff --git a/spec/javascripts/diffs/components/app_spec.js b/spec/javascripts/diffs/components/app_spec.js
index d81c433cca6..8d7c52a2876 100644
--- a/spec/javascripts/diffs/components/app_spec.js
+++ b/spec/javascripts/diffs/components/app_spec.js
@@ -397,4 +397,61 @@ describe('diffs/components/app', () => {
expect(wrapper.find(TreeList).exists()).toBe(true);
});
});
+
+ describe('hideTreeListIfJustOneFile', () => {
+ let toggleShowTreeList;
+
+ beforeEach(() => {
+ toggleShowTreeList = jasmine.createSpy('toggleShowTreeList');
+ });
+
+ afterEach(() => {
+ localStorage.removeItem('mr_tree_show');
+ });
+
+ it('calls toggleShowTreeList when only 1 file', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles.push({ sha: '123' });
+ });
+
+ wrapper.setMethods({
+ toggleShowTreeList,
+ });
+
+ wrapper.vm.hideTreeListIfJustOneFile();
+
+ expect(toggleShowTreeList).toHaveBeenCalledWith(false);
+ });
+
+ it('does not call toggleShowTreeList when more than 1 file', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles.push({ sha: '123' });
+ state.diffs.diffFiles.push({ sha: '124' });
+ });
+
+ wrapper.setMethods({
+ toggleShowTreeList,
+ });
+
+ wrapper.vm.hideTreeListIfJustOneFile();
+
+ expect(toggleShowTreeList).not.toHaveBeenCalled();
+ });
+
+ it('does not call toggleShowTreeList when localStorage is set', () => {
+ localStorage.setItem('mr_tree_show', 'true');
+
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles.push({ sha: '123' });
+ });
+
+ wrapper.setMethods({
+ toggleShowTreeList,
+ });
+
+ wrapper.vm.hideTreeListIfJustOneFile();
+
+ expect(toggleShowTreeList).not.toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/javascripts/diffs/components/diff_file_spec.js b/spec/javascripts/diffs/components/diff_file_spec.js
index ba04c8c4a4c..d9b298e84da 100644
--- a/spec/javascripts/diffs/components/diff_file_spec.js
+++ b/spec/javascripts/diffs/components/diff_file_spec.js
@@ -109,6 +109,31 @@ describe('DiffFile', () => {
done();
});
});
+
+ it('should update store state', done => {
+ spyOn(vm.$store, 'dispatch');
+
+ vm.isCollapsed = true;
+
+ vm.$nextTick(() => {
+ expect(vm.$store.dispatch).toHaveBeenCalledWith('diffs/setFileCollapsed', {
+ filePath: vm.file.file_path,
+ collapsed: true,
+ });
+
+ done();
+ });
+ });
+
+ it('updates local state when changing file state', done => {
+ vm.file.viewer.collapsed = true;
+
+ vm.$nextTick(() => {
+ expect(vm.isCollapsed).toBe(true);
+
+ done();
+ });
+ });
});
});
diff --git a/spec/javascripts/diffs/store/actions_spec.js b/spec/javascripts/diffs/store/actions_spec.js
index 070bfb2ccd0..bca99caa920 100644
--- a/spec/javascripts/diffs/store/actions_spec.js
+++ b/spec/javascripts/diffs/store/actions_spec.js
@@ -35,6 +35,7 @@ import actions, {
receiveFullDiffError,
fetchFullDiff,
toggleFullDiff,
+ setFileCollapsed,
} from '~/diffs/store/actions';
import eventHub from '~/notes/event_hub';
import * as types from '~/diffs/store/mutation_types';
@@ -733,6 +734,14 @@ describe('DiffsStoreActions', () => {
expect(localStorage.setItem).toHaveBeenCalledWith('mr_tree_show', true);
});
+
+ it('does not update localStorage', () => {
+ spyOn(localStorage, 'setItem');
+
+ toggleShowTreeList({ commit() {}, state: { showTreeList: true } }, false);
+
+ expect(localStorage.setItem).not.toHaveBeenCalled();
+ });
});
describe('renderFileForDiscussionId', () => {
@@ -977,4 +986,17 @@ describe('DiffsStoreActions', () => {
);
});
});
+
+ describe('setFileCollapsed', () => {
+ it('commits SET_FILE_COLLAPSED', done => {
+ testAction(
+ setFileCollapsed,
+ { filePath: 'test', collapsed: true },
+ null,
+ [{ type: types.SET_FILE_COLLAPSED, payload: { filePath: 'test', collapsed: true } }],
+ [],
+ done,
+ );
+ });
+ });
});
diff --git a/spec/javascripts/diffs/store/mutations_spec.js b/spec/javascripts/diffs/store/mutations_spec.js
index 270e7d75666..5556a994a14 100644
--- a/spec/javascripts/diffs/store/mutations_spec.js
+++ b/spec/javascripts/diffs/store/mutations_spec.js
@@ -58,13 +58,15 @@ describe('DiffsStoreMutations', () => {
describe('EXPAND_ALL_FILES', () => {
it('should change the collapsed prop from diffFiles', () => {
const diffFile = {
- collapsed: true,
+ viewer: {
+ collapsed: true,
+ },
};
const state = { expandAllFiles: true, diffFiles: [diffFile] };
mutations[types.EXPAND_ALL_FILES](state);
- expect(state.diffFiles[0].collapsed).toEqual(false);
+ expect(state.diffFiles[0].viewer.collapsed).toEqual(false);
});
});
@@ -742,4 +744,16 @@ describe('DiffsStoreMutations', () => {
expect(state.diffFiles[0].isShowingFullFile).toBe(true);
});
});
+
+ describe('SET_FILE_COLLAPSED', () => {
+ it('sets collapsed', () => {
+ const state = {
+ diffFiles: [{ file_path: 'test', viewer: { collapsed: false } }],
+ };
+
+ mutations[types.SET_FILE_COLLAPSED](state, { filePath: 'test', collapsed: true });
+
+ expect(state.diffFiles[0].viewer.collapsed).toBe(true);
+ });
+ });
});
diff --git a/spec/javascripts/environments/environment_item_spec.js b/spec/javascripts/environments/environment_item_spec.js
index a89e50045da..388d7063d13 100644
--- a/spec/javascripts/environments/environment_item_spec.js
+++ b/spec/javascripts/environments/environment_item_spec.js
@@ -20,23 +20,23 @@ describe('Environment item', () => {
size: 3,
isFolder: true,
environment_path: 'url',
+ log_path: 'url',
};
component = new EnvironmentItem({
propsData: {
model: mockItem,
canReadEnvironment: true,
- service: {},
},
}).$mount();
});
- it('Should render folder icon and name', () => {
+ it('should render folder icon and name', () => {
expect(component.$el.querySelector('.folder-name').textContent).toContain(mockItem.name);
expect(component.$el.querySelector('.folder-icon')).toBeDefined();
});
- it('Should render the number of children in a badge', () => {
+ it('should render the number of children in a badge', () => {
expect(component.$el.querySelector('.folder-name .badge').textContent).toContain(
mockItem.size,
);
@@ -60,7 +60,7 @@ describe('Environment item', () => {
sha: '500aabcb17c97bdcf2d0c410b70cb8556f0362dd',
ref: {
name: 'master',
- ref_path: 'root/ci-folders/tree/master',
+ ref_url: 'root/ci-folders/tree/master',
},
tag: true,
'last?': true,
@@ -109,6 +109,7 @@ describe('Environment item', () => {
},
has_stop_action: true,
environment_path: 'root/ci-folders/environments/31',
+ log_path: 'root/ci-folders/environments/31/logs',
created_at: '2016-11-07T11:11:16.525Z',
updated_at: '2016-11-10T15:55:58.778Z',
};
@@ -117,7 +118,6 @@ describe('Environment item', () => {
propsData: {
model: environment,
canReadEnvironment: true,
- service: {},
},
}).$mount();
});
@@ -157,13 +157,13 @@ describe('Environment item', () => {
});
describe('With build url', () => {
- it('Should link to build url provided', () => {
+ it('should link to build url provided', () => {
expect(component.$el.querySelector('.build-link').getAttribute('href')).toEqual(
environment.last_deployment.deployable.build_path,
);
});
- it('Should render deployable name and id', () => {
+ it('should render deployable name and id', () => {
expect(component.$el.querySelector('.build-link').getAttribute('href')).toEqual(
environment.last_deployment.deployable.build_path,
);
@@ -178,7 +178,7 @@ describe('Environment item', () => {
});
describe('With manual actions', () => {
- it('Should render actions component', () => {
+ it('should render actions component', () => {
expect(component.$el.querySelector('.js-manual-actions-container')).toBeDefined();
});
});
@@ -190,13 +190,13 @@ describe('Environment item', () => {
});
describe('With stop action', () => {
- it('Should render stop action component', () => {
+ it('should render stop action component', () => {
expect(component.$el.querySelector('.js-stop-component-container')).toBeDefined();
});
});
describe('With retry action', () => {
- it('Should render rollback component', () => {
+ it('should render rollback component', () => {
expect(component.$el.querySelector('.js-rollback-component-container')).toBeDefined();
});
});
diff --git a/spec/javascripts/environments/environment_table_spec.js b/spec/javascripts/environments/environment_table_spec.js
index ecd28594873..a3f34232a85 100644
--- a/spec/javascripts/environments/environment_table_spec.js
+++ b/spec/javascripts/environments/environment_table_spec.js
@@ -6,6 +6,14 @@ describe('Environment table', () => {
let Component;
let vm;
+ const eeOnlyProps = {
+ canaryDeploymentFeatureId: 'canary_deployment',
+ showCanaryDeploymentCallout: true,
+ userCalloutsPath: '/callouts',
+ lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
+ helpCanaryDeploymentsPath: 'help/canary-deployments',
+ };
+
beforeEach(() => {
Component = Vue.extend(environmentTableComp);
});
@@ -27,6 +35,7 @@ describe('Environment table', () => {
vm = mountComponent(Component, {
environments: [mockItem],
canReadEnvironment: true,
+ ...eeOnlyProps,
});
expect(vm.$el.getAttribute('class')).toContain('ci-table');
@@ -67,6 +76,7 @@ describe('Environment table', () => {
vm = mountComponent(Component, {
environments: mockItems,
canReadEnvironment: true,
+ ...eeOnlyProps,
});
const [old, newer, older, noDeploy] = mockItems;
@@ -130,6 +140,7 @@ describe('Environment table', () => {
vm = mountComponent(Component, {
environments: mockItems,
canReadEnvironment: true,
+ ...eeOnlyProps,
});
const [prod, review, staging] = mockItems;
@@ -166,6 +177,7 @@ describe('Environment table', () => {
vm = mountComponent(Component, {
environments: mockItems,
canReadEnvironment: true,
+ ...eeOnlyProps,
});
const [old, newer, older] = mockItems;
@@ -192,6 +204,7 @@ describe('Environment table', () => {
vm = mountComponent(Component, {
environments: mockItems,
canReadEnvironment: true,
+ ...eeOnlyProps,
});
const [old, newer, older] = mockItems;
@@ -240,6 +253,7 @@ describe('Environment table', () => {
vm = mountComponent(Component, {
environments: mockItems,
canReadEnvironment: true,
+ ...eeOnlyProps,
});
expect(vm.sortedEnvironments.map(env => env.name)).toEqual([
diff --git a/spec/javascripts/environments/environments_app_spec.js b/spec/javascripts/environments/environments_app_spec.js
index b6a244f7cd3..0dcd8868aba 100644
--- a/spec/javascripts/environments/environments_app_spec.js
+++ b/spec/javascripts/environments/environments_app_spec.js
@@ -13,6 +13,11 @@ describe('Environment', () => {
cssContainerClass: 'container',
newEnvironmentPath: 'environments/new',
helpPagePath: 'help',
+ canaryDeploymentFeatureId: 'canary_deployment',
+ showCanaryDeploymentCallout: true,
+ userCalloutsPath: '/callouts',
+ lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
+ helpCanaryDeploymentsPath: 'help/canary-deployments',
};
let EnvironmentsComponent;
diff --git a/spec/javascripts/environments/environments_store_spec.js b/spec/javascripts/environments/environments_store_spec.js
index c3d16f10d72..8abdbcbbe54 100644
--- a/spec/javascripts/environments/environments_store_spec.js
+++ b/spec/javascripts/environments/environments_store_spec.js
@@ -34,54 +34,46 @@ describe('Store', () => {
expect(store.state.stoppedCounter).toEqual(2);
});
- describe('store environments', () => {
- it('should store environments', () => {
- store.storeEnvironments(serverData);
-
- expect(store.state.environments.length).toEqual(serverData.length);
- });
-
- it('should add folder keys when environment is a folder', () => {
- const environment = {
- name: 'bar',
- size: 3,
- id: 2,
- };
+ it('should add folder keys when environment is a folder', () => {
+ const environment = {
+ name: 'bar',
+ size: 3,
+ id: 2,
+ };
- store.storeEnvironments([environment]);
+ store.storeEnvironments([environment]);
- expect(store.state.environments[0].isFolder).toEqual(true);
- expect(store.state.environments[0].folderName).toEqual('bar');
- });
-
- it('should extract content of `latest` key when provided', () => {
- const environment = {
- name: 'bar',
- size: 3,
- id: 2,
- latest: {
- last_deployment: {},
- isStoppable: true,
- },
- };
-
- store.storeEnvironments([environment]);
+ expect(store.state.environments[0].isFolder).toEqual(true);
+ expect(store.state.environments[0].folderName).toEqual('bar');
+ });
- expect(store.state.environments[0].last_deployment).toEqual({});
- expect(store.state.environments[0].isStoppable).toEqual(true);
- });
+ it('should extract content of `latest` key when provided', () => {
+ const environment = {
+ name: 'bar',
+ size: 3,
+ id: 2,
+ latest: {
+ last_deployment: {},
+ isStoppable: true,
+ },
+ };
+
+ store.storeEnvironments([environment]);
+
+ expect(store.state.environments[0].last_deployment).toEqual({});
+ expect(store.state.environments[0].isStoppable).toEqual(true);
+ });
- it('should store latest.name when the environment is not a folder', () => {
- store.storeEnvironments(serverData);
+ it('should store latest.name when the environment is not a folder', () => {
+ store.storeEnvironments(serverData);
- expect(store.state.environments[0].name).toEqual(serverData[0].latest.name);
- });
+ expect(store.state.environments[0].name).toEqual(serverData[0].latest.name);
+ });
- it('should store root level name when environment is a folder', () => {
- store.storeEnvironments(serverData);
+ it('should store root level name when environment is a folder', () => {
+ store.storeEnvironments(serverData);
- expect(store.state.environments[1].folderName).toEqual(serverData[1].name);
- });
+ expect(store.state.environments[1].folderName).toEqual(serverData[1].name);
});
describe('toggleFolder', () => {
diff --git a/spec/javascripts/environments/folder/environments_folder_view_spec.js b/spec/javascripts/environments/folder/environments_folder_view_spec.js
index 69ddd26eef1..ff15067aeac 100644
--- a/spec/javascripts/environments/folder/environments_folder_view_spec.js
+++ b/spec/javascripts/environments/folder/environments_folder_view_spec.js
@@ -3,6 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import environmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import { removeBreakLine, removeWhitespace } from 'spec/helpers/vue_component_helper';
import { environmentsList } from '../mock_data';
describe('Environments Folder View', () => {
@@ -15,6 +16,11 @@ describe('Environments Folder View', () => {
folderName: 'review',
canReadEnvironment: true,
cssContainerClass: 'container',
+ canaryDeploymentFeatureId: 'canary_deployment',
+ showCanaryDeploymentCallout: true,
+ userCalloutsPath: '/callouts',
+ lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
+ helpCanaryDeploymentsPath: 'help/canary-deployments',
};
beforeEach(() => {
@@ -89,9 +95,11 @@ describe('Environments Folder View', () => {
it('should render parent folder name', done => {
setTimeout(() => {
- expect(component.$el.querySelector('.js-folder-name').textContent.trim()).toContain(
- 'Environments / review',
- );
+ expect(
+ removeBreakLine(
+ removeWhitespace(component.$el.querySelector('.js-folder-name').textContent.trim()),
+ ),
+ ).toContain('Environments / review');
done();
}, 0);
});
diff --git a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
index f3dc35552d5..a72ea6ab547 100644
--- a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
@@ -293,6 +293,7 @@ describe('Filtered Search Visual Tokens', () => {
subject.addVisualTokenElement('milestone');
const token = tokensContainer.querySelector('.js-visual-token');
+ expect(token.classList.contains('search-token-milestone')).toEqual(true);
expect(token.classList.contains('filtered-search-token')).toEqual(true);
expect(token.querySelector('.name').innerText).toEqual('milestone');
expect(token.querySelector('.value')).toEqual(null);
@@ -302,6 +303,7 @@ describe('Filtered Search Visual Tokens', () => {
subject.addVisualTokenElement('label', 'Frontend');
const token = tokensContainer.querySelector('.js-visual-token');
+ expect(token.classList.contains('search-token-label')).toEqual(true);
expect(token.classList.contains('filtered-search-token')).toEqual(true);
expect(token.querySelector('.name').innerText).toEqual('label');
expect(token.querySelector('.value').innerText).toEqual('Frontend');
@@ -317,10 +319,12 @@ describe('Filtered Search Visual Tokens', () => {
const labelToken = tokens[0];
const assigneeToken = tokens[1];
+ expect(labelToken.classList.contains('search-token-label')).toEqual(true);
expect(labelToken.classList.contains('filtered-search-token')).toEqual(true);
expect(labelToken.querySelector('.name').innerText).toEqual('label');
expect(labelToken.querySelector('.value').innerText).toEqual('Frontend');
+ expect(assigneeToken.classList.contains('search-token-assignee')).toEqual(true);
expect(assigneeToken.classList.contains('filtered-search-token')).toEqual(true);
expect(assigneeToken.querySelector('.name').innerText).toEqual('assignee');
expect(assigneeToken.querySelector('.value').innerText).toEqual('@root');
diff --git a/spec/javascripts/filtered_search/visual_token_value_spec.js b/spec/javascripts/filtered_search/visual_token_value_spec.js
index f52dc26a7bb..14217d460cc 100644
--- a/spec/javascripts/filtered_search/visual_token_value_spec.js
+++ b/spec/javascripts/filtered_search/visual_token_value_spec.js
@@ -317,7 +317,18 @@ describe('Filtered Search Visual Tokens', () => {
it('does not update user token appearance for `none` filter', () => {
const { subject, tokenValueContainer, tokenValueElement } = findElements(authorToken);
- subject.tokenType = 'none';
+ subject.tokenValue = 'none';
+
+ const { updateUserTokenAppearanceSpy } = setupSpies(subject);
+ subject.render(tokenValueContainer, tokenValueElement);
+
+ expect(updateUserTokenAppearanceSpy.calls.count()).toBe(0);
+ });
+
+ it('does not update user token appearance for `None` filter', () => {
+ const { subject, tokenValueContainer, tokenValueElement } = findElements(authorToken);
+
+ subject.tokenValue = 'None';
const { updateUserTokenAppearanceSpy } = setupSpies(subject);
subject.render(tokenValueContainer, tokenValueElement);
@@ -328,7 +339,7 @@ describe('Filtered Search Visual Tokens', () => {
it('does not update user token appearance for `any` filter', () => {
const { subject, tokenValueContainer, tokenValueElement } = findElements(authorToken);
- subject.tokenType = 'any';
+ subject.tokenValue = 'any';
const { updateUserTokenAppearanceSpy } = setupSpies(subject);
subject.render(tokenValueContainer, tokenValueElement);
@@ -336,10 +347,21 @@ describe('Filtered Search Visual Tokens', () => {
expect(updateUserTokenAppearanceSpy.calls.count()).toBe(0);
});
+ it('does not update label token color for `None` filter', () => {
+ const { subject, tokenValueContainer, tokenValueElement } = findElements(bugLabelToken);
+
+ subject.tokenValue = 'None';
+
+ const { updateLabelTokenColorSpy } = setupSpies(subject);
+ subject.render(tokenValueContainer, tokenValueElement);
+
+ expect(updateLabelTokenColorSpy.calls.count()).toBe(0);
+ });
+
it('does not update label token color for `none` filter', () => {
const { subject, tokenValueContainer, tokenValueElement } = findElements(bugLabelToken);
- subject.tokenType = 'none';
+ subject.tokenValue = 'none';
const { updateLabelTokenColorSpy } = setupSpies(subject);
subject.render(tokenValueContainer, tokenValueElement);
@@ -350,7 +372,7 @@ describe('Filtered Search Visual Tokens', () => {
it('does not update label token color for `any` filter', () => {
const { subject, tokenValueContainer, tokenValueElement } = findElements(bugLabelToken);
- subject.tokenType = 'any';
+ subject.tokenValue = 'any';
const { updateLabelTokenColorSpy } = setupSpies(subject);
subject.render(tokenValueContainer, tokenValueElement);
diff --git a/spec/javascripts/fixtures/ajax_loading_spinner.html.haml b/spec/javascripts/fixtures/ajax_loading_spinner.html.haml
deleted file mode 100644
index 09d8c9df3b2..00000000000
--- a/spec/javascripts/fixtures/ajax_loading_spinner.html.haml
+++ /dev/null
@@ -1,2 +0,0 @@
-%a.js-ajax-loading-spinner{href: "http://goesnowhere.nothing/whereami", data: {remote: true}}
- %i.fa.fa-trash-o
diff --git a/spec/javascripts/fixtures/autocomplete_sources.rb b/spec/javascripts/fixtures/autocomplete_sources.rb
new file mode 100644
index 00000000000..c117fb7cd24
--- /dev/null
+++ b/spec/javascripts/fixtures/autocomplete_sources.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::AutocompleteSourcesController, '(JavaScript fixtures)', type: :controller do
+ include JavaScriptFixturesHelpers
+
+ set(:admin) { create(:admin) }
+ set(:group) { create(:group, name: 'frontend-fixtures') }
+ set(:project) { create(:project, namespace: group, path: 'autocomplete-sources-project') }
+ set(:issue) { create(:issue, project: project) }
+
+ before(:all) do
+ clean_frontend_fixtures('autocomplete_sources/')
+ end
+
+ before do
+ sign_in(admin)
+ end
+
+ it 'autocomplete_sources/labels.json' do |example|
+ issue.labels << create(:label, project: project, title: 'bug')
+ issue.labels << create(:label, project: project, title: 'critical')
+
+ create(:label, project: project, title: 'feature')
+ create(:label, project: project, title: 'documentation')
+
+ get :labels,
+ format: :json,
+ params: {
+ namespace_id: group.path,
+ project_id: project.path,
+ type: issue.class.name,
+ type_id: issue.id
+ }
+
+ expect(response).to be_success
+ store_frontend_fixture(response, example.description)
+ end
+end
diff --git a/spec/javascripts/fixtures/balsamiq_viewer.html.haml b/spec/javascripts/fixtures/balsamiq_viewer.html.haml
deleted file mode 100644
index 18166ba4901..00000000000
--- a/spec/javascripts/fixtures/balsamiq_viewer.html.haml
+++ /dev/null
@@ -1 +0,0 @@
-.file-content.balsamiq-viewer#js-balsamiq-viewer{ data: { endpoint: '/test' } }
diff --git a/spec/javascripts/fixtures/create_item_dropdown.html.haml b/spec/javascripts/fixtures/create_item_dropdown.html.haml
deleted file mode 100644
index d4d91b93caf..00000000000
--- a/spec/javascripts/fixtures/create_item_dropdown.html.haml
+++ /dev/null
@@ -1,13 +0,0 @@
-.js-create-item-dropdown-fixture-root
- %input{ name: 'variable[environment]', type: 'hidden' }
- = dropdown_tag('some label',
- options: { toggle_class: 'js-dropdown-menu-toggle',
- content_class: 'js-dropdown-content',
- filter: true,
- dropdown_class: "dropdown-menu-selectable",
- footer_content: true }) do
- %ul.dropdown-footer-list
- %li
- %button{ class: "dropdown-create-new-item-button js-dropdown-create-new-item" }
- Create wildcard
- %code
diff --git a/spec/javascripts/fixtures/emojis.rb b/spec/javascripts/fixtures/emojis.rb
deleted file mode 100644
index f5fb008c7b3..00000000000
--- a/spec/javascripts/fixtures/emojis.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-require 'spec_helper'
-
-describe 'Emojis (JavaScript fixtures)' do
- include JavaScriptFixturesHelpers
-
- before(:all) do
- clean_frontend_fixtures('emojis/')
- end
-
- it 'emojis/emojis.json' do |example|
- JavaScriptFixturesHelpers::FIXTURE_PATHS.each do |fixture_path|
- # Copying the emojis.json from the public folder
- fixture_file_name = File.expand_path('emojis/emojis.json', fixture_path)
- FileUtils.mkdir_p(File.dirname(fixture_file_name))
- FileUtils.cp(Rails.root.join('public/-/emojis/1/emojis.json'), fixture_file_name)
- end
- end
-end
diff --git a/spec/javascripts/fixtures/event_filter.html.haml b/spec/javascripts/fixtures/event_filter.html.haml
deleted file mode 100644
index aa7af61c7eb..00000000000
--- a/spec/javascripts/fixtures/event_filter.html.haml
+++ /dev/null
@@ -1,25 +0,0 @@
-%ul.nav-links.event-filter.scrolling-tabs.nav.nav-tabs
- %li.active
- %a.event-filter-link{ id: "all_event_filter", title: "Filter by all", href: "/dashboard/activity"}
- %span
- All
- %li
- %a.event-filter-link{ id: "push_event_filter", title: "Filter by push events", href: "/dashboard/activity"}
- %span
- Push events
- %li
- %a.event-filter-link{ id: "merged_event_filter", title: "Filter by merge events", href: "/dashboard/activity"}
- %span
- Merge events
- %li
- %a.event-filter-link{ id: "issue_event_filter", title: "Filter by issue events", href: "/dashboard/activity"}
- %span
- Issue events
- %li
- %a.event-filter-link{ id: "comments_event_filter", title: "Filter by comments", href: "/dashboard/activity"}
- %span
- Comments
- %li
- %a.event-filter-link{ id: "team_event_filter", title: "Filter by team", href: "/dashboard/activity"}
- %span
- Team
\ No newline at end of file
diff --git a/spec/javascripts/fixtures/gl_dropdown.html.haml b/spec/javascripts/fixtures/gl_dropdown.html.haml
deleted file mode 100644
index 43d57c2c4dc..00000000000
--- a/spec/javascripts/fixtures/gl_dropdown.html.haml
+++ /dev/null
@@ -1,17 +0,0 @@
-%div
- .dropdown.inline
- %button#js-project-dropdown.dropdown-menu-toggle{type: 'button', data: {toggle: 'dropdown'}}
- .dropdown-toggle-text
- Projects
- %i.fa.fa-chevron-down.dropdown-toggle-caret.js-projects-dropdown-toggle
- .dropdown-menu.dropdown-select.dropdown-menu-selectable
- .dropdown-title
- %span Go to project
- %button.dropdown-title-button.dropdown-menu-close{aria: {label: 'Close'}}
- %i.fa.fa-times.dropdown-menu-close-icon
- .dropdown-input
- %input.dropdown-input-field{type: 'search', placeholder: 'Filter results'}
- %i.fa.fa-search.dropdown-input-search
- .dropdown-content
- .dropdown-loading
- %i.fa.fa-spinner.fa-spin
diff --git a/spec/javascripts/fixtures/gl_field_errors.html.haml b/spec/javascripts/fixtures/gl_field_errors.html.haml
deleted file mode 100644
index 69445b61367..00000000000
--- a/spec/javascripts/fixtures/gl_field_errors.html.haml
+++ /dev/null
@@ -1,15 +0,0 @@
-%form.gl-show-field-errors{action: 'submit', method: 'post'}
- .form-group
- %input.required-text{required: true, type: 'text'} Text
- .form-group
- %input.email{type: 'email', title: 'Please provide a valid email address.', required: true } Email
- .form-group
- %input.password{type: 'password', required: true} Password
- .form-group
- %input.alphanumeric{type: 'text', pattern: '[a-zA-Z0-9]', required: true} Alphanumeric
- .form-group
- %input.hidden{ type:'hidden' }
- .form-group
- %input.custom.gl-field-error-ignore{ type:'text' } Custom, do not validate
- .form-group
- %input.submit{type: 'submit'} Submit
diff --git a/spec/javascripts/fixtures/issuable_filter.html.haml b/spec/javascripts/fixtures/issuable_filter.html.haml
deleted file mode 100644
index 84fa5395cb8..00000000000
--- a/spec/javascripts/fixtures/issuable_filter.html.haml
+++ /dev/null
@@ -1,8 +0,0 @@
-%form.js-filter-form{action: '/user/project/issues?scope=all&state=closed'}
- %input{id: 'utf8', name: 'utf8', value: '✓'}
- %input{id: 'check-all-issues', name: 'check-all-issues'}
- %input{id: 'search', name: 'search'}
- %input{id: 'author_id', name: 'author_id'}
- %input{id: 'assignee_id', name: 'assignee_id'}
- %input{id: 'milestone_title', name: 'milestone_title'}
- %input{id: 'label_name', name: 'label_name'}
diff --git a/spec/javascripts/fixtures/issue_sidebar_label.html.haml b/spec/javascripts/fixtures/issue_sidebar_label.html.haml
deleted file mode 100644
index 06ce248dc9c..00000000000
--- a/spec/javascripts/fixtures/issue_sidebar_label.html.haml
+++ /dev/null
@@ -1,16 +0,0 @@
-.block.labels
- .sidebar-collapsed-icon.js-sidebar-labels-tooltip
- .title.hide-collapsed
- %a.edit-link.float-right{ href: "#" }
- Edit
- .selectbox.hide-collapsed{ style: "display: none;" }
- .dropdown
- %button.dropdown-menu-toggle.js-label-select.js-multiselect{ type: "button", data: { ability_name: "issue", field_name: "issue[label_names][]", issue_update: "/root/test/issues/2.json", labels: "/root/test/labels.json", project_id: "12", show_any: "true", show_no: "true", toggle: "dropdown" } }
- %span.dropdown-toggle-text
- Label
- %i.fa.fa-chevron-down
- .dropdown-menu.dropdown-select.dropdown-menu-paging.dropdown-menu-labels.dropdown-menu-selectable
- .dropdown-page-one
- .dropdown-content
- .dropdown-loading
- %i.fa.fa-spinner.fa-spin
diff --git a/spec/javascripts/fixtures/line_highlighter.html.haml b/spec/javascripts/fixtures/line_highlighter.html.haml
deleted file mode 100644
index 2782c50e298..00000000000
--- a/spec/javascripts/fixtures/line_highlighter.html.haml
+++ /dev/null
@@ -1,11 +0,0 @@
-.file-holder
- .file-content
- .line-numbers
- - 1.upto(25) do |i|
- %a{href: "#L#{i}", id: "L#{i}", 'data-line-number' => i}
- %i.fa.fa-link
- = i
- %pre.code.highlight
- %code
- - 1.upto(25) do |i|
- %span.line{id: "LC#{i}"}= "Line #{i}"
diff --git a/spec/javascripts/fixtures/linked_tabs.html.haml b/spec/javascripts/fixtures/linked_tabs.html.haml
deleted file mode 100644
index 632606e0536..00000000000
--- a/spec/javascripts/fixtures/linked_tabs.html.haml
+++ /dev/null
@@ -1,13 +0,0 @@
-%ul.nav.nav-tabs.new-session-tabs.linked-tabs
- %li.nav-item
- %a.nav-link{ href: 'foo/bar/1', data: { target: 'div#tab1', action: 'tab1', toggle: 'tab' } }
- Tab 1
- %li.nav-item
- %a.nav-link{ href: 'foo/bar/1/context', data: { target: 'div#tab2', action: 'tab2', toggle: 'tab' } }
- Tab 2
-
-.tab-content
- #tab1.tab-pane
- Tab 1 Content
- #tab2.tab-pane
- Tab 2 Content
diff --git a/spec/javascripts/fixtures/merge_requests_show.html.haml b/spec/javascripts/fixtures/merge_requests_show.html.haml
deleted file mode 100644
index 8447dfdda32..00000000000
--- a/spec/javascripts/fixtures/merge_requests_show.html.haml
+++ /dev/null
@@ -1,13 +0,0 @@
-%a.btn-close
-
-.detail-page-description
- .description.js-task-list-container
- .wiki
- %ul.task-list
- %li.task-list-item
- %input.task-list-item-checkbox{type: 'checkbox'}
- Task List Item
- %textarea.js-task-list-field
- \- [ ] Task List Item
-
-%form.js-issuable-update{action: '/foo'}
diff --git a/spec/javascripts/fixtures/mini_dropdown_graph.html.haml b/spec/javascripts/fixtures/mini_dropdown_graph.html.haml
deleted file mode 100644
index 74584993739..00000000000
--- a/spec/javascripts/fixtures/mini_dropdown_graph.html.haml
+++ /dev/null
@@ -1,10 +0,0 @@
-%div.js-builds-dropdown-tests.dropdown.dropdown.js-mini-pipeline-graph
- %button.js-builds-dropdown-button{'data-stage-endpoint' => 'foobar', data: { toggle: 'dropdown'} }
- Dropdown
-
- %ul.dropdown-menu.mini-pipeline-graph-dropdown-menu.js-builds-dropdown-container
- %li.js-builds-dropdown-list.scrollable-menu
- %ul
-
- %li.js-builds-dropdown-loading.hidden
- %span.fa.fa-spinner
diff --git a/spec/javascripts/fixtures/notebook_viewer.html.haml b/spec/javascripts/fixtures/notebook_viewer.html.haml
deleted file mode 100644
index 17a7a9d8f31..00000000000
--- a/spec/javascripts/fixtures/notebook_viewer.html.haml
+++ /dev/null
@@ -1 +0,0 @@
-.file-content#js-notebook-viewer{ data: { endpoint: '/test' } }
diff --git a/spec/javascripts/fixtures/oauth_remember_me.html.haml b/spec/javascripts/fixtures/oauth_remember_me.html.haml
deleted file mode 100644
index a5d7c4e816a..00000000000
--- a/spec/javascripts/fixtures/oauth_remember_me.html.haml
+++ /dev/null
@@ -1,6 +0,0 @@
-#oauth-container
- %input#remember_me{ type: "checkbox" }
-
- %a.oauth-login.twitter{ href: "http://example.com/" }
- %a.oauth-login.github{ href: "http://example.com/" }
- %a.oauth-login.facebook{ href: "http://example.com/?redirect_fragment=L1" }
diff --git a/spec/javascripts/fixtures/pdf_viewer.html.haml b/spec/javascripts/fixtures/pdf_viewer.html.haml
deleted file mode 100644
index 2e57beae54b..00000000000
--- a/spec/javascripts/fixtures/pdf_viewer.html.haml
+++ /dev/null
@@ -1 +0,0 @@
-.file-content#js-pdf-viewer{ data: { endpoint: '/test' } }
diff --git a/spec/javascripts/fixtures/pipeline_graph.html.haml b/spec/javascripts/fixtures/pipeline_graph.html.haml
deleted file mode 100644
index c0b5ab4411e..00000000000
--- a/spec/javascripts/fixtures/pipeline_graph.html.haml
+++ /dev/null
@@ -1,14 +0,0 @@
-%div.pipeline-visualization.js-pipeline-graph
- %ul.stage-column-list
- %li.stage-column
- .stage-name
- %a{:href => "/"}
- Test
- .builds-container
- %ul
- %li.build
- .curve
- %a
- %svg
- .ci-status-text
- stop_review
diff --git a/spec/javascripts/fixtures/pipelines.html.haml b/spec/javascripts/fixtures/pipelines.html.haml
deleted file mode 100644
index 0161c0550d1..00000000000
--- a/spec/javascripts/fixtures/pipelines.html.haml
+++ /dev/null
@@ -1,12 +0,0 @@
-%div
- #pipelines-list-vue{ data: { endpoint: 'foo',
- "help-page-path" => 'foo',
- "help-auto-devops-path" => 'foo',
- "empty-state-svg-path" => 'foo',
- "error-state-svg-path" => 'foo',
- "new-pipeline-path" => 'foo',
- "can-create-pipeline" => 'true',
- "has-ci" => 'foo',
- "ci-lint-path" => 'foo',
- "reset-cache-path" => 'foo' } }
-
diff --git a/spec/javascripts/fixtures/project_select_combo_button.html.haml b/spec/javascripts/fixtures/project_select_combo_button.html.haml
deleted file mode 100644
index 432cd5fcc74..00000000000
--- a/spec/javascripts/fixtures/project_select_combo_button.html.haml
+++ /dev/null
@@ -1,6 +0,0 @@
-.project-item-select-holder
- %input.project-item-select{ data: { group_id: '12345' , relative_path: 'issues/new' } }
- %a.new-project-item-link{ data: { label: 'New issue', type: 'issues' }, href: ''}
- %i.fa.fa-spinner.spin
- %a.new-project-item-select-button
- %i.fa.fa-caret-down
diff --git a/spec/javascripts/fixtures/search_autocomplete.html.haml b/spec/javascripts/fixtures/search_autocomplete.html.haml
deleted file mode 100644
index 4aa54da9411..00000000000
--- a/spec/javascripts/fixtures/search_autocomplete.html.haml
+++ /dev/null
@@ -1,9 +0,0 @@
-.search.search-form
- %form.form-inline
- .search-input-container
- .search-input-wrap
- .dropdown
- %input#search.search-input.dropdown-menu-toggle
- .dropdown-menu.dropdown-select
- .dropdown-content
- %input{ type: "hidden", class: "js-search-project-options" }
diff --git a/spec/javascripts/fixtures/signin_tabs.html.haml b/spec/javascripts/fixtures/signin_tabs.html.haml
deleted file mode 100644
index 2e00fe7865e..00000000000
--- a/spec/javascripts/fixtures/signin_tabs.html.haml
+++ /dev/null
@@ -1,5 +0,0 @@
-%ul.nav-links.new-session-tabs
- %li.active
- %a{ href: '#ldap' } LDAP
- %li
- %a{ href: '#login-pane'} Standard
diff --git a/spec/javascripts/fixtures/sketch_viewer.html.haml b/spec/javascripts/fixtures/sketch_viewer.html.haml
deleted file mode 100644
index f01bd00925a..00000000000
--- a/spec/javascripts/fixtures/sketch_viewer.html.haml
+++ /dev/null
@@ -1,2 +0,0 @@
-.file-content#js-sketch-viewer{ data: { endpoint: '/test_sketch_file.sketch' } }
- .js-loading-icon
diff --git a/spec/javascripts/fixtures/static/README.md b/spec/javascripts/fixtures/static/README.md
new file mode 100644
index 00000000000..b5c2f8233bf
--- /dev/null
+++ b/spec/javascripts/fixtures/static/README.md
@@ -0,0 +1,3 @@
+# Please do not add new files here!
+
+Instead use a Ruby file in the fixtures root directory (`spec/javascripts/fixtures/`).
diff --git a/spec/javascripts/fixtures/static/ajax_loading_spinner.html.raw b/spec/javascripts/fixtures/static/ajax_loading_spinner.html.raw
new file mode 100644
index 00000000000..0e1ebb32b1c
--- /dev/null
+++ b/spec/javascripts/fixtures/static/ajax_loading_spinner.html.raw
@@ -0,0 +1,3 @@
+<a class="js-ajax-loading-spinner" data-remote href="http://goesnowhere.nothing/whereami">
+<i class="fa fa-trash-o"></i>
+</a>
diff --git a/spec/javascripts/fixtures/static/balsamiq_viewer.html.raw b/spec/javascripts/fixtures/static/balsamiq_viewer.html.raw
new file mode 100644
index 00000000000..cdd723d1a84
--- /dev/null
+++ b/spec/javascripts/fixtures/static/balsamiq_viewer.html.raw
@@ -0,0 +1 @@
+<div class="file-content balsamiq-viewer" data-endpoint="/test" id="js-balsamiq-viewer"></div>
diff --git a/spec/javascripts/fixtures/static/create_item_dropdown.html.raw b/spec/javascripts/fixtures/static/create_item_dropdown.html.raw
new file mode 100644
index 00000000000..d2d38370092
--- /dev/null
+++ b/spec/javascripts/fixtures/static/create_item_dropdown.html.raw
@@ -0,0 +1,11 @@
+<div class="js-create-item-dropdown-fixture-root">
+<input name="variable[environment]" type="hidden">
+<div class="dropdown "><button class="dropdown-menu-toggle js-dropdown-menu-toggle" type="button" data-toggle="dropdown"><span class="dropdown-toggle-text ">some label</span><i aria-hidden="true" data-hidden="true" class="fa fa-chevron-down"></i></button><div class="dropdown-menu dropdown-select dropdown-menu-selectable"><div class="dropdown-input"><input type="search" id="" class="dropdown-input-field" autocomplete="off" /><i aria-hidden="true" data-hidden="true" class="fa fa-search dropdown-input-search"></i><i aria-hidden="true" data-hidden="true" role="button" class="fa fa-times dropdown-input-clear js-dropdown-input-clear"></i></div><div class="dropdown-content js-dropdown-content"></div><div class="dropdown-footer"><ul class="dropdown-footer-list">
+<li>
+<button class="dropdown-create-new-item-button js-dropdown-create-new-item">
+Create wildcard
+<code></code>
+</button>
+</li>
+</ul>
+</div><div class="dropdown-loading"><i aria-hidden="true" data-hidden="true" class="fa fa-spinner fa-spin"></i></div></div></div></div>
diff --git a/spec/javascripts/fixtures/static/event_filter.html.raw b/spec/javascripts/fixtures/static/event_filter.html.raw
new file mode 100644
index 00000000000..8e9b6fb1b5c
--- /dev/null
+++ b/spec/javascripts/fixtures/static/event_filter.html.raw
@@ -0,0 +1,44 @@
+<ul class="nav-links event-filter scrolling-tabs nav nav-tabs">
+<li class="active">
+<a class="event-filter-link" href="/dashboard/activity" id="all_event_filter" title="Filter by all">
+<span>
+All
+</span>
+</a>
+</li>
+<li>
+<a class="event-filter-link" href="/dashboard/activity" id="push_event_filter" title="Filter by push events">
+<span>
+Push events
+</span>
+</a>
+</li>
+<li>
+<a class="event-filter-link" href="/dashboard/activity" id="merged_event_filter" title="Filter by merge events">
+<span>
+Merge events
+</span>
+</a>
+</li>
+<li>
+<a class="event-filter-link" href="/dashboard/activity" id="issue_event_filter" title="Filter by issue events">
+<span>
+Issue events
+</span>
+</a>
+</li>
+<li>
+<a class="event-filter-link" href="/dashboard/activity" id="comments_event_filter" title="Filter by comments">
+<span>
+Comments
+</span>
+</a>
+</li>
+<li>
+<a class="event-filter-link" href="/dashboard/activity" id="team_event_filter" title="Filter by team">
+<span>
+Team
+</span>
+</a>
+</li>
+</ul>
diff --git a/spec/javascripts/fixtures/static/gl_dropdown.html.raw b/spec/javascripts/fixtures/static/gl_dropdown.html.raw
new file mode 100644
index 00000000000..08f6738414e
--- /dev/null
+++ b/spec/javascripts/fixtures/static/gl_dropdown.html.raw
@@ -0,0 +1,26 @@
+<div>
+<div class="dropdown inline">
+<button class="dropdown-menu-toggle" data-toggle="dropdown" id="js-project-dropdown" type="button">
+<div class="dropdown-toggle-text">
+Projects
+</div>
+<i class="fa fa-chevron-down dropdown-toggle-caret js-projects-dropdown-toggle"></i>
+</button>
+<div class="dropdown-menu dropdown-select dropdown-menu-selectable">
+<div class="dropdown-title">
+<span>Go to project</span>
+<button aria="{:label=&gt;&quot;Close&quot;}" class="dropdown-title-button dropdown-menu-close">
+<i class="fa fa-times dropdown-menu-close-icon"></i>
+</button>
+</div>
+<div class="dropdown-input">
+<input class="dropdown-input-field" placeholder="Filter results" type="search">
+<i class="fa fa-search dropdown-input-search"></i>
+</div>
+<div class="dropdown-content"></div>
+<div class="dropdown-loading">
+<i class="fa fa-spinner fa-spin"></i>
+</div>
+</div>
+</div>
+</div>
diff --git a/spec/javascripts/fixtures/static/gl_field_errors.html.raw b/spec/javascripts/fixtures/static/gl_field_errors.html.raw
new file mode 100644
index 00000000000..f8470e02b7c
--- /dev/null
+++ b/spec/javascripts/fixtures/static/gl_field_errors.html.raw
@@ -0,0 +1,22 @@
+<form action="submit" class="gl-show-field-errors" method="post">
+<div class="form-group">
+<input class="required-text" required type="text">Text</input>
+</div>
+<div class="form-group">
+<input class="email" required title="Please provide a valid email address." type="email">Email</input>
+</div>
+<div class="form-group">
+<input class="password" required type="password">Password</input>
+</div>
+<div class="form-group">
+<input class="alphanumeric" pattern="[a-zA-Z0-9]" required type="text">Alphanumeric</input>
+</div>
+<div class="form-group">
+<input class="hidden" type="hidden">
+</div>
+<div class="form-group">
+<input class="custom gl-field-error-ignore" type="text">Custom, do not validate</input>
+</div>
+<div class="form-group"></div>
+<input class="submit" type="submit">Submit</input>
+</form>
diff --git a/spec/javascripts/fixtures/static/issuable_filter.html.raw b/spec/javascripts/fixtures/static/issuable_filter.html.raw
new file mode 100644
index 00000000000..06b70fb43f1
--- /dev/null
+++ b/spec/javascripts/fixtures/static/issuable_filter.html.raw
@@ -0,0 +1,9 @@
+<form action="/user/project/issues?scope=all&amp;state=closed" class="js-filter-form">
+<input id="utf8" name="utf8" value="✓">
+<input id="check-all-issues" name="check-all-issues">
+<input id="search" name="search">
+<input id="author_id" name="author_id">
+<input id="assignee_id" name="assignee_id">
+<input id="milestone_title" name="milestone_title">
+<input id="label_name" name="label_name">
+</form>
diff --git a/spec/javascripts/fixtures/static/issue_sidebar_label.html.raw b/spec/javascripts/fixtures/static/issue_sidebar_label.html.raw
new file mode 100644
index 00000000000..ec8fb30f219
--- /dev/null
+++ b/spec/javascripts/fixtures/static/issue_sidebar_label.html.raw
@@ -0,0 +1,26 @@
+<div class="block labels">
+<div class="sidebar-collapsed-icon js-sidebar-labels-tooltip"></div>
+<div class="title hide-collapsed">
+<a class="edit-link float-right" href="#">
+Edit
+</a>
+</div>
+<div class="selectbox hide-collapsed" style="display: none;">
+<div class="dropdown">
+<button class="dropdown-menu-toggle js-label-select js-multiselect" data-ability-name="issue" data-field-name="issue[label_names][]" data-issue-update="/root/test/issues/2.json" data-labels="/root/test/labels.json" data-project-id="12" data-show-any="true" data-show-no="true" data-toggle="dropdown" type="button">
+<span class="dropdown-toggle-text">
+Label
+</span>
+<i class="fa fa-chevron-down"></i>
+</button>
+<div class="dropdown-menu dropdown-select dropdown-menu-paging dropdown-menu-labels dropdown-menu-selectable">
+<div class="dropdown-page-one">
+<div class="dropdown-content"></div>
+<div class="dropdown-loading">
+<i class="fa fa-spinner fa-spin"></i>
+</div>
+</div>
+</div>
+</div>
+</div>
+</div>
diff --git a/spec/javascripts/fixtures/static/line_highlighter.html.raw b/spec/javascripts/fixtures/static/line_highlighter.html.raw
new file mode 100644
index 00000000000..897a25d6760
--- /dev/null
+++ b/spec/javascripts/fixtures/static/line_highlighter.html.raw
@@ -0,0 +1,107 @@
+<div class="file-holder">
+<div class="file-content">
+<div class="line-numbers">
+<a data-line-number="1" href="#L1" id="L1">
+<i class="fa fa-link"></i>
+1
+</a>
+<a data-line-number="2" href="#L2" id="L2">
+<i class="fa fa-link"></i>
+2
+</a>
+<a data-line-number="3" href="#L3" id="L3">
+<i class="fa fa-link"></i>
+3
+</a>
+<a data-line-number="4" href="#L4" id="L4">
+<i class="fa fa-link"></i>
+4
+</a>
+<a data-line-number="5" href="#L5" id="L5">
+<i class="fa fa-link"></i>
+5
+</a>
+<a data-line-number="6" href="#L6" id="L6">
+<i class="fa fa-link"></i>
+6
+</a>
+<a data-line-number="7" href="#L7" id="L7">
+<i class="fa fa-link"></i>
+7
+</a>
+<a data-line-number="8" href="#L8" id="L8">
+<i class="fa fa-link"></i>
+8
+</a>
+<a data-line-number="9" href="#L9" id="L9">
+<i class="fa fa-link"></i>
+9
+</a>
+<a data-line-number="10" href="#L10" id="L10">
+<i class="fa fa-link"></i>
+10
+</a>
+<a data-line-number="11" href="#L11" id="L11">
+<i class="fa fa-link"></i>
+11
+</a>
+<a data-line-number="12" href="#L12" id="L12">
+<i class="fa fa-link"></i>
+12
+</a>
+<a data-line-number="13" href="#L13" id="L13">
+<i class="fa fa-link"></i>
+13
+</a>
+<a data-line-number="14" href="#L14" id="L14">
+<i class="fa fa-link"></i>
+14
+</a>
+<a data-line-number="15" href="#L15" id="L15">
+<i class="fa fa-link"></i>
+15
+</a>
+<a data-line-number="16" href="#L16" id="L16">
+<i class="fa fa-link"></i>
+16
+</a>
+<a data-line-number="17" href="#L17" id="L17">
+<i class="fa fa-link"></i>
+17
+</a>
+<a data-line-number="18" href="#L18" id="L18">
+<i class="fa fa-link"></i>
+18
+</a>
+<a data-line-number="19" href="#L19" id="L19">
+<i class="fa fa-link"></i>
+19
+</a>
+<a data-line-number="20" href="#L20" id="L20">
+<i class="fa fa-link"></i>
+20
+</a>
+<a data-line-number="21" href="#L21" id="L21">
+<i class="fa fa-link"></i>
+21
+</a>
+<a data-line-number="22" href="#L22" id="L22">
+<i class="fa fa-link"></i>
+22
+</a>
+<a data-line-number="23" href="#L23" id="L23">
+<i class="fa fa-link"></i>
+23
+</a>
+<a data-line-number="24" href="#L24" id="L24">
+<i class="fa fa-link"></i>
+24
+</a>
+<a data-line-number="25" href="#L25" id="L25">
+<i class="fa fa-link"></i>
+25
+</a>
+</div>
+<pre class="code highlight"><code><span class="line" id="LC1">Line 1</span><span class="line" id="LC2">Line 2</span><span class="line" id="LC3">Line 3</span><span class="line" id="LC4">Line 4</span><span class="line" id="LC5">Line 5</span><span class="line" id="LC6">Line 6</span><span class="line" id="LC7">Line 7</span><span class="line" id="LC8">Line 8</span><span class="line" id="LC9">Line 9</span><span class="line" id="LC10">Line 10</span><span class="line" id="LC11">Line 11</span><span class="line" id="LC12">Line 12</span><span class="line" id="LC13">Line 13</span><span class="line" id="LC14">Line 14</span><span class="line" id="LC15">Line 15</span><span class="line" id="LC16">Line 16</span><span class="line" id="LC17">Line 17</span><span class="line" id="LC18">Line 18</span><span class="line" id="LC19">Line 19</span><span class="line" id="LC20">Line 20</span><span class="line" id="LC21">Line 21</span><span class="line" id="LC22">Line 22</span><span class="line" id="LC23">Line 23</span><span class="line" id="LC24">Line 24</span><span class="line" id="LC25">Line 25</span></code></pre>
+</div>
+</div>
diff --git a/spec/javascripts/fixtures/static/linked_tabs.html.raw b/spec/javascripts/fixtures/static/linked_tabs.html.raw
new file mode 100644
index 00000000000..c25463bf1db
--- /dev/null
+++ b/spec/javascripts/fixtures/static/linked_tabs.html.raw
@@ -0,0 +1,20 @@
+<ul class="nav nav-tabs new-session-tabs linked-tabs">
+<li class="nav-item">
+<a class="nav-link" data-action="tab1" data-target="div#tab1" data-toggle="tab" href="foo/bar/1">
+Tab 1
+</a>
+</li>
+<li class="nav-item">
+<a class="nav-link" data-action="tab2" data-target="div#tab2" data-toggle="tab" href="foo/bar/1/context">
+Tab 2
+</a>
+</li>
+</ul>
+<div class="tab-content">
+<div class="tab-pane" id="tab1">
+Tab 1 Content
+</div>
+<div class="tab-pane" id="tab2">
+Tab 2 Content
+</div>
+</div>
diff --git a/spec/javascripts/fixtures/static/merge_requests_show.html.raw b/spec/javascripts/fixtures/static/merge_requests_show.html.raw
new file mode 100644
index 00000000000..e219d9462aa
--- /dev/null
+++ b/spec/javascripts/fixtures/static/merge_requests_show.html.raw
@@ -0,0 +1,15 @@
+<a class="btn-close"></a>
+<div class="detail-page-description">
+<div class="description js-task-list-container">
+<div class="wiki">
+<ul class="task-list">
+<li class="task-list-item">
+<input class="task-list-item-checkbox" type="checkbox">
+Task List Item
+</li>
+</ul>
+<textarea class="js-task-list-field">- [ ] Task List Item</textarea>
+</div>
+</div>
+</div>
+<form action="/foo" class="js-issuable-update"></form>
diff --git a/spec/javascripts/fixtures/static/mini_dropdown_graph.html.raw b/spec/javascripts/fixtures/static/mini_dropdown_graph.html.raw
new file mode 100644
index 00000000000..cd0b8dec3fc
--- /dev/null
+++ b/spec/javascripts/fixtures/static/mini_dropdown_graph.html.raw
@@ -0,0 +1,13 @@
+<div class="js-builds-dropdown-tests dropdown dropdown js-mini-pipeline-graph">
+<button class="js-builds-dropdown-button" data-toggle="dropdown" data-stage-endpoint="foobar">
+Dropdown
+</button>
+<ul class="dropdown-menu mini-pipeline-graph-dropdown-menu js-builds-dropdown-container">
+<li class="js-builds-dropdown-list scrollable-menu">
+<ul></ul>
+</li>
+<li class="js-builds-dropdown-loading hidden">
+<span class="fa fa-spinner"></span>
+</li>
+</ul>
+</div>
diff --git a/spec/javascripts/fixtures/static/notebook_viewer.html.raw b/spec/javascripts/fixtures/static/notebook_viewer.html.raw
new file mode 100644
index 00000000000..4bbb7bf1094
--- /dev/null
+++ b/spec/javascripts/fixtures/static/notebook_viewer.html.raw
@@ -0,0 +1 @@
+<div class="file-content" data-endpoint="/test" id="js-notebook-viewer"></div>
diff --git a/spec/javascripts/fixtures/static/oauth_remember_me.html.raw b/spec/javascripts/fixtures/static/oauth_remember_me.html.raw
new file mode 100644
index 00000000000..9ba1ffc72fe
--- /dev/null
+++ b/spec/javascripts/fixtures/static/oauth_remember_me.html.raw
@@ -0,0 +1,6 @@
+<div id="oauth-container">
+<input id="remember_me" type="checkbox">
+<a class="oauth-login twitter" href="http://example.com/"></a>
+<a class="oauth-login github" href="http://example.com/"></a>
+<a class="oauth-login facebook" href="http://example.com/?redirect_fragment=L1"></a>
+</div>
diff --git a/spec/javascripts/fixtures/static/pdf_viewer.html.raw b/spec/javascripts/fixtures/static/pdf_viewer.html.raw
new file mode 100644
index 00000000000..350d35a262f
--- /dev/null
+++ b/spec/javascripts/fixtures/static/pdf_viewer.html.raw
@@ -0,0 +1 @@
+<div class="file-content" data-endpoint="/test" id="js-pdf-viewer"></div>
diff --git a/spec/javascripts/fixtures/static/pipeline_graph.html.raw b/spec/javascripts/fixtures/static/pipeline_graph.html.raw
new file mode 100644
index 00000000000..422372bb7d5
--- /dev/null
+++ b/spec/javascripts/fixtures/static/pipeline_graph.html.raw
@@ -0,0 +1,24 @@
+<div class="pipeline-visualization js-pipeline-graph">
+<ul class="stage-column-list">
+<li class="stage-column">
+<div class="stage-name">
+<a href="/">
+Test
+<div class="builds-container">
+<ul>
+<li class="build">
+<div class="curve"></div>
+<a>
+<svg></svg>
+<div class="ci-status-text">
+stop_review
+</div>
+</a>
+</li>
+</ul>
+</div>
+</a>
+</div>
+</li>
+</ul>
+</div>
diff --git a/spec/javascripts/fixtures/static/pipelines.html.raw b/spec/javascripts/fixtures/static/pipelines.html.raw
new file mode 100644
index 00000000000..42333f94f2f
--- /dev/null
+++ b/spec/javascripts/fixtures/static/pipelines.html.raw
@@ -0,0 +1,3 @@
+<div>
+<div data-can-create-pipeline="true" data-ci-lint-path="foo" data-empty-state-svg-path="foo" data-endpoint="foo" data-error-state-svg-path="foo" data-has-ci="foo" data-help-auto-devops-path="foo" data-help-page-path="foo" data-new-pipeline-path="foo" data-reset-cache-path="foo" id="pipelines-list-vue"></div>
+</div>
diff --git a/spec/javascripts/fixtures/static/project_select_combo_button.html.raw b/spec/javascripts/fixtures/static/project_select_combo_button.html.raw
new file mode 100644
index 00000000000..50c826051c0
--- /dev/null
+++ b/spec/javascripts/fixtures/static/project_select_combo_button.html.raw
@@ -0,0 +1,9 @@
+<div class="project-item-select-holder">
+<input class="project-item-select" data-group-id="12345" data-relative-path="issues/new">
+<a class="new-project-item-link" data-label="New issue" data-type="issues" href="">
+<i class="fa fa-spinner spin"></i>
+</a>
+<a class="new-project-item-select-button">
+<i class="fa fa-caret-down"></i>
+</a>
+</div>
diff --git a/spec/javascripts/fixtures/static/search_autocomplete.html.raw b/spec/javascripts/fixtures/static/search_autocomplete.html.raw
new file mode 100644
index 00000000000..29db9020424
--- /dev/null
+++ b/spec/javascripts/fixtures/static/search_autocomplete.html.raw
@@ -0,0 +1,15 @@
+<div class="search search-form">
+<form class="form-inline">
+<div class="search-input-container">
+<div class="search-input-wrap">
+<div class="dropdown">
+<input class="search-input dropdown-menu-toggle" id="search">
+<div class="dropdown-menu dropdown-select">
+<div class="dropdown-content"></div>
+</div>
+</div>
+</div>
+</div>
+<input class="js-search-project-options" type="hidden">
+</form>
+</div>
diff --git a/spec/javascripts/fixtures/static/signin_tabs.html.raw b/spec/javascripts/fixtures/static/signin_tabs.html.raw
new file mode 100644
index 00000000000..7e66ab9394b
--- /dev/null
+++ b/spec/javascripts/fixtures/static/signin_tabs.html.raw
@@ -0,0 +1,8 @@
+<ul class="nav-links new-session-tabs">
+<li class="active">
+<a href="#ldap">LDAP</a>
+</li>
+<li>
+<a href="#login-pane">Standard</a>
+</li>
+</ul>
diff --git a/spec/javascripts/fixtures/static/sketch_viewer.html.raw b/spec/javascripts/fixtures/static/sketch_viewer.html.raw
new file mode 100644
index 00000000000..e25e554e568
--- /dev/null
+++ b/spec/javascripts/fixtures/static/sketch_viewer.html.raw
@@ -0,0 +1,3 @@
+<div class="file-content" data-endpoint="/test_sketch_file.sketch" id="js-sketch-viewer">
+<div class="js-loading-icon"></div>
+</div>
diff --git a/spec/javascripts/fixtures/static_fixtures.rb b/spec/javascripts/fixtures/static_fixtures.rb
index 852a82587b9..b5188eeb994 100644
--- a/spec/javascripts/fixtures/static_fixtures.rb
+++ b/spec/javascripts/fixtures/static_fixtures.rb
@@ -3,29 +3,17 @@ require 'spec_helper'
describe ApplicationController, '(Static JavaScript fixtures)', type: :controller do
include JavaScriptFixturesHelpers
- before(:all) do
- clean_frontend_fixtures('static/')
- end
-
- JavaScriptFixturesHelpers::FIXTURE_PATHS.each do |fixture_path|
- fixtures_path = File.expand_path(fixture_path, Rails.root)
-
- Dir.glob(File.expand_path('**/*.haml', fixtures_path)).map do |file_path|
- template_file_name = file_path.sub(/\A#{fixtures_path}#{File::SEPARATOR}/, '')
-
- it "static/#{template_file_name.sub(/\.haml\z/, '.raw')}" do |example|
- fixture_file_name = example.description
- rendered = render_template(fixture_path, template_file_name)
- store_frontend_fixture(rendered, fixture_file_name)
- end
+ Dir.glob('{,ee/}spec/javascripts/fixtures/**/*.haml').map do |file_path|
+ it "static/#{file_path.sub(%r{\A(ee/)?spec/javascripts/fixtures/}, '').sub(/\.haml\z/, '.raw')}" do |example|
+ store_frontend_fixture(render_template(file_path), example.description)
end
end
private
- def render_template(fixture_path, template_file_name)
+ def render_template(template_file_name)
controller = ApplicationController.new
- controller.prepend_view_path(fixture_path)
- controller.render_to_string(template: template_file_name, layout: false)
+ controller.prepend_view_path(File.dirname(template_file_name))
+ controller.render_to_string(template: File.basename(template_file_name), layout: false)
end
end
diff --git a/spec/javascripts/frequent_items/components/app_spec.js b/spec/javascripts/frequent_items/components/app_spec.js
index b1cc4d8dc8d..6814f656f5d 100644
--- a/spec/javascripts/frequent_items/components/app_spec.js
+++ b/spec/javascripts/frequent_items/components/app_spec.js
@@ -194,7 +194,7 @@ describe('Frequent Items App Component', () => {
expect(loadingEl).toBeDefined();
expect(loadingEl.classList.contains('prepend-top-20')).toBe(true);
- expect(loadingEl.querySelector('i').getAttribute('aria-label')).toBe('Loading projects');
+ expect(loadingEl.querySelector('span').getAttribute('aria-label')).toBe('Loading projects');
done();
});
});
diff --git a/spec/javascripts/groups/components/app_spec.js b/spec/javascripts/groups/components/app_spec.js
index d832441dc93..31873311e16 100644
--- a/spec/javascripts/groups/components/app_spec.js
+++ b/spec/javascripts/groups/components/app_spec.js
@@ -502,7 +502,7 @@ describe('AppComponent', () => {
vm.isLoading = true;
Vue.nextTick(() => {
expect(vm.$el.querySelector('.loading-animation')).toBeDefined();
- expect(vm.$el.querySelector('i.fa').getAttribute('aria-label')).toBe('Loading groups');
+ expect(vm.$el.querySelector('span').getAttribute('aria-label')).toBe('Loading groups');
done();
});
});
diff --git a/spec/javascripts/helpers/filtered_search_spec_helper.js b/spec/javascripts/helpers/filtered_search_spec_helper.js
index 8933dd5def4..fd06bb1f324 100644
--- a/spec/javascripts/helpers/filtered_search_spec_helper.js
+++ b/spec/javascripts/helpers/filtered_search_spec_helper.js
@@ -5,7 +5,7 @@ export default class FilteredSearchSpecHelper {
static createFilterVisualToken(name, value, isSelected = false) {
const li = document.createElement('li');
- li.classList.add('js-visual-token', 'filtered-search-token');
+ li.classList.add('js-visual-token', 'filtered-search-token', `search-token-${name}`);
li.innerHTML = `
<div class="selectable ${isSelected ? 'selected' : ''}" role="button">
diff --git a/spec/javascripts/ide/components/commit_sidebar/radio_group_spec.js b/spec/javascripts/ide/components/commit_sidebar/radio_group_spec.js
index ffc2a4c9ddb..db1988be3e1 100644
--- a/spec/javascripts/ide/components/commit_sidebar/radio_group_spec.js
+++ b/spec/javascripts/ide/components/commit_sidebar/radio_group_spec.js
@@ -76,6 +76,7 @@ describe('IDE commit sidebar radio group', () => {
const Component = Vue.extend(radioGroup);
store.state.commit.commitAction = '1';
+ store.state.commit.newBranchName = 'test-123';
vm = createComponentWithStore(Component, store, {
value: '1',
@@ -113,6 +114,12 @@ describe('IDE commit sidebar radio group', () => {
done();
});
});
+
+ it('renders newBranchName if present', () => {
+ const input = vm.$el.querySelector('.form-control');
+
+ expect(input.value).toBe('test-123');
+ });
});
describe('tooltipTitle', () => {
diff --git a/spec/javascripts/ide/components/new_dropdown/modal_spec.js b/spec/javascripts/ide/components/new_dropdown/modal_spec.js
index d1a0964ccdd..0556feae46a 100644
--- a/spec/javascripts/ide/components/new_dropdown/modal_spec.js
+++ b/spec/javascripts/ide/components/new_dropdown/modal_spec.js
@@ -109,6 +109,18 @@ describe('new file modal component', () => {
expect(vm.entryName).toBe('index.js');
});
+
+ it('removes leading/trailing spaces when found in the new name', () => {
+ vm.entryName = ' index.js ';
+
+ expect(vm.entryName).toBe('index.js');
+ });
+
+ it('does not remove internal spaces in the file name', () => {
+ vm.entryName = ' In Praise of Idleness.txt ';
+
+ expect(vm.entryName).toBe('In Praise of Idleness.txt');
+ });
});
});
diff --git a/spec/javascripts/ide/stores/modules/commit/actions_spec.js b/spec/javascripts/ide/stores/modules/commit/actions_spec.js
index 06b8b452319..34d97347438 100644
--- a/spec/javascripts/ide/stores/modules/commit/actions_spec.js
+++ b/spec/javascripts/ide/stores/modules/commit/actions_spec.js
@@ -396,7 +396,7 @@ describe('IDE commit module actions', () => {
.then(() => {
expect(visitUrl).toHaveBeenCalledWith(
`webUrl/merge_requests/new?merge_request[source_branch]=${
- store.getters['commit/newBranchName']
+ store.getters['commit/placeholderBranchName']
}&merge_request[target_branch]=master`,
);
diff --git a/spec/javascripts/ide/stores/modules/commit/getters_spec.js b/spec/javascripts/ide/stores/modules/commit/getters_spec.js
index 3f4bf407a1f..702e78ef773 100644
--- a/spec/javascripts/ide/stores/modules/commit/getters_spec.js
+++ b/spec/javascripts/ide/stores/modules/commit/getters_spec.js
@@ -29,11 +29,11 @@ describe('IDE commit module getters', () => {
});
});
- describe('newBranchName', () => {
+ describe('placeholderBranchName', () => {
it('includes username, currentBranchId, patch & random number', () => {
gon.current_username = 'username';
- const branch = getters.newBranchName(state, null, {
+ const branch = getters.placeholderBranchName(state, null, {
currentBranchId: 'testing',
});
@@ -46,7 +46,7 @@ describe('IDE commit module getters', () => {
currentBranchId: 'master',
};
const localGetters = {
- newBranchName: 'newBranchName',
+ placeholderBranchName: 'newBranchName',
};
beforeEach(() => {
@@ -71,7 +71,7 @@ describe('IDE commit module getters', () => {
expect(getters.branchName(state, localGetters, rootState)).toBe('state-newBranchName');
});
- it('uses getters newBranchName when state newBranchName is empty', () => {
+ it('uses placeholderBranchName when state newBranchName is empty', () => {
Object.assign(state, {
newBranchName: '',
});
diff --git a/spec/javascripts/jobs/components/stages_dropdown_spec.js b/spec/javascripts/jobs/components/stages_dropdown_spec.js
index 9c731ae2f68..eccb4e13d67 100644
--- a/spec/javascripts/jobs/components/stages_dropdown_spec.js
+++ b/spec/javascripts/jobs/components/stages_dropdown_spec.js
@@ -1,59 +1,167 @@
import Vue from 'vue';
import component from '~/jobs/components/stages_dropdown.vue';
+import { trimText } from 'spec/helpers/vue_component_helper';
import mountComponent from '../../helpers/vue_mount_component_helper';
describe('Stages Dropdown', () => {
const Component = Vue.extend(component);
let vm;
- beforeEach(() => {
- vm = mountComponent(Component, {
- pipeline: {
- id: 28029444,
- details: {
- status: {
- details_path: '/gitlab-org/gitlab-ce/pipelines/28029444',
- group: 'success',
- has_details: true,
- icon: 'status_success',
- label: 'passed',
- text: 'passed',
- tooltip: 'passed',
- },
- },
- path: 'pipeline/28029444',
+ const mockPipelineData = {
+ id: 28029444,
+ details: {
+ status: {
+ details_path: '/gitlab-org/gitlab-ce/pipelines/28029444',
+ group: 'success',
+ has_details: true,
+ icon: 'status_success',
+ label: 'passed',
+ text: 'passed',
+ tooltip: 'passed',
},
- stages: [
- {
- name: 'build',
- },
- {
- name: 'test',
- },
- ],
- selectedStage: 'deploy',
+ },
+ path: 'pipeline/28029444',
+ flags: {
+ merge_request_pipeline: true,
+ detached_merge_request_pipeline: false,
+ },
+ merge_request: {
+ iid: 1234,
+ path: '/root/detached-merge-request-pipelines/merge_requests/1',
+ title: 'Update README.md',
+ source_branch: 'feature-1234',
+ source_branch_path: '/root/detached-merge-request-pipelines/branches/feature-1234',
+ target_branch: 'master',
+ target_branch_path: '/root/detached-merge-request-pipelines/branches/master',
+ },
+ ref: {
+ name: 'test-branch',
+ },
+ };
+
+ describe('without a merge request pipeline', () => {
+ let pipeline;
+
+ beforeEach(() => {
+ pipeline = JSON.parse(JSON.stringify(mockPipelineData));
+ delete pipeline.merge_request;
+ delete pipeline.flags.merge_request_pipeline;
+ delete pipeline.flags.detached_merge_request_pipeline;
+
+ vm = mountComponent(Component, {
+ pipeline,
+ stages: [{ name: 'build' }, { name: 'test' }],
+ selectedStage: 'deploy',
+ });
});
- });
- afterEach(() => {
- vm.$destroy();
- });
+ afterEach(() => {
+ vm.$destroy();
+ });
- it('renders pipeline status', () => {
- expect(vm.$el.querySelector('.js-ci-status-icon-success')).not.toBeNull();
- });
+ it('renders pipeline status', () => {
+ expect(vm.$el.querySelector('.js-ci-status-icon-success')).not.toBeNull();
+ });
+
+ it('renders pipeline link', () => {
+ expect(vm.$el.querySelector('.js-pipeline-path').getAttribute('href')).toEqual(
+ 'pipeline/28029444',
+ );
+ });
+
+ it('renders dropdown with stages', () => {
+ expect(vm.$el.querySelector('.dropdown .js-stage-item').textContent).toContain('build');
+ });
+
+    it('renders selected stage', () => {
+ expect(vm.$el.querySelector('.dropdown .js-selected-stage').textContent).toContain('deploy');
+ });
- it('renders pipeline link', () => {
- expect(vm.$el.querySelector('.js-pipeline-path').getAttribute('href')).toEqual(
- 'pipeline/28029444',
- );
+ it(`renders the pipeline info text like "Pipeline #123 for source_branch"`, () => {
+ const expected = `Pipeline #${pipeline.id} for ${pipeline.ref.name}`;
+ const actual = trimText(vm.$el.querySelector('.js-pipeline-info').innerText);
+
+ expect(actual).toBe(expected);
+ });
});
- it('renders dropdown with stages', () => {
- expect(vm.$el.querySelector('.dropdown .js-stage-item').textContent).toContain('build');
+ describe('with an "attached" merge request pipeline', () => {
+ let pipeline;
+
+ beforeEach(() => {
+ pipeline = JSON.parse(JSON.stringify(mockPipelineData));
+ pipeline.flags.merge_request_pipeline = true;
+ pipeline.flags.detached_merge_request_pipeline = false;
+
+ vm = mountComponent(Component, {
+ pipeline,
+ stages: [],
+ selectedStage: 'deploy',
+ });
+ });
+
+ it(`renders the pipeline info text like "Pipeline #123 for !456 with source_branch into target_branch"`, () => {
+ const expected = `Pipeline #${pipeline.id} for !${pipeline.merge_request.iid} with ${
+ pipeline.merge_request.source_branch
+ } into ${pipeline.merge_request.target_branch}`;
+ const actual = trimText(vm.$el.querySelector('.js-pipeline-info').innerText);
+
+ expect(actual).toBe(expected);
+ });
+
+ it(`renders the correct merge request link`, () => {
+ const actual = vm.$el.querySelector('.js-mr-link').href;
+
+ expect(actual).toContain(pipeline.merge_request.path);
+ });
+
+ it(`renders the correct source branch link`, () => {
+ const actual = vm.$el.querySelector('.js-source-branch-link').href;
+
+ expect(actual).toContain(pipeline.merge_request.source_branch_path);
+ });
+
+ it(`renders the correct target branch link`, () => {
+ const actual = vm.$el.querySelector('.js-target-branch-link').href;
+
+ expect(actual).toContain(pipeline.merge_request.target_branch_path);
+ });
});
- it('rendes selected stage', () => {
- expect(vm.$el.querySelector('.dropdown .js-selected-stage').textContent).toContain('deploy');
+ describe('with a detached merge request pipeline', () => {
+ let pipeline;
+
+ beforeEach(() => {
+ pipeline = JSON.parse(JSON.stringify(mockPipelineData));
+ pipeline.flags.merge_request_pipeline = false;
+ pipeline.flags.detached_merge_request_pipeline = true;
+
+ vm = mountComponent(Component, {
+ pipeline,
+ stages: [],
+ selectedStage: 'deploy',
+ });
+ });
+
+ it(`renders the pipeline info like "Pipeline #123 for !456 with source_branch"`, () => {
+ const expected = `Pipeline #${pipeline.id} for !${pipeline.merge_request.iid} with ${
+ pipeline.merge_request.source_branch
+ }`;
+ const actual = trimText(vm.$el.querySelector('.js-pipeline-info').innerText);
+
+ expect(actual).toBe(expected);
+ });
+
+ it(`renders the correct merge request link`, () => {
+ const actual = vm.$el.querySelector('.js-mr-link').href;
+
+ expect(actual).toContain(pipeline.merge_request.path);
+ });
+
+ it(`renders the correct source branch link`, () => {
+ const actual = vm.$el.querySelector('.js-source-branch-link').href;
+
+ expect(actual).toContain(pipeline.merge_request.source_branch_path);
+ });
});
});
diff --git a/spec/javascripts/notes/components/note_form_spec.js b/spec/javascripts/notes/components/note_form_spec.js
index 7cc324cfe44..c48f8188105 100644
--- a/spec/javascripts/notes/components/note_form_spec.js
+++ b/spec/javascripts/notes/components/note_form_spec.js
@@ -5,11 +5,33 @@ import MarkdownField from '~/vue_shared/components/markdown/field.vue';
import { noteableDataMock, notesDataMock } from '../mock_data';
describe('issue_note_form component', () => {
+ const dummyAutosaveKey = 'some-autosave-key';
+ const dummyDraft = 'dummy draft content';
+
let store;
let wrapper;
let props;
+ const createComponentWrapper = () => {
+ const localVue = createLocalVue();
+ return shallowMount(NoteForm, {
+ store,
+ propsData: props,
+ // see https://gitlab.com/gitlab-org/gitlab-ce/issues/56317 for the following
+ localVue,
+ sync: false,
+ });
+ };
+
beforeEach(() => {
+ spyOnDependency(NoteForm, 'getDraft').and.callFake(key => {
+ if (key === dummyAutosaveKey) {
+ return dummyDraft;
+ }
+
+ return null;
+ });
+
store = createStore();
store.dispatch('setNoteableData', noteableDataMock);
store.dispatch('setNotesData', notesDataMock);
@@ -20,14 +42,7 @@ describe('issue_note_form component', () => {
noteId: '545',
};
- const localVue = createLocalVue();
- wrapper = shallowMount(NoteForm, {
- store,
- propsData: props,
- // see https://gitlab.com/gitlab-org/gitlab-ce/issues/56317 for the following
- localVue,
- sync: false,
- });
+ wrapper = createComponentWrapper();
});
afterEach(() => {
@@ -181,4 +196,67 @@ describe('issue_note_form component', () => {
});
});
});
+
+ describe('with autosaveKey', () => {
+ beforeEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('with draft', () => {
+ beforeEach(done => {
+ Object.assign(props, {
+ noteBody: '',
+ autosaveKey: dummyAutosaveKey,
+ });
+ wrapper = createComponentWrapper();
+
+ wrapper.vm
+ .$nextTick()
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('displays the draft in textarea', () => {
+ const textarea = wrapper.find('textarea');
+
+ expect(textarea.element.value).toBe(dummyDraft);
+ });
+ });
+
+ describe('without draft', () => {
+ beforeEach(done => {
+ Object.assign(props, {
+ noteBody: '',
+ autosaveKey: 'some key without draft',
+ });
+ wrapper = createComponentWrapper();
+
+ wrapper.vm
+ .$nextTick()
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('leaves the textarea empty', () => {
+ const textarea = wrapper.find('textarea');
+
+ expect(textarea.element.value).toBe('');
+ });
+ });
+
+ it('updates the draft if textarea content changes', () => {
+ const updateDraftSpy = spyOnDependency(NoteForm, 'updateDraft').and.stub();
+ Object.assign(props, {
+ noteBody: '',
+ autosaveKey: dummyAutosaveKey,
+ });
+ wrapper = createComponentWrapper();
+ const textarea = wrapper.find('textarea');
+ const dummyContent = 'some new content';
+
+ textarea.setValue(dummyContent);
+
+ expect(updateDraftSpy).toHaveBeenCalledWith(dummyAutosaveKey, dummyContent);
+ });
+ });
});
diff --git a/spec/javascripts/notes/components/noteable_discussion_spec.js b/spec/javascripts/notes/components/noteable_discussion_spec.js
index 2b93fb9fb45..3304c79cdb7 100644
--- a/spec/javascripts/notes/components/noteable_discussion_spec.js
+++ b/spec/javascripts/notes/components/noteable_discussion_spec.js
@@ -3,6 +3,7 @@ import createStore from '~/notes/stores';
import noteableDiscussion from '~/notes/components/noteable_discussion.vue';
import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
import ResolveWithIssueButton from '~/notes/components/discussion_resolve_with_issue_button.vue';
+import NoteForm from '~/notes/components/note_form.vue';
import '~/behaviors/markdown/render_gfm';
import { noteableDataMock, discussionMock, notesDataMock } from '../mock_data';
import mockDiffFile from '../../diffs/mock_data/diff_file';
@@ -72,7 +73,18 @@ describe('noteable_discussion component', () => {
.then(() => wrapper.vm.$nextTick())
.then(() => {
expect(wrapper.vm.isReplying).toEqual(true);
- expect(wrapper.vm.$refs.noteForm).not.toBeNull();
+
+ const noteForm = wrapper.find(NoteForm);
+
+ expect(noteForm.exists()).toBe(true);
+
+ const noteFormProps = noteForm.props();
+
+ expect(noteFormProps.discussion).toBe(discussionMock);
+ expect(noteFormProps.isEditing).toBe(false);
+ expect(noteFormProps.line).toBe(null);
+ expect(noteFormProps.saveButtonTitle).toBe('Comment');
+ expect(noteFormProps.autosaveKey).toBe(`Note/Issue/${discussionMock.id}/Reply`);
})
.then(done)
.catch(done.fail);
diff --git a/spec/javascripts/pipelines/graph/stage_column_component_spec.js b/spec/javascripts/pipelines/graph/stage_column_component_spec.js
index d0b8f877d6f..dafb892da43 100644
--- a/spec/javascripts/pipelines/graph/stage_column_component_spec.js
+++ b/spec/javascripts/pipelines/graph/stage_column_component_spec.js
@@ -35,6 +35,7 @@ describe('stage column component', () => {
component = mountComponent(StageColumnComponent, {
title: 'foo',
groups: mockGroups,
+ hasTriggeredBy: false,
});
});
@@ -61,6 +62,7 @@ describe('stage column component', () => {
},
],
title: 'test',
+ hasTriggeredBy: false,
});
expect(component.$el.querySelector('.builds-container li').getAttribute('id')).toEqual(
diff --git a/spec/javascripts/pipelines/pipeline_url_spec.js b/spec/javascripts/pipelines/pipeline_url_spec.js
index ea917b36526..faad49a78b0 100644
--- a/spec/javascripts/pipelines/pipeline_url_spec.js
+++ b/spec/javascripts/pipelines/pipeline_url_spec.js
@@ -100,7 +100,8 @@ describe('Pipeline Url Component', () => {
latest: true,
yaml_errors: true,
stuck: true,
- merge_request: true,
+ merge_request_pipeline: true,
+ detached_merge_request_pipeline: true,
},
},
autoDevopsHelpPath: 'foo',
@@ -108,15 +109,16 @@ describe('Pipeline Url Component', () => {
}).$mount();
expect(component.$el.querySelector('.js-pipeline-url-latest').textContent).toContain('latest');
+
expect(component.$el.querySelector('.js-pipeline-url-yaml').textContent).toContain(
'yaml invalid',
);
- expect(component.$el.querySelector('.js-pipeline-url-mergerequest').textContent).toContain(
- 'merge request',
- );
-
expect(component.$el.querySelector('.js-pipeline-url-stuck').textContent).toContain('stuck');
+
+ expect(component.$el.querySelector('.js-pipeline-url-detached').textContent).toContain(
+ 'detached',
+ );
});
it('should render a badge for autodevops', () => {
diff --git a/spec/javascripts/registry/components/app_spec.js b/spec/javascripts/registry/components/app_spec.js
index 67118ac03a5..76a17e6fb31 100644
--- a/spec/javascripts/registry/components/app_spec.js
+++ b/spec/javascripts/registry/components/app_spec.js
@@ -99,7 +99,7 @@ describe('Registry List', () => {
it('should render a loading spinner', done => {
Vue.nextTick(() => {
- expect(vm.$el.querySelector('.fa-spinner')).not.toBe(null);
+ expect(vm.$el.querySelector('.spinner')).not.toBe(null);
done();
});
});
diff --git a/spec/javascripts/reports/components/grouped_test_reports_app_spec.js b/spec/javascripts/reports/components/grouped_test_reports_app_spec.js
index 69767d9cf1c..a17494966a3 100644
--- a/spec/javascripts/reports/components/grouped_test_reports_app_spec.js
+++ b/spec/javascripts/reports/components/grouped_test_reports_app_spec.js
@@ -61,7 +61,7 @@ describe('Grouped Test Reports App', () => {
it('renders success summary text', done => {
setTimeout(() => {
- expect(vm.$el.querySelector('.fa-spinner')).not.toBeNull();
+ expect(vm.$el.querySelector('.spinner')).not.toBeNull();
expect(vm.$el.querySelector('.js-code-text').textContent.trim()).toEqual(
'Test summary results are being parsed',
);
@@ -81,7 +81,7 @@ describe('Grouped Test Reports App', () => {
it('renders failed summary text + new badge', done => {
setTimeout(() => {
- expect(vm.$el.querySelector('.fa-spinner')).toBeNull();
+ expect(vm.$el.querySelector('.spinner')).toBeNull();
expect(vm.$el.querySelector('.js-code-text').textContent.trim()).toEqual(
'Test summary contained 2 failed test results out of 11 total tests',
);
@@ -109,7 +109,7 @@ describe('Grouped Test Reports App', () => {
it('renders summary text', done => {
setTimeout(() => {
- expect(vm.$el.querySelector('.fa-spinner')).toBeNull();
+ expect(vm.$el.querySelector('.spinner')).toBeNull();
expect(vm.$el.querySelector('.js-code-text').textContent.trim()).toEqual(
'Test summary contained 2 failed test results and 2 fixed test results out of 11 total tests',
);
@@ -137,7 +137,7 @@ describe('Grouped Test Reports App', () => {
it('renders summary text', done => {
setTimeout(() => {
- expect(vm.$el.querySelector('.fa-spinner')).toBeNull();
+ expect(vm.$el.querySelector('.spinner')).toBeNull();
expect(vm.$el.querySelector('.js-code-text').textContent.trim()).toEqual(
'Test summary contained 2 fixed test results out of 11 total tests',
);
@@ -190,7 +190,7 @@ describe('Grouped Test Reports App', () => {
});
it('renders loading summary text with loading icon', done => {
- expect(vm.$el.querySelector('.fa-spinner')).not.toBeNull();
+ expect(vm.$el.querySelector('.spinner')).not.toBeNull();
expect(vm.$el.querySelector('.js-code-text').textContent.trim()).toEqual(
'Test summary results are being parsed',
);
diff --git a/spec/javascripts/test_bundle.js b/spec/javascripts/test_bundle.js
index 5eef5682bbd..235a17d13b0 100644
--- a/spec/javascripts/test_bundle.js
+++ b/spec/javascripts/test_bundle.js
@@ -69,7 +69,7 @@ window.gl = window.gl || {};
window.gl.TEST_HOST = TEST_HOST;
window.gon = window.gon || {};
window.gon.test_env = true;
-window.gon.ee = false;
+window.gon.ee = process.env.EE;
gon.relative_url_root = '';
let hasUnhandledPromiseRejections = false;
@@ -122,19 +122,26 @@ afterEach(() => {
const axiosDefaultAdapter = getDefaultAdapter();
// render all of our tests
-const testsContext = require.context('.', true, /_spec$/);
-testsContext.keys().forEach(function(path) {
- try {
- testsContext(path);
- } catch (err) {
- console.log(err);
- console.error('[GL SPEC RUNNER ERROR] Unable to load spec: ', path);
- describe('Test bundle', function() {
- it(`includes '${path}'`, function() {
- expect(err).toBeNull();
+const testContexts = [require.context('spec', true, /_spec$/)];
+
+if (process.env.EE) {
+ testContexts.push(require.context('ee_spec', true, /_spec$/));
+}
+
+testContexts.forEach(context => {
+ context.keys().forEach(path => {
+ try {
+ context(path);
+ } catch (err) {
+ console.log(err);
+ console.error('[GL SPEC RUNNER ERROR] Unable to load spec: ', path);
+ describe('Test bundle', function() {
+ it(`includes '${path}'`, function() {
+ expect(err).toBeNull();
+ });
});
- });
- }
+ }
+ });
});
describe('test errors', () => {
@@ -204,24 +211,35 @@ if (process.env.BABEL_ENV === 'coverage') {
];
describe('Uncovered files', function() {
- const sourceFiles = require.context('~', true, /\.(js|vue)$/);
+ const sourceFilesContexts = [require.context('~', true, /\.(js|vue)$/)];
+
+ if (process.env.EE) {
+ sourceFilesContexts.push(require.context('ee', true, /\.(js|vue)$/));
+ }
+
+ const allTestFiles = testContexts.reduce(
+ (accumulator, context) => accumulator.concat(context.keys()),
+ [],
+ );
$.holdReady(true);
- sourceFiles.keys().forEach(function(path) {
- // ignore if there is a matching spec file
- if (testsContext.keys().indexOf(`${path.replace(/\.(js|vue)$/, '')}_spec`) > -1) {
- return;
- }
-
- it(`includes '${path}'`, function() {
- try {
- sourceFiles(path);
- } catch (err) {
- if (troubleMakers.indexOf(path) === -1) {
- expect(err).toBeNull();
- }
+ sourceFilesContexts.forEach(context => {
+ context.keys().forEach(path => {
+ // ignore if there is a matching spec file
+ if (allTestFiles.indexOf(`${path.replace(/\.(js|vue)$/, '')}_spec`) > -1) {
+ return;
}
+
+ it(`includes '${path}'`, function() {
+ try {
+ context(path);
+ } catch (err) {
+ if (troubleMakers.indexOf(path) === -1) {
+ expect(err).toBeNull();
+ }
+ }
+ });
});
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
index d905bbe4040..de213210cfc 100644
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_pipeline_spec.js
@@ -1,6 +1,7 @@
import Vue from 'vue';
import pipelineComponent from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import { trimText } from 'spec/helpers/vue_component_helper';
import mockData from '../mock_data';
describe('MRWidgetPipeline', () => {
@@ -123,7 +124,7 @@ describe('MRWidgetPipeline', () => {
describe('without commit path', () => {
beforeEach(() => {
- const mockCopy = Object.assign({}, mockData);
+ const mockCopy = JSON.parse(JSON.stringify(mockData));
delete mockCopy.pipeline.commit;
vm = mountComponent(Component, {
@@ -164,7 +165,7 @@ describe('MRWidgetPipeline', () => {
describe('without coverage', () => {
it('should not render a coverage', () => {
- const mockCopy = Object.assign({}, mockData);
+ const mockCopy = JSON.parse(JSON.stringify(mockData));
delete mockCopy.pipeline.coverage;
vm = mountComponent(Component, {
@@ -180,7 +181,7 @@ describe('MRWidgetPipeline', () => {
describe('without a pipeline graph', () => {
it('should not render a pipeline graph', () => {
- const mockCopy = Object.assign({}, mockData);
+ const mockCopy = JSON.parse(JSON.stringify(mockData));
delete mockCopy.pipeline.details.stages;
vm = mountComponent(Component, {
@@ -193,5 +194,81 @@ describe('MRWidgetPipeline', () => {
expect(vm.$el.querySelector('.js-mini-pipeline-graph')).toEqual(null);
});
});
+
+ describe('without pipeline.merge_request', () => {
+ it('should render info that includes the commit and branch details', () => {
+ const mockCopy = JSON.parse(JSON.stringify(mockData));
+ delete mockCopy.pipeline.merge_request;
+ const { pipeline } = mockCopy;
+
+ vm = mountComponent(Component, {
+ pipeline,
+ hasCi: true,
+ ciStatus: 'success',
+ troubleshootingDocsPath: 'help',
+ sourceBranchLink: mockCopy.source_branch_link,
+ });
+
+ const expected = `Pipeline #${pipeline.id} ${pipeline.details.status.label} for ${
+ pipeline.commit.short_id
+ } on ${mockCopy.source_branch_link}`;
+
+ const actual = trimText(vm.$el.querySelector('.js-pipeline-info-container').innerText);
+
+ expect(actual).toBe(expected);
+ });
+ });
+
+ describe('with pipeline.merge_request and flags.merge_request_pipeline', () => {
+ it('should render info that includes the commit, MR, source branch, and target branch details', () => {
+ const mockCopy = JSON.parse(JSON.stringify(mockData));
+ const { pipeline } = mockCopy;
+ pipeline.flags.merge_request_pipeline = true;
+ pipeline.flags.detached_merge_request_pipeline = false;
+
+ vm = mountComponent(Component, {
+ pipeline,
+ hasCi: true,
+ ciStatus: 'success',
+ troubleshootingDocsPath: 'help',
+ sourceBranchLink: mockCopy.source_branch_link,
+ });
+
+ const expected = `Pipeline #${pipeline.id} ${pipeline.details.status.label} for ${
+ pipeline.commit.short_id
+ } on !${pipeline.merge_request.iid} with ${pipeline.merge_request.source_branch} into ${
+ pipeline.merge_request.target_branch
+ }`;
+
+ const actual = trimText(vm.$el.querySelector('.js-pipeline-info-container').innerText);
+
+ expect(actual).toBe(expected);
+ });
+ });
+
+ describe('with pipeline.merge_request and flags.detached_merge_request_pipeline', () => {
+ it('should render info that includes the commit, MR, and source branch details', () => {
+ const mockCopy = JSON.parse(JSON.stringify(mockData));
+ const { pipeline } = mockCopy;
+ pipeline.flags.merge_request_pipeline = false;
+ pipeline.flags.detached_merge_request_pipeline = true;
+
+ vm = mountComponent(Component, {
+ pipeline,
+ hasCi: true,
+ ciStatus: 'success',
+ troubleshootingDocsPath: 'help',
+ sourceBranchLink: mockCopy.source_branch_link,
+ });
+
+ const expected = `Pipeline #${pipeline.id} ${pipeline.details.status.label} for ${
+ pipeline.commit.short_id
+ } on !${pipeline.merge_request.iid} with ${pipeline.merge_request.source_branch}`;
+
+ const actual = trimText(vm.$el.querySelector('.js-pipeline-info-container').innerText);
+
+ expect(actual).toBe(expected);
+ });
+ });
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_status_icon_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_status_icon_spec.js
index a0a336ae604..f622f52a7b9 100644
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_status_icon_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_status_icon_spec.js
@@ -18,7 +18,7 @@ describe('MR widget status icon component', () => {
it('renders loading icon', () => {
vm = mountComponent(Component, { status: 'loading' });
- expect(vm.$el.querySelector('.mr-widget-icon i').classList).toContain('fa-spinner');
+ expect(vm.$el.querySelector('.mr-widget-icon span').classList).toContain('spinner');
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
index eb4fa0df727..d93badf8cd3 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
@@ -38,7 +38,7 @@ describe('MRWidgetAutoMergeFailed', () => {
Vue.nextTick(() => {
expect(vm.$el.querySelector('button').getAttribute('disabled')).toEqual('disabled');
- expect(vm.$el.querySelector('button i').classList).toContain('fa-spinner');
+ expect(vm.$el.querySelector('button .loading-container span').classList).toContain('spinner');
done();
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js
index 7da27bb8890..96e512d222a 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_checking_spec.js
@@ -20,7 +20,7 @@ describe('MRWidgetChecking', () => {
});
it('renders loading icon', () => {
- expect(vm.$el.querySelector('.mr-widget-icon i').classList).toContain('fa-spinner');
+ expect(vm.$el.querySelector('.mr-widget-icon span').classList).toContain('spinner');
});
it('renders information about merging', () => {
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index 08e173b0a10..30659ad16f3 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -18,6 +18,7 @@ const createTestMr = customConfig => {
isPipelinePassing: false,
isMergeAllowed: true,
onlyAllowMergeIfPipelineSucceeds: false,
+ ffOnlyEnabled: false,
hasCI: false,
ciStatus: null,
sha: '12345678',
@@ -376,11 +377,29 @@ describe('ReadyToMerge', () => {
});
describe('initiateMergePolling', () => {
+ beforeEach(() => {
+ jasmine.clock().install();
+ });
+
+ afterEach(() => {
+ jasmine.clock().uninstall();
+ });
+
it('should call simplePoll', () => {
const simplePoll = spyOnDependency(ReadyToMerge, 'simplePoll');
vm.initiateMergePolling();
- expect(simplePoll).toHaveBeenCalled();
+ expect(simplePoll).toHaveBeenCalledWith(jasmine.any(Function), { timeout: 0 });
+ });
+
+ it('should call handleMergePolling', () => {
+ spyOn(vm, 'handleMergePolling');
+
+ vm.initiateMergePolling();
+
+ jasmine.clock().tick(2000);
+
+ expect(vm.handleMergePolling).toHaveBeenCalled();
});
});
@@ -624,6 +643,10 @@ describe('ReadyToMerge', () => {
const findCommitsHeaderElement = () => wrapper.find(CommitsHeader);
const findCommitEditElements = () => wrapper.findAll(CommitEdit);
const findCommitDropdownElement = () => wrapper.find(CommitMessageDropdown);
+ const findFirstCommitEditLabel = () =>
+ findCommitEditElements()
+ .at(0)
+ .props('label');
describe('squash checkbox', () => {
it('should be rendered when squash before merge is enabled and there is more than 1 commit', () => {
@@ -648,31 +671,129 @@ describe('ReadyToMerge', () => {
});
describe('commits count collapsible header', () => {
- it('should be rendered if fast-forward is disabled', () => {
+ it('should be rendered when fast-forward is disabled', () => {
createLocalComponent();
expect(findCommitsHeaderElement().exists()).toBeTruthy();
});
- it('should not be rendered if fast-forward is enabled', () => {
- createLocalComponent({ mr: { ffOnlyEnabled: true } });
+ describe('when fast-forward is enabled', () => {
+ it('should be rendered if squash and squash before are enabled and there is more than 1 commit', () => {
+ createLocalComponent({
+ mr: {
+ ffOnlyEnabled: true,
+ enableSquashBeforeMerge: true,
+ squash: true,
+ commitsCount: 2,
+ },
+ });
+
+ expect(findCommitsHeaderElement().exists()).toBeTruthy();
+ });
+
+ it('should not be rendered if squash before merge is disabled', () => {
+ createLocalComponent({
+ mr: {
+ ffOnlyEnabled: true,
+ enableSquashBeforeMerge: false,
+ squash: true,
+ commitsCount: 2,
+ },
+ });
+
+ expect(findCommitsHeaderElement().exists()).toBeFalsy();
+ });
+
+ it('should not be rendered if squash is disabled', () => {
+ createLocalComponent({
+ mr: {
+ ffOnlyEnabled: true,
+ squash: false,
+ enableSquashBeforeMerge: true,
+ commitsCount: 2,
+ },
+ });
+
+ expect(findCommitsHeaderElement().exists()).toBeFalsy();
+ });
+
+ it('should not be rendered if commits count is 1', () => {
+ createLocalComponent({
+ mr: {
+ ffOnlyEnabled: true,
+ squash: true,
+ enableSquashBeforeMerge: true,
+ commitsCount: 1,
+ },
+ });
- expect(findCommitsHeaderElement().exists()).toBeFalsy();
+ expect(findCommitsHeaderElement().exists()).toBeFalsy();
+ });
});
});
describe('commits edit components', () => {
+ describe('when fast-forward merge is enabled', () => {
+ it('should not be rendered if squash is disabled', () => {
+ createLocalComponent({
+ mr: {
+ ffOnlyEnabled: true,
+ squash: false,
+ enableSquashBeforeMerge: true,
+ commitsCount: 2,
+ },
+ });
+
+ expect(findCommitEditElements().length).toBe(0);
+ });
+
+ it('should not be rendered if squash before merge is disabled', () => {
+ createLocalComponent({
+ mr: {
+ ffOnlyEnabled: true,
+ squash: true,
+ enableSquashBeforeMerge: false,
+ commitsCount: 2,
+ },
+ });
+
+ expect(findCommitEditElements().length).toBe(0);
+ });
+
+ it('should not be rendered if there is only one commit', () => {
+ createLocalComponent({
+ mr: {
+ ffOnlyEnabled: true,
+ squash: true,
+ enableSquashBeforeMerge: true,
+ commitsCount: 1,
+ },
+ });
+
+ expect(findCommitEditElements().length).toBe(0);
+ });
+
+ it('should have one edit component if squash is enabled and there is more than 1 commit', () => {
+ createLocalComponent({
+ mr: {
+ ffOnlyEnabled: true,
+ squash: true,
+ enableSquashBeforeMerge: true,
+ commitsCount: 2,
+ },
+ });
+
+ expect(findCommitEditElements().length).toBe(1);
+ expect(findFirstCommitEditLabel()).toBe('Squash commit message');
+ });
+ });
+
it('should have one edit component when squash is disabled', () => {
createLocalComponent();
expect(findCommitEditElements().length).toBe(1);
});
- const findFirstCommitEditLabel = () =>
- findCommitEditElements()
- .at(0)
- .props('label');
-
it('should have two edit components when squash is enabled and there is more than 1 commit', () => {
createLocalComponent({
mr: {
diff --git a/spec/javascripts/vue_mr_widget/mock_data.js b/spec/javascripts/vue_mr_widget/mock_data.js
index 6ef07f81705..7ab203a6011 100644
--- a/spec/javascripts/vue_mr_widget/mock_data.js
+++ b/spec/javascripts/vue_mr_widget/mock_data.js
@@ -134,6 +134,8 @@ export default {
yaml_errors: false,
retryable: true,
cancelable: false,
+ merge_request_pipeline: false,
+ detached_merge_request_pipeline: true,
},
ref: {
name: 'daaaa',
@@ -141,6 +143,15 @@ export default {
tag: false,
branch: true,
},
+ merge_request: {
+ iid: 1,
+ path: '/root/detached-merge-request-pipelines/merge_requests/1',
+ title: 'Update README.md',
+ source_branch: 'feature-1',
+ source_branch_path: '/root/detached-merge-request-pipelines/branches/feature-1',
+ target_branch: 'master',
+ target_branch_path: '/root/detached-merge-request-pipelines/branches/master',
+ },
commit: {
id: '104096c51715e12e7ae41f9333e9fa35b73f385d',
short_id: '104096c5',
diff --git a/spec/javascripts/vue_shared/components/commit_spec.js b/spec/javascripts/vue_shared/components/commit_spec.js
index 18fcdf7ede1..f2e20f626b5 100644
--- a/spec/javascripts/vue_shared/components/commit_spec.js
+++ b/spec/javascripts/vue_shared/components/commit_spec.js
@@ -61,7 +61,7 @@ describe('Commit component', () => {
});
it('should render a tag icon if it represents a tag', () => {
- expect(component.$el.querySelector('.icon-container i').classList).toContain('fa-tag');
+ expect(component.$el.querySelector('.icon-container svg.ic-tag')).not.toBeNull();
});
it('should render a link to the ref url', () => {
@@ -143,4 +143,92 @@ describe('Commit component', () => {
);
});
});
+
+ describe('When commit ref is provided, but merge ref is not', () => {
+ it('should render the commit ref', () => {
+ props = {
+ tag: false,
+ commitRef: {
+ name: 'master',
+ ref_url: 'http://localhost/namespace2/gitlabhq/tree/master',
+ },
+ commitUrl:
+ 'https://gitlab.com/gitlab-org/gitlab-ce/commit/b7836eddf62d663c665769e1b0960197fd215067',
+ shortSha: 'b7836edd',
+ title: null,
+ author: {},
+ };
+
+ component = mountComponent(CommitComponent, props);
+ const refEl = component.$el.querySelector('.ref-name');
+
+ expect(refEl.textContent).toContain('master');
+
+ expect(refEl.href).toBe(props.commitRef.ref_url);
+
+ expect(refEl.getAttribute('data-original-title')).toBe(props.commitRef.name);
+
+ expect(component.$el.querySelector('.icon-container .ic-branch')).not.toBeNull();
+ });
+ });
+
+ describe('When both commit and merge ref are provided', () => {
+ it('should render the merge ref', () => {
+ props = {
+ tag: false,
+ commitRef: {
+ name: 'master',
+ ref_url: 'http://localhost/namespace2/gitlabhq/tree/master',
+ },
+ commitUrl:
+ 'https://gitlab.com/gitlab-org/gitlab-ce/commit/b7836eddf62d663c665769e1b0960197fd215067',
+ mergeRequestRef: {
+ iid: 1234,
+ path: 'https://example.com/path/to/mr',
+ title: 'Test MR',
+ },
+ shortSha: 'b7836edd',
+ title: null,
+ author: {},
+ };
+
+ component = mountComponent(CommitComponent, props);
+ const refEl = component.$el.querySelector('.ref-name');
+
+ expect(refEl.textContent).toContain('1234');
+
+ expect(refEl.href).toBe(props.mergeRequestRef.path);
+
+ expect(refEl.getAttribute('data-original-title')).toBe(props.mergeRequestRef.title);
+
+ expect(component.$el.querySelector('.icon-container .ic-git-merge')).not.toBeNull();
+ });
+ });
+
+ describe('When showRefInfo === false', () => {
+ it('should not render any ref info', () => {
+ props = {
+ tag: false,
+ commitRef: {
+ name: 'master',
+ ref_url: 'http://localhost/namespace2/gitlabhq/tree/master',
+ },
+ commitUrl:
+ 'https://gitlab.com/gitlab-org/gitlab-ce/commit/b7836eddf62d663c665769e1b0960197fd215067',
+ mergeRequestRef: {
+ iid: 1234,
+ path: '/path/to/mr',
+ title: 'Test MR',
+ },
+ shortSha: 'b7836edd',
+ title: null,
+ author: {},
+ showRefInfo: false,
+ };
+
+ component = mountComponent(CommitComponent, props);
+
+ expect(component.$el.querySelector('.ref-name')).toBeNull();
+ });
+ });
});
diff --git a/spec/javascripts/vue_shared/components/file_icon_spec.js b/spec/javascripts/vue_shared/components/file_icon_spec.js
index 34c9b35e02a..5bea8c43da3 100644
--- a/spec/javascripts/vue_shared/components/file_icon_spec.js
+++ b/spec/javascripts/vue_shared/components/file_icon_spec.js
@@ -70,12 +70,9 @@ describe('File Icon component', () => {
loading: true,
});
- const { classList } = vm.$el.querySelector('i');
+ const { classList } = vm.$el.querySelector('.loading-container span');
- expect(classList.contains('fa')).toEqual(true);
- expect(classList.contains('fa-spin')).toEqual(true);
- expect(classList.contains('fa-spinner')).toEqual(true);
- expect(classList.contains('fa-1x')).toEqual(true);
+ expect(classList.contains('spinner')).toEqual(true);
});
it('should add a special class and a size class', () => {
diff --git a/spec/javascripts/vue_shared/components/header_ci_component_spec.js b/spec/javascripts/vue_shared/components/header_ci_component_spec.js
index 7a741bdc067..a9c1a67b39b 100644
--- a/spec/javascripts/vue_shared/components/header_ci_component_spec.js
+++ b/spec/javascripts/vue_shared/components/header_ci_component_spec.js
@@ -88,7 +88,7 @@ describe('Header CI Component', () => {
vm.actions[0].isLoading = true;
Vue.nextTick(() => {
- expect(vm.$el.querySelector('.btn .fa-spinner').getAttribute('style')).toBeFalsy();
+ expect(vm.$el.querySelector('.btn .spinner').getAttribute('style')).toBeFalsy();
done();
});
});
diff --git a/spec/lib/backup/uploads_spec.rb b/spec/lib/backup/uploads_spec.rb
new file mode 100644
index 00000000000..544d3754c0f
--- /dev/null
+++ b/spec/lib/backup/uploads_spec.rb
@@ -0,0 +1,18 @@
+require 'spec_helper'
+
+describe Backup::Uploads do
+ let(:progress) { StringIO.new }
+ subject(:backup) { described_class.new(progress) }
+
+ describe '#initialize' do
+ it 'uses the correct upload dir' do
+ Dir.mktmpdir do |tmpdir|
+ FileUtils.mkdir_p("#{tmpdir}/uploads")
+
+ allow(Gitlab.config.uploads).to receive(:storage_path) { tmpdir }
+
+ expect(backup.app_files_dir).to eq("#{tmpdir}/uploads")
+ end
+ end
+ end
+end
diff --git a/spec/lib/banzai/filter/output_safety_spec.rb b/spec/lib/banzai/filter/output_safety_spec.rb
new file mode 100644
index 00000000000..5ffe591c9a4
--- /dev/null
+++ b/spec/lib/banzai/filter/output_safety_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Banzai::Filter::OutputSafety do
+ subject do
+ Class.new do
+ include Banzai::Filter::OutputSafety
+ end.new
+ end
+
+ let(:content) { '<pre><code>foo</code></pre>' }
+
+ context 'when given HTML is safe' do
+ let(:html) { content.html_safe }
+
+ it 'returns safe HTML' do
+ expect(subject.escape_once(html)).to eq(html)
+ end
+ end
+
+ context 'when given HTML is not safe' do
+ let(:html) { content }
+
+ it 'returns escaped HTML' do
+ expect(subject.escape_once(html)).to eq(ERB::Util.html_escape_once(html))
+ end
+ end
+end
diff --git a/spec/lib/banzai/filter/suggestion_filter_spec.rb b/spec/lib/banzai/filter/suggestion_filter_spec.rb
index b13c90b54bd..af6f002fa30 100644
--- a/spec/lib/banzai/filter/suggestion_filter_spec.rb
+++ b/spec/lib/banzai/filter/suggestion_filter_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Banzai::Filter::SuggestionFilter do
include FilterSpecHelper
- let(:input) { "<pre class='code highlight js-syntax-highlight suggestion'><code>foo\n</code></pre>" }
+ let(:input) { %(<pre class="code highlight js-syntax-highlight suggestion"><code>foo\n</code></pre>) }
let(:default_context) do
{ suggestions_filter_enabled: true }
end
@@ -23,4 +23,35 @@ describe Banzai::Filter::SuggestionFilter do
expect(result[:class]).to be_nil
end
+
+ context 'multi-line suggestions' do
+ let(:data_attr) { Banzai::Filter::SyntaxHighlightFilter::LANG_PARAMS_ATTR }
+ let(:input) { %(<pre class="code highlight js-syntax-highlight suggestion" #{data_attr}="-3+2"><code>foo\n</code></pre>) }
+
+ context 'feature disabled' do
+ before do
+ stub_feature_flags(multi_line_suggestions: false)
+ end
+
+ it 'removes data-lang-params if it matches a multi-line suggestion param' do
+ doc = filter(input, default_context)
+ pre = doc.css('pre').first
+
+ expect(pre[data_attr]).to be_nil
+ end
+ end
+
+ context 'feature enabled' do
+ before do
+ stub_feature_flags(multi_line_suggestions: true)
+ end
+
+ it 'keeps data-lang-params' do
+ doc = filter(input, default_context)
+ pre = doc.css('pre').first
+
+ expect(pre[data_attr]).to eq('-3+2')
+ end
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index ef52c572898..05057789cc1 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -45,7 +45,10 @@ describe Banzai::Filter::SyntaxHighlightFilter do
end
context "languages that should be passed through" do
- %w(math mermaid plantuml).each do |lang|
+ let(:delimiter) { described_class::PARAMS_DELIMITER }
+ let(:data_attr) { described_class::LANG_PARAMS_ATTR }
+
+ %w(math mermaid plantuml suggestion).each do |lang|
context "when #{lang} is specified" do
it "highlights as plaintext but with the correct language attribute and class" do
result = filter(%{<pre><code lang="#{lang}">This is a test</code></pre>})
@@ -55,6 +58,33 @@ describe Banzai::Filter::SyntaxHighlightFilter do
include_examples "XSS prevention", lang
end
+
+ context "when #{lang} has extra params" do
+ let(:lang_params) { 'foo-bar-kux' }
+
+ it "includes data-lang-params tag with extra information" do
+ result = filter(%{<pre><code lang="#{lang}#{delimiter}#{lang_params}">This is a test</code></pre>})
+
+ expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight #{lang}" lang="#{lang}" #{data_attr}="#{lang_params}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>})
+ end
+
+ include_examples "XSS prevention", lang
+ include_examples "XSS prevention",
+ "#{lang}#{described_class::PARAMS_DELIMITER}&lt;script&gt;alert(1)&lt;/script&gt;"
+ include_examples "XSS prevention",
+ "#{lang}#{described_class::PARAMS_DELIMITER}<script>alert(1)</script>"
+ end
+ end
+
+ context 'when multiple param delimiters are used' do
+ let(:lang) { 'suggestion' }
+ let(:lang_params) { '-1+10' }
+
+ it "delimits on the first appearence" do
+ result = filter(%{<pre><code lang="#{lang}#{delimiter}#{lang_params}#{delimiter}more-things">This is a test</code></pre>})
+
+ expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight #{lang}" lang="#{lang}" #{data_attr}="#{lang_params}#{delimiter}more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>})
+ end
end
end
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index dcbd12fe190..b765c265e69 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -207,6 +207,7 @@ describe Gitlab::Auth::OAuth::User do
before do
allow(ldap_user).to receive(:uid) { uid }
allow(ldap_user).to receive(:username) { uid }
+ allow(ldap_user).to receive(:name) { 'John Doe' }
allow(ldap_user).to receive(:email) { ['johndoe@example.com', 'john2@example.com'] }
allow(ldap_user).to receive(:dn) { dn }
end
@@ -221,6 +222,7 @@ describe Gitlab::Auth::OAuth::User do
it "creates a user with dual LDAP and omniauth identities" do
expect(gl_user).to be_valid
expect(gl_user.username).to eql uid
+ expect(gl_user.name).to eql 'John Doe'
expect(gl_user.email).to eql 'johndoe@example.com'
expect(gl_user.identities.length).to be 2
identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
@@ -232,11 +234,13 @@ describe Gitlab::Auth::OAuth::User do
)
end
- it "has email set as synced" do
+ it "has name and email set as synced" do
+ expect(gl_user.user_synced_attributes_metadata.name_synced).to be_truthy
expect(gl_user.user_synced_attributes_metadata.email_synced).to be_truthy
end
- it "has email set as read-only" do
+ it "has name and email set as read-only" do
+ expect(gl_user.read_only_attribute?(:name)).to be_truthy
expect(gl_user.read_only_attribute?(:email)).to be_truthy
end
@@ -246,7 +250,7 @@ describe Gitlab::Auth::OAuth::User do
end
context "and LDAP user has an account already" do
- let!(:existing_user) { create(:omniauth_user, email: 'john@example.com', extern_uid: dn, provider: 'ldapmain', username: 'john') }
+ let!(:existing_user) { create(:omniauth_user, name: 'John Doe', email: 'john@example.com', extern_uid: dn, provider: 'ldapmain', username: 'john') }
it "adds the omniauth identity to the LDAP account" do
allow(Gitlab::Auth::LDAP::Person).to receive(:find_by_uid).and_return(ldap_user)
@@ -254,6 +258,7 @@ describe Gitlab::Auth::OAuth::User do
expect(gl_user).to be_valid
expect(gl_user.username).to eql 'john'
+ expect(gl_user.name).to eql 'John Doe'
expect(gl_user.email).to eql 'john@example.com'
expect(gl_user.identities.length).to be 2
identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
diff --git a/spec/lib/gitlab/authorized_keys_spec.rb b/spec/lib/gitlab/authorized_keys_spec.rb
new file mode 100644
index 00000000000..42bc509eeef
--- /dev/null
+++ b/spec/lib/gitlab/authorized_keys_spec.rb
@@ -0,0 +1,194 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::AuthorizedKeys do
+ let(:logger) { double('logger').as_null_object }
+
+ subject { described_class.new(logger) }
+
+ describe '#add_key' do
+ context 'authorized_keys file exists' do
+ before do
+ create_authorized_keys_fixture
+ end
+
+ after do
+ delete_authorized_keys_file
+ end
+
+ it "adds a line at the end of the file and strips trailing garbage" do
+ auth_line = "command=\"#{Gitlab.config.gitlab_shell.path}/bin/gitlab-shell key-741\",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty ssh-rsa AAAAB3NzaDAxx2E"
+
+ expect(logger).to receive(:info).with('Adding key (key-741): ssh-rsa AAAAB3NzaDAxx2E')
+ expect(subject.add_key('key-741', 'ssh-rsa AAAAB3NzaDAxx2E trailing garbage'))
+ .to be_truthy
+ expect(File.read(tmp_authorized_keys_path)).to eq("existing content\n#{auth_line}\n")
+ end
+ end
+
+ context 'authorized_keys file does not exist' do
+ before do
+ delete_authorized_keys_file
+ end
+
+ it 'creates the file' do
+ expect(subject.add_key('key-741', 'ssh-rsa AAAAB3NzaDAxx2E')).to be_truthy
+ expect(File.exist?(tmp_authorized_keys_path)).to be_truthy
+ end
+ end
+ end
+
+ describe '#batch_add_keys' do
+ let(:keys) do
+ [
+ double(shell_id: 'key-12', key: 'ssh-dsa ASDFASGADG trailing garbage'),
+ double(shell_id: 'key-123', key: 'ssh-rsa GFDGDFSGSDFG')
+ ]
+ end
+
+ context 'authorized_keys file exists' do
+ before do
+ create_authorized_keys_fixture
+ end
+
+ after do
+ delete_authorized_keys_file
+ end
+
+ it "adds lines at the end of the file" do
+ auth_line1 = "command=\"#{Gitlab.config.gitlab_shell.path}/bin/gitlab-shell key-12\",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty ssh-dsa ASDFASGADG"
+ auth_line2 = "command=\"#{Gitlab.config.gitlab_shell.path}/bin/gitlab-shell key-123\",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty ssh-rsa GFDGDFSGSDFG"
+
+ expect(logger).to receive(:info).with('Adding key (key-12): ssh-dsa ASDFASGADG')
+ expect(logger).to receive(:info).with('Adding key (key-123): ssh-rsa GFDGDFSGSDFG')
+ expect(subject.batch_add_keys(keys)).to be_truthy
+ expect(File.read(tmp_authorized_keys_path)).to eq("existing content\n#{auth_line1}\n#{auth_line2}\n")
+ end
+
+ context "invalid key" do
+ let(:keys) { [double(shell_id: 'key-123', key: "ssh-rsa A\tSDFA\nSGADG")] }
+
+ it "doesn't add keys" do
+ expect(subject.batch_add_keys(keys)).to be_falsey
+ expect(File.read(tmp_authorized_keys_path)).to eq("existing content\n")
+ end
+ end
+ end
+
+ context 'authorized_keys file does not exist' do
+ before do
+ delete_authorized_keys_file
+ end
+
+ it 'creates the file' do
+ expect(subject.batch_add_keys(keys)).to be_truthy
+ expect(File.exist?(tmp_authorized_keys_path)).to be_truthy
+ end
+ end
+ end
+
+ describe '#rm_key' do
+ context 'authorized_keys file exists' do
+ before do
+ create_authorized_keys_fixture
+ end
+
+ after do
+ delete_authorized_keys_file
+ end
+
+ it "removes the right line" do
+ other_line = "command=\"#{Gitlab.config.gitlab_shell.path}/bin/gitlab-shell key-742\",options ssh-rsa AAAAB3NzaDAxx2E"
+ delete_line = "command=\"#{Gitlab.config.gitlab_shell.path}/bin/gitlab-shell key-741\",options ssh-rsa AAAAB3NzaDAxx2E"
+ erased_line = delete_line.gsub(/./, '#')
+ File.open(tmp_authorized_keys_path, 'a') do |auth_file|
+ auth_file.puts delete_line
+ auth_file.puts other_line
+ end
+
+ expect(logger).to receive(:info).with('Removing key (key-741)')
+ expect(subject.rm_key('key-741')).to be_truthy
+ expect(File.read(tmp_authorized_keys_path)).to eq("existing content\n#{erased_line}\n#{other_line}\n")
+ end
+ end
+
+ context 'authorized_keys file does not exist' do
+ before do
+ delete_authorized_keys_file
+ end
+
+ it 'returns false' do
+ expect(subject.rm_key('key-741')).to be_falsey
+ end
+ end
+ end
+
+ describe '#clear' do
+ context 'authorized_keys file exists' do
+ before do
+ create_authorized_keys_fixture
+ end
+
+ after do
+ delete_authorized_keys_file
+ end
+
+ it "returns true" do
+ expect(subject.clear).to be_truthy
+ end
+ end
+
+ context 'authorized_keys file does not exist' do
+ before do
+ delete_authorized_keys_file
+ end
+
+ it "still returns true" do
+ expect(subject.clear).to be_truthy
+ end
+ end
+ end
+
+ describe '#list_key_ids' do
+ context 'authorized_keys file exists' do
+ before do
+ create_authorized_keys_fixture(
+ existing_content:
+ "key-1\tssh-dsa AAA\nkey-2\tssh-rsa BBB\nkey-3\tssh-rsa CCC\nkey-9000\tssh-rsa DDD\n"
+ )
+ end
+
+ after do
+ delete_authorized_keys_file
+ end
+
+ it 'returns array of key IDs' do
+ expect(subject.list_key_ids).to eq([1, 2, 3, 9000])
+ end
+ end
+
+ context 'authorized_keys file does not exist' do
+ before do
+ delete_authorized_keys_file
+ end
+
+ it 'returns an empty array' do
+ expect(subject.list_key_ids).to be_empty
+ end
+ end
+ end
+
+ def create_authorized_keys_fixture(existing_content: 'existing content')
+ FileUtils.mkdir_p(File.dirname(tmp_authorized_keys_path))
+ File.open(tmp_authorized_keys_path, 'w') { |file| file.puts(existing_content) }
+ end
+
+ def delete_authorized_keys_file
+ File.delete(tmp_authorized_keys_path) if File.exist?(tmp_authorized_keys_path)
+ end
+
+ def tmp_authorized_keys_path
+ Gitlab.config.gitlab_shell.authorized_keys_file
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
index bc71a90605a..e7ff9169f1b 100644
--- a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
+++ b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
@@ -172,10 +172,8 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :m
let(:exception) { ActiveRecord::RecordNotFound }
let(:perform_ignoring_exceptions) do
- begin
- subject.perform(start_id, stop_id)
- rescue described_class::Error
- end
+ subject.perform(start_id, stop_id)
+ rescue described_class::Error
end
before do
diff --git a/spec/lib/gitlab/badge/pipeline/template_spec.rb b/spec/lib/gitlab/badge/pipeline/template_spec.rb
index 20fa4f879c3..bcef0b7e120 100644
--- a/spec/lib/gitlab/badge/pipeline/template_spec.rb
+++ b/spec/lib/gitlab/badge/pipeline/template_spec.rb
@@ -59,6 +59,16 @@ describe Gitlab::Badge::Pipeline::Template do
end
end
+ context 'when status is preparing' do
+ before do
+ allow(badge).to receive(:status).and_return('preparing')
+ end
+
+ it 'has expected color' do
+ expect(template.value_color).to eq '#dfb317'
+ end
+ end
+
context 'when status is unknown' do
before do
allow(badge).to receive(:status).and_return('unknown')
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index c432cc223b9..e1a2bae5fe8 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -95,6 +95,9 @@ describe Gitlab::BitbucketImport::Importer do
subject { described_class.new(project) }
describe '#import_pull_requests' do
+ let(:source_branch_sha) { sample.commits.last }
+ let(:target_branch_sha) { sample.commits.first }
+
before do
allow(subject).to receive(:import_wiki)
allow(subject).to receive(:import_issues)
@@ -102,9 +105,9 @@ describe Gitlab::BitbucketImport::Importer do
pull_request = instance_double(
Bitbucket::Representation::PullRequest,
iid: 10,
- source_branch_sha: sample.commits.last,
+ source_branch_sha: source_branch_sha,
source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
- target_branch_sha: sample.commits.first,
+ target_branch_sha: target_branch_sha,
target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
title: 'This is a title',
description: 'This is a test pull request',
@@ -162,6 +165,19 @@ describe Gitlab::BitbucketImport::Importer do
expect(reply_note).to be_a(DiffNote)
expect(reply_note.note).to eq(@reply.note)
end
+
+ context "when branches' sha is not found in the repository" do
+ let(:source_branch_sha) { 'a' * Commit::MIN_SHA_LENGTH }
+ let(:target_branch_sha) { 'b' * Commit::MIN_SHA_LENGTH }
+
+ it 'uses the pull request sha references' do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ merge_request_diff = MergeRequest.first.merge_request_diff
+ expect(merge_request_diff.head_commit_sha).to eq source_branch_sha
+ expect(merge_request_diff.start_commit_sha).to eq target_branch_sha
+ end
+ end
end
context 'issues statuses' do
diff --git a/spec/lib/gitlab/checks/branch_check_spec.rb b/spec/lib/gitlab/checks/branch_check_spec.rb
index 12beeecd470..8d5ab27a17c 100644
--- a/spec/lib/gitlab/checks/branch_check_spec.rb
+++ b/spec/lib/gitlab/checks/branch_check_spec.rb
@@ -108,64 +108,86 @@ describe Gitlab::Checks::BranchCheck do
end
context 'protected branch creation feature is enabled' do
- context 'user is not allowed to create protected branches' do
+ context 'user can push to branch' do
before do
allow(user_access)
- .to receive(:can_merge_to_branch?)
+ .to receive(:can_push_to_branch?)
.with('feature')
- .and_return(false)
+ .and_return(true)
end
- it 'raises an error' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to create protected branches on this project.')
+ it 'does not raise an error' do
+ expect { subject.validate! }.not_to raise_error
end
end
- context 'user is allowed to create protected branches' do
+ context 'user cannot push to branch' do
before do
allow(user_access)
- .to receive(:can_merge_to_branch?)
+ .to receive(:can_push_to_branch?)
.with('feature')
- .and_return(true)
-
- allow(project.repository)
- .to receive(:branch_names_contains_sha)
- .with(newrev)
- .and_return(['branch'])
+ .and_return(false)
end
- context "newrev isn't in any protected branches" do
+ context 'user cannot merge to branch' do
before do
- allow(ProtectedBranch)
- .to receive(:any_protected?)
- .with(project, ['branch'])
+ allow(user_access)
+ .to receive(:can_merge_to_branch?)
+ .with('feature')
.and_return(false)
end
it 'raises an error' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You can only use an existing protected branch ref as the basis of a new protected branch.')
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You are not allowed to create protected branches on this project.')
end
end
- context 'newrev is included in a protected branch' do
+ context 'user can merge to branch' do
before do
- allow(ProtectedBranch)
- .to receive(:any_protected?)
- .with(project, ['branch'])
+ allow(user_access)
+ .to receive(:can_merge_to_branch?)
+ .with('feature')
.and_return(true)
+
+ allow(project.repository)
+ .to receive(:branch_names_contains_sha)
+ .with(newrev)
+ .and_return(['branch'])
end
- context 'via web interface' do
- let(:protocol) { 'web' }
+ context "newrev isn't in any protected branches" do
+ before do
+ allow(ProtectedBranch)
+ .to receive(:any_protected?)
+ .with(project, ['branch'])
+ .and_return(false)
+ end
- it 'allows branch creation' do
- expect { subject.validate! }.not_to raise_error
+ it 'raises an error' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You can only use an existing protected branch ref as the basis of a new protected branch.')
end
end
- context 'via SSH' do
- it 'raises an error' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You can only create protected branches using the web interface and API.')
+ context 'newrev is included in a protected branch' do
+ before do
+ allow(ProtectedBranch)
+ .to receive(:any_protected?)
+ .with(project, ['branch'])
+ .and_return(true)
+ end
+
+ context 'via web interface' do
+ let(:protocol) { 'web' }
+
+ it 'allows branch creation' do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+
+ context 'via SSH' do
+ it 'raises an error' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You can only create protected branches using the web interface and API.')
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/build/policy/refs_spec.rb b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
index b4ddbf89b70..ec0450643c3 100644
--- a/spec/lib/gitlab/ci/build/policy/refs_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/refs_spec.rb
@@ -92,10 +92,23 @@ describe Gitlab::Ci::Build::Policy::Refs do
.to be_satisfied_by(pipeline)
end
+ it 'is satisfied when case-insensitive regexp matches pipeline ref' do
+ expect(described_class.new(['/DOCS-.*/i']))
+ .to be_satisfied_by(pipeline)
+ end
+
it 'is not satisfied when regexp does not match pipeline ref' do
expect(described_class.new(['/fix-.*/']))
.not_to be_satisfied_by(pipeline)
end
end
+
+ context 'malicious regexp' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, ref: malicious_text) }
+
+ subject { described_class.new([malicious_regexp_ruby]) }
+
+ include_examples 'malicious regexp'
+ end
end
end
diff --git a/spec/lib/gitlab/ci/build/prerequisite/factory_spec.rb b/spec/lib/gitlab/ci/build/prerequisite/factory_spec.rb
new file mode 100644
index 00000000000..5187f99a441
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/prerequisite/factory_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Prerequisite::Factory do
+ let(:build) { create(:ci_build) }
+
+ describe '.for_build' do
+ let(:kubernetes_namespace) do
+ instance_double(
+ Gitlab::Ci::Build::Prerequisite::KubernetesNamespace,
+ unmet?: unmet)
+ end
+
+ subject { described_class.new(build).unmet }
+
+ before do
+ expect(Gitlab::Ci::Build::Prerequisite::KubernetesNamespace)
+ .to receive(:new).with(build).and_return(kubernetes_namespace)
+ end
+
+ context 'prerequisite is unmet' do
+ let(:unmet) { true }
+
+ it { is_expected.to eq [kubernetes_namespace] }
+ end
+
+ context 'prerequisite is met' do
+ let(:unmet) { false }
+
+ it { is_expected.to be_empty }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
new file mode 100644
index 00000000000..62dcd80fad7
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
+ let(:build) { create(:ci_build) }
+
+ describe '#unmet?' do
+ subject { described_class.new(build).unmet? }
+
+ context 'build has no deployment' do
+ before do
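+ # Sanity check: a plain ci_build factory has no associated deployment.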
+ expect(build.deployment).to be_nil
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'build has a deployment' do
+ let!(:deployment) { create(:deployment, deployable: build) }
+
+ context 'and a cluster to deploy to' do
+ let(:cluster) { create(:cluster, projects: [build.project]) }
+
+ before do
+ allow(build.deployment).to receive(:cluster).and_return(cluster)
+ end
+
+ it { is_expected.to be_truthy }
+
+ context 'and a namespace is already created for this project' do
+ let!(:kubernetes_namespace) { create(:cluster_kubernetes_namespace, cluster: cluster, project: build.project) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'and no cluster to deploy to' do
+ before do
+ expect(deployment.cluster).to be_nil
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+
+ describe '#complete!' do
+ let!(:deployment) { create(:deployment, deployable: build) }
+ let(:service) { double(execute: true) }
+
+ subject { described_class.new(build).complete! }
+
+ context 'completion is required' do
+ let(:cluster) { create(:cluster, projects: [build.project]) }
+
+ before do
+ allow(build.deployment).to receive(:cluster).and_return(cluster)
+ end
+
+ it 'creates a kubernetes namespace' do
+ expect(Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService)
+ .to receive(:new)
+ .with(cluster: cluster, kubernetes_namespace: instance_of(Clusters::KubernetesNamespace))
+ .and_return(service)
+
+ expect(service).to receive(:execute).once
+
+ subject
+ end
+ end
+
+ context 'completion is not required' do
+ before do
+ expect(deployment.cluster).to be_nil
+ end
+
+ it 'does not create a namespace' do
+ expect(Clusters::Gcp::Kubernetes::CreateOrUpdateNamespaceService).not_to receive(:new)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb
index 3ebc2e94727..cff7f57ceff 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb
@@ -85,7 +85,7 @@ describe Gitlab::Ci::Pipeline::Expression::Lexeme::Pattern do
end
it 'raises error if evaluated regexp is not valid' do
- allow(Gitlab::UntrustedRegexp).to receive(:valid?).and_return(true)
+ allow(Gitlab::UntrustedRegexp::RubySyntax).to receive(:valid?).and_return(true)
regexp = described_class.new('/invalid ( .*/')
diff --git a/spec/lib/gitlab/ci/status/build/preparing_spec.rb b/spec/lib/gitlab/ci/status/build/preparing_spec.rb
new file mode 100644
index 00000000000..4d8945845ba
--- /dev/null
+++ b/spec/lib/gitlab/ci/status/build/preparing_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Status::Build::Preparing do
+ subject do
+ described_class.new(double('subject'))
+ end
+
+ describe '#illustration' do
+ it { expect(subject.illustration).to include(:image, :size, :title, :content) }
+ end
+
+ describe '.matches?' do
+ subject { described_class.matches?(build, nil) }
+
+ context 'when build is preparing' do
+ let(:build) { create(:ci_build, :preparing) }
+
+ it 'is a correct match' do
+ expect(subject).to be true
+ end
+ end
+
+ context 'when build is not preparing' do
+ let(:build) { create(:ci_build, :success) }
+
+ it 'does not match' do
+ expect(subject).to be false
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/status/preparing_spec.rb b/spec/lib/gitlab/ci/status/preparing_spec.rb
new file mode 100644
index 00000000000..7211c0e506d
--- /dev/null
+++ b/spec/lib/gitlab/ci/status/preparing_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Status::Preparing do
+ subject do
+ described_class.new(double('subject'), nil)
+ end
+
+ describe '#text' do
+ it { expect(subject.text).to eq 'preparing' }
+ end
+
+ describe '#label' do
+ it { expect(subject.label).to eq 'preparing' }
+ end
+
+ describe '#icon' do
+ it { expect(subject.icon).to eq 'status_created' }
+ end
+
+ describe '#favicon' do
+ it { expect(subject.favicon).to eq 'favicon_status_created' }
+ end
+
+ describe '#group' do
+ it { expect(subject.group).to eq 'preparing' }
+ end
+end
diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb
index 38626f728d7..e45ea1c2528 100644
--- a/spec/lib/gitlab/ci/trace/stream_spec.rb
+++ b/spec/lib/gitlab/ci/trace/stream_spec.rb
@@ -414,7 +414,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'malicious regexp' do
let(:data) { malicious_text }
- let(:regex) { malicious_regexp }
+ let(:regex) { malicious_regexp_re2 }
include_examples 'malicious regexp'
end
diff --git a/spec/lib/gitlab/danger/teammate_spec.rb b/spec/lib/gitlab/danger/teammate_spec.rb
new file mode 100644
index 00000000000..4bc0a4c1398
--- /dev/null
+++ b/spec/lib/gitlab/danger/teammate_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+describe Gitlab::Danger::Teammate do
+ subject { described_class.new({ 'projects' => projects }) }
+ let(:projects) { { project => capabilities } }
+ let(:project) { double }
+
+ describe 'multiple roles per project' do
+ let(:capabilities) { ['reviewer backend', 'maintainer frontend', 'trainee_maintainer database'] }
+
+ it '#reviewer? supports multiple roles per project' do
+ expect(subject.reviewer?(project, 'backend')).to be_truthy
+ end
+
+ it '#traintainer? supports multiple roles per project' do
+ expect(subject.traintainer?(project, 'database')).to be_truthy
+ end
+
+ it '#maintainer? supports multiple roles per project' do
+ expect(subject.maintainer?(project, 'frontend')).to be_truthy
+ end
+ end
+
+ describe 'one role per project' do
+ let(:capabilities) { 'reviewer backend' }
+
+ it '#reviewer? supports one role per project' do
+ expect(subject.reviewer?(project, 'backend')).to be_truthy
+ end
+
+ it '#traintainer? supports one role per project' do
+ expect(subject.traintainer?(project, 'database')).to be_falsey
+ end
+
+ it '#maintainer? supports one role per project' do
+ expect(subject.maintainer?(project, 'frontend')).to be_falsey
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb b/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb
index b44e8c5a110..bd3c66d0548 100644
--- a/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb
+++ b/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb
@@ -6,10 +6,11 @@ describe Gitlab::Database::Count::ReltuplesCountStrategy do
create(:identity)
end
- let(:models) { [Project, Identity] }
subject { described_class.new(models).count }
describe '#count', :postgresql do
+ let(:models) { [Project, Identity] }
+
context 'when reltuples is up to date' do
before do
ActiveRecord::Base.connection.execute('ANALYZE projects')
@@ -23,6 +24,22 @@ describe Gitlab::Database::Count::ReltuplesCountStrategy do
end
end
+ context 'when models using single-table inheritance are used' do
+ let(:models) { [Group, CiService, Namespace] }
+
+ before do
+ models.each do |model|
+ ActiveRecord::Base.connection.execute("ANALYZE #{model.table_name}")
+ end
+ end
+
+ it 'returns nil counts for inherited tables' do
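+ # The strategy must not fall back to an exact COUNT(*) for the STI models.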
+ models.each { |model| expect(model).not_to receive(:count) }
+
+ expect(subject).to eq({ Namespace => 3 })
+ end
+ end
+
context 'insufficient permissions' do
it 'returns an empty hash' do
allow(ActiveRecord::Base).to receive(:transaction).and_raise(PG::InsufficientPrivilege)
diff --git a/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
index 203f9344a41..40d810b195b 100644
--- a/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
+++ b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
@@ -4,15 +4,23 @@ describe Gitlab::Database::Count::TablesampleCountStrategy do
before do
create_list(:project, 3)
create(:identity)
+ create(:group)
end
- let(:models) { [Project, Identity] }
+ let(:models) { [Project, Identity, Group, Namespace] }
let(:strategy) { described_class.new(models) }
subject { strategy.count }
describe '#count', :postgresql do
- let(:estimates) { { Project => threshold + 1, Identity => threshold - 1 } }
+ let(:estimates) do
+ {
+ Project => threshold + 1,
+ Identity => threshold - 1,
+ Group => threshold + 1,
+ Namespace => threshold + 1
+ }
+ end
let(:threshold) { Gitlab::Database::Count::TablesampleCountStrategy::EXACT_COUNT_THRESHOLD }
before do
@@ -30,9 +38,13 @@ describe Gitlab::Database::Count::TablesampleCountStrategy do
context 'for tables with an estimated large size' do
it 'performs a tablesample count' do
expect(Project).not_to receive(:count)
+ expect(Group).not_to receive(:count)
+ expect(Namespace).not_to receive(:count)
result = subject
expect(result[Project]).to eq(3)
+ expect(result[Group]).to eq(1)
+ expect(result[Namespace]).to eq(4)
end
end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 60106ee3c0b..5f57cd6b825 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -17,6 +17,20 @@ describe Gitlab::Database do
end
end
+ describe '.human_adapter_name' do
+ it 'returns PostgreSQL when using PostgreSQL' do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+
+ expect(described_class.human_adapter_name).to eq('PostgreSQL')
+ end
+
+ it 'returns MySQL when using MySQL' do
+ allow(described_class).to receive(:postgresql?).and_return(false)
+
+ expect(described_class.human_adapter_name).to eq('MySQL')
+ end
+ end
+
# These are just simple smoke tests to check if the methods work (regardless
# of what they may return).
describe '.mysql?' do
@@ -87,6 +101,38 @@ describe Gitlab::Database do
end
end
+ describe '.postgresql_minimum_supported_version?' do
+ it 'returns false when not using PostgreSQL' do
+ allow(described_class).to receive(:postgresql?).and_return(false)
+
+ expect(described_class.postgresql_minimum_supported_version?).to eq(false)
+ end
+
+ context 'when using PostgreSQL' do
+ before do
+ allow(described_class).to receive(:postgresql?).and_return(true)
+ end
+
+ it 'returns false when using PostgreSQL 9.5' do
+ allow(described_class).to receive(:version).and_return('9.5')
+
+ expect(described_class.postgresql_minimum_supported_version?).to eq(false)
+ end
+
+ it 'returns true when using PostgreSQL 9.6' do
+ allow(described_class).to receive(:version).and_return('9.6')
+
+ expect(described_class.postgresql_minimum_supported_version?).to eq(true)
+ end
+
+ it 'returns true when using PostgreSQL 10 or newer' do
+ allow(described_class).to receive(:version).and_return('10')
+
+ expect(described_class.postgresql_minimum_supported_version?).to eq(true)
+ end
+ end
+ end
+
describe '.join_lateral_supported?' do
it 'returns false when using MySQL' do
allow(described_class).to receive(:postgresql?).and_return(false)
@@ -195,6 +241,12 @@ describe Gitlab::Database do
end
end
+ describe '.pg_last_xact_replay_timestamp' do
+ it 'returns pg_last_xact_replay_timestamp' do
+ expect(described_class.pg_last_xact_replay_timestamp).to eq('pg_last_xact_replay_timestamp')
+ end
+ end
+
describe '.nulls_last_order' do
context 'when using PostgreSQL' do
before do
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index 611c3e946ed..cc36060f864 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -72,6 +72,13 @@ describe Gitlab::Diff::File do
expect(diff_file.diff_lines_for_serializer.last.type).to eq('match')
end
+ context 'when called multiple times' do
+ it 'only adds bottom match line once' do
+ expect(diff_file.diff_lines_for_serializer.size).to eq(31)
+ expect(diff_file.diff_lines_for_serializer.size).to eq(31)
+ end
+ end
+
context 'when deleted' do
let(:commit) { project.commit('d59c60028b053793cecfb4022de34602e1a9218e') }
let(:diff_file) { commit.diffs.diff_file_with_old_path('files/js/commit.js.coffee') }
diff --git a/spec/lib/gitlab/diff/suggestion_diff_spec.rb b/spec/lib/gitlab/diff/suggestion_diff_spec.rb
new file mode 100644
index 00000000000..5a32c2bea37
--- /dev/null
+++ b/spec/lib/gitlab/diff/suggestion_diff_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Diff::SuggestionDiff do
+ describe '#diff_lines' do
+ let(:from_content) do
+ <<-BLOB.strip_heredoc
+ "tags": ["devel", "development", "nightly"],
+ "desktop-file-name-prefix": "(Development) ",
+ "finish-args": "foo",
+ BLOB
+ end
+
+ let(:to_content) do
+ <<-BLOB.strip_heredoc
+ "buildsystem": "meson",
+ "builddir": true,
+ "name": "nautilus",
+ "bar": "bar",
+ BLOB
+ end
+
+ let(:suggestion) do
+ instance_double(Suggestion, from_line: 12,
+ from_content: from_content,
+ to_content: to_content)
+ end
+
+ subject { described_class.new(suggestion).diff_lines }
+
+ let(:expected_diff_lines) do
+ [
+ { old_pos: 12, new_pos: 12, type: "match", text: "@@ -12 +12" },
+ { old_pos: 12, new_pos: 12, type: "old", text: "-\"tags\": [\"devel\", \"development\", \"nightly\"]," },
+ { old_pos: 13, new_pos: 12, type: "old", text: "-\"desktop-file-name-prefix\": \"(Development) \"," },
+ { old_pos: 14, new_pos: 12, type: "old", text: "-\"finish-args\": \"foo\"," },
+ { old_pos: 15, new_pos: 12, type: "new", text: "+\"buildsystem\": \"meson\"," },
+ { old_pos: 15, new_pos: 13, type: "new", text: "+\"builddir\": true," },
+ { old_pos: 15, new_pos: 14, type: "new", text: "+\"name\": \"nautilus\"," },
+ { old_pos: 15, new_pos: 15, type: "new", text: "+\"bar\": \"bar\"," }
+ ]
+ end
+
+ it 'returns diff lines with correct line numbers' do
+ diff_lines = subject
+
+ expect(diff_lines).to all(be_a(Gitlab::Diff::Line))
+
+ expected_diff_lines.each_with_index do |expected_line, index|
+ expect(diff_lines[index].to_hash).to include(expected_line)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/fake_application_settings_spec.rb b/spec/lib/gitlab/fake_application_settings_spec.rb
index af12e13d36d..c81cb83d9f4 100644
--- a/spec/lib/gitlab/fake_application_settings_spec.rb
+++ b/spec/lib/gitlab/fake_application_settings_spec.rb
@@ -1,32 +1,33 @@
require 'spec_helper'
describe Gitlab::FakeApplicationSettings do
- let(:defaults) { { password_authentication_enabled_for_web: false, foobar: 'asdf', signup_enabled: true, 'test?' => 123 } }
+ let(:defaults) do
+ described_class.defaults.merge(
+ foobar: 'asdf',
+ 'test?' => 123
+ )
+ end
- subject { described_class.new(defaults) }
+ let(:setting) { described_class.new(defaults) }
it 'wraps OpenStruct variables properly' do
- expect(subject.password_authentication_enabled_for_web).to be_falsey
- expect(subject.signup_enabled).to be_truthy
- expect(subject.foobar).to eq('asdf')
+ expect(setting.password_authentication_enabled_for_web).to be_truthy
+ expect(setting.signup_enabled).to be_truthy
+ expect(setting.foobar).to eq('asdf')
end
it 'defines predicate methods' do
- expect(subject.password_authentication_enabled_for_web?).to be_falsey
- expect(subject.signup_enabled?).to be_truthy
- end
-
- it 'predicate method changes when value is updated' do
- subject.password_authentication_enabled_for_web = true
-
- expect(subject.password_authentication_enabled_for_web?).to be_truthy
+ expect(setting.password_authentication_enabled_for_web?).to be_truthy
+ expect(setting.signup_enabled?).to be_truthy
end
it 'does not define a predicate method' do
- expect(subject.foobar?).to be_nil
+ expect(setting.foobar?).to be_nil
end
it 'does not override an existing predicate method' do
- expect(subject.test?).to eq(123)
+ expect(setting.test?).to eq(123)
end
+
+ it_behaves_like 'application settings examples'
end
diff --git a/spec/lib/gitlab/git/repository_cleaner_spec.rb b/spec/lib/gitlab/git/repository_cleaner_spec.rb
index 7f9cc2bc9ec..6602f22843f 100644
--- a/spec/lib/gitlab/git/repository_cleaner_spec.rb
+++ b/spec/lib/gitlab/git/repository_cleaner_spec.rb
@@ -37,14 +37,12 @@ describe Gitlab::Git::RepositoryCleaner do
let(:object_map) { Gitlab::HttpIO.new(url, object_map_data.size) }
around do |example|
- begin
- tempfile.write(object_map_data)
- tempfile.close
+ tempfile.write(object_map_data)
+ tempfile.close
- example.run
- ensure
- tempfile.unlink
- end
+ example.run
+ ensure
+ tempfile.unlink
end
it 'removes internal references' do
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 7e6dfa30e37..8ba6862392c 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -1688,6 +1688,11 @@ describe Gitlab::Git::Repository, :seed_helper do
expect(repository.delete_config(*%w[does.not.exist test.foo1 test.foo2])).to be_nil
+ # Workaround for https://github.com/libgit2/rugged/issues/785: If
+ # Gitaly changes .gitconfig while Rugged has the file loaded
+ # Rugged::Repository#each_key will report stale values unless a
+ # lookup is done first.
+ expect(repository_rugged.config['test.foo1']).to be_nil
config_keys = repository_rugged.config.each_key.to_a
expect(config_keys).not_to include('test.foo1')
expect(config_keys).not_to include('test.foo2')
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index cf12baf1a93..f1acb1d9bc4 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -149,11 +149,21 @@ describe Gitlab::GitalyClient do
end
end
- context 'when RequestStore is enabled', :request_store do
+ context 'when RequestStore is enabled and the maximum number of calls is not enforced by a feature flag', :request_store do
+ before do
+ stub_feature_flags(gitaly_enforce_requests_limits: false)
+ end
+
it 'allows up the maximum number of allowed calls' do
expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS) }.not_to raise_error
end
+ it 'allows the maximum number of calls to be exceeded if GITALY_DISABLE_REQUEST_LIMITS is set' do
+ stub_env('GITALY_DISABLE_REQUEST_LIMITS', 'true')
+
+ expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS + 1) }.not_to raise_error
+ end
+
context 'when the maximum number of calls has been reached' do
before do
call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS)
@@ -189,6 +199,32 @@ describe Gitlab::GitalyClient do
end
end
+ context 'in production and when RequestStore is enabled', :request_store do
+ before do
+ allow(Rails.env).to receive(:production?).and_return(true)
+ end
+
+ context 'when the maximum number of calls is enforced by a feature flag' do
+ before do
+ stub_feature_flags(gitaly_enforce_requests_limits: true)
+ end
+
+ it 'does not allow the maximum number of calls to be exceeded' do
+ expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS + 1) }.to raise_error(Gitlab::GitalyClient::TooManyInvocationsError)
+ end
+ end
+
+ context 'when the maximum number of calls is not enforced by a feature flag' do
+ before do
+ stub_feature_flags(gitaly_enforce_requests_limits: false)
+ end
+
+ it 'allows the maximum number of calls to be exceeded' do
+ expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS + 1) }.not_to raise_error
+ end
+ end
+ end
+
context 'when RequestStore is not active' do
it 'does not raise errors when the maximum number of allowed calls is exceeded' do
expect { call_gitaly(Gitlab::GitalyClient::MAXIMUM_GITALY_CALLS + 2) }.not_to raise_error
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
index 15e59718dce..680de47de2b 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_importer_spec.rb
@@ -273,6 +273,11 @@ describe Gitlab::GithubImport::Importer::PullRequestImporter, :clean_gitlab_redi
mr.state = 'opened'
mr.save
+ # Ensure the project creator is creating the branches because the
+ # merge request author may not have access to push to this
+ # repository. The project owner may also be a group.
+ allow(project.repository).to receive(:add_branch).with(project.creator, anything, anything).and_call_original
+
importer.insert_git_data(mr, exists)
expect(project.repository.branch_exists?(mr.source_branch)).to be_truthy
diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
index 47233ea6ee2..41810a8ec03 100644
--- a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
@@ -179,6 +179,17 @@ describe Gitlab::GithubImport::Importer::RepositoryImporter do
describe '#import_repository' do
it 'imports the repository' do
+ repo = double(:repo, default_branch: 'develop')
+
+ expect(client)
+ .to receive(:repository)
+ .with('foo/bar')
+ .and_return(repo)
+
+ expect(project)
+ .to receive(:change_head)
+ .with('develop')
+
expect(project)
.to receive(:ensure_repository)
diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb
new file mode 100644
index 00000000000..2734fcef0a0
--- /dev/null
+++ b/spec/lib/gitlab/group_search_results_spec.rb
@@ -0,0 +1,69 @@
+require 'spec_helper'
+
+describe Gitlab::GroupSearchResults do
+ let(:user) { create(:user) }
+
+ describe 'user search' do
+ let(:group) { create(:group) }
+
+ it 'returns the users belonging to the group matching the search query' do
+ user1 = create(:user, username: 'gob_bluth')
+ create(:group_member, :developer, user: user1, group: group)
+
+ user2 = create(:user, username: 'michael_bluth')
+ create(:group_member, :developer, user: user2, group: group)
+
+ create(:user, username: 'gob_2018')
+
+ result = described_class.new(user, anything, group, 'gob').objects('users')
+
+ expect(result).to eq [user1]
+ end
+
+ it 'returns the user belonging to the subgroup matching the search query', :nested_groups do
+ user1 = create(:user, username: 'gob_bluth')
+ subgroup = create(:group, parent: group)
+ create(:group_member, :developer, user: user1, group: subgroup)
+
+ create(:user, username: 'gob_2018')
+
+ result = described_class.new(user, anything, group, 'gob').objects('users')
+
+ expect(result).to eq [user1]
+ end
+
+ it 'returns the user belonging to the parent group matching the search query', :nested_groups do
+ user1 = create(:user, username: 'gob_bluth')
+ parent_group = create(:group, children: [group])
+ create(:group_member, :developer, user: user1, group: parent_group)
+
+ create(:user, username: 'gob_2018')
+
+ result = described_class.new(user, anything, group, 'gob').objects('users')
+
+ expect(result).to eq [user1]
+ end
+
+ it 'does not return the user belonging to the private subgroup', :nested_groups do
+ user1 = create(:user, username: 'gob_bluth')
+ subgroup = create(:group, :private, parent: group)
+ create(:group_member, :developer, user: user1, group: subgroup)
+
+ create(:user, username: 'gob_2018')
+
+ result = described_class.new(user, anything, group, 'gob').objects('users')
+
+ expect(result).to eq []
+ end
+
+ it 'does not return the user belonging to an unrelated group' do
+ user = create(:user, username: 'gob_bluth')
+ unrelated_group = create(:group)
+ create(:group_member, :developer, user: user, group: unrelated_group)
+
+ result = described_class.new(user, anything, group, 'gob').objects('users')
+
+ expect(result).to eq []
+ end
+ end
+end
diff --git a/spec/lib/gitlab/hashed_storage/migrator_spec.rb b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
index 6154b3e2f76..8e253b51597 100644
--- a/spec/lib/gitlab/hashed_storage/migrator_spec.rb
+++ b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
@@ -1,6 +1,8 @@
+# frozen_string_literal: true
+
require 'spec_helper'
-describe Gitlab::HashedStorage::Migrator do
+describe Gitlab::HashedStorage::Migrator, :sidekiq, :redis do
describe '#bulk_schedule_migration' do
it 'schedules job to HashedStorage::MigratorWorker' do
Sidekiq::Testing.fake! do
@@ -182,4 +184,52 @@ describe Gitlab::HashedStorage::Migrator do
end
end
end
+
+ describe '#migration_pending?' do
+ set(:project) { create(:project, :empty_repo) }
+
+ it 'returns true when there are MigratorWorker jobs scheduled' do
+ Sidekiq::Testing.disable! do
+ ::HashedStorage::MigratorWorker.perform_async(1, 5)
+
+ expect(subject.migration_pending?).to be_truthy
+ end
+ end
+
+ it 'returns true when there are ProjectMigrateWorker jobs scheduled' do
+ Sidekiq::Testing.disable! do
+ ::HashedStorage::ProjectMigrateWorker.perform_async(1)
+
+ expect(subject.migration_pending?).to be_truthy
+ end
+ end
+
+ it 'returns false when queues are empty' do
+ expect(subject.migration_pending?).to be_falsey
+ end
+ end
+
+ describe '#rollback_pending?' do
+ set(:project) { create(:project, :empty_repo) }
+
+ it 'returns true when there are RollbackerWorker jobs scheduled' do
+ Sidekiq::Testing.disable! do
+ ::HashedStorage::RollbackerWorker.perform_async(1, 5)
+
+ expect(subject.rollback_pending?).to be_truthy
+ end
+ end
+
+ it 'returns true when there are ProjectRollbackWorker jobs scheduled' do
+ Sidekiq::Testing.disable! do
+ ::HashedStorage::ProjectRollbackWorker.perform_async(1)
+
+ expect(subject.rollback_pending?).to be_truthy
+ end
+ end
+
+ it 'returns false when queues are empty' do
+ expect(subject.rollback_pending?).to be_falsey
+ end
+ end
end
diff --git a/spec/lib/gitlab/json_cache_spec.rb b/spec/lib/gitlab/json_cache_spec.rb
index 2cae8ec031a..b82c09af306 100644
--- a/spec/lib/gitlab/json_cache_spec.rb
+++ b/spec/lib/gitlab/json_cache_spec.rb
@@ -7,7 +7,7 @@ describe Gitlab::JsonCache do
let(:namespace) { 'geo' }
let(:key) { 'foo' }
let(:expanded_key) { "#{namespace}:#{key}:#{Rails.version}" }
- let(:broadcast_message) { create(:broadcast_message) }
+ set(:broadcast_message) { create(:broadcast_message) }
subject(:cache) { described_class.new(namespace: namespace, backend: backend) }
@@ -146,6 +146,18 @@ describe Gitlab::JsonCache do
expect(cache.read(key, BroadcastMessage)).to be_nil
end
+
+ it 'gracefully handles excluded fields from attributes during serialization' do
+ allow(backend).to receive(:read)
+ .with(expanded_key)
+ .and_return(broadcast_message.attributes.except("message_html").to_json)
+
+ result = cache.read(key, BroadcastMessage)
+
+ BroadcastMessage.cached_markdown_fields.html_fields.each do |field|
+ expect(result.public_send(field)).to be_nil
+ end
+ end
end
context 'when the cached value is an array' do
@@ -321,6 +333,46 @@ describe Gitlab::JsonCache do
expect(result).to be_new_record
end
+
+ it 'gracefully handles bad cached entry' do
+ allow(backend).to receive(:read)
+ .with(expanded_key)
+ .and_return('{')
+
+ result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+
+ expect(result).to eq 'block result'
+ end
+
+ it 'gracefully handles an empty hash' do
+ allow(backend).to receive(:read)
+ .with(expanded_key)
+ .and_return('{}')
+
+ expect(cache.fetch(key, as: BroadcastMessage)).to be_a(BroadcastMessage)
+ end
+
+ it 'gracefully handles unknown attributes' do
+ allow(backend).to receive(:read)
+ .with(expanded_key)
+ .and_return(broadcast_message.attributes.merge(unknown_attribute: 1).to_json)
+
+ result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+
+ expect(result).to eq 'block result'
+ end
+
+ it 'gracefully handles excluded fields from attributes during serialization' do
+ allow(backend).to receive(:read)
+ .with(expanded_key)
+ .and_return(broadcast_message.attributes.except("message_html").to_json)
+
+ result = cache.fetch(key, as: BroadcastMessage) { 'block result' }
+
+ BroadcastMessage.cached_markdown_fields.html_fields.each do |field|
+ expect(result.public_send(field)).to be_nil
+ end
+ end
end
it "returns the result of the block when 'as' option is nil" do
diff --git a/spec/lib/gitlab/kubernetes_spec.rb b/spec/lib/gitlab/kubernetes_spec.rb
index f326d57e9c6..57b570a9166 100644
--- a/spec/lib/gitlab/kubernetes_spec.rb
+++ b/spec/lib/gitlab/kubernetes_spec.rb
@@ -40,10 +40,40 @@ describe Gitlab::Kubernetes do
describe '#filter_by_label' do
it 'returns matching labels' do
- matching_items = [kube_pod(app: 'foo')]
+ matching_items = [kube_pod(track: 'foo'), kube_deployment(track: 'foo')]
+ items = matching_items + [kube_pod, kube_deployment]
+
+ expect(filter_by_label(items, 'track' => 'foo')).to eq(matching_items)
+ end
+ end
+
+ describe '#filter_by_annotation' do
+ it 'returns matching annotations' do
+ matching_items = [kube_pod(environment_slug: 'foo'), kube_deployment(environment_slug: 'foo')]
+ items = matching_items + [kube_pod, kube_deployment]
+
+ expect(filter_by_annotation(items, 'app.gitlab.com/env' => 'foo')).to eq(matching_items)
+ end
+ end
+
+ describe '#filter_by_project_environment' do
+ let(:matching_pod) { kube_pod(environment_slug: 'production', project_slug: 'my-cool-app') }
+
+ it 'returns matching legacy env label' do
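+ # Simulate a legacy deployment that carries only the 'app' label and none of
+ # the newer app.gitlab.com/* annotations.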
+ matching_pod['metadata']['annotations'].delete('app.gitlab.com/app')
+ matching_pod['metadata']['annotations'].delete('app.gitlab.com/env')
+ matching_pod['metadata']['labels']['app'] = 'production'
+ matching_items = [matching_pod]
+ items = matching_items + [kube_pod]
+
+ expect(filter_by_project_environment(items, 'my-cool-app', 'production')).to eq(matching_items)
+ end
+
+ it 'returns matching env label' do
+ matching_items = [matching_pod]
items = matching_items + [kube_pod]
- expect(filter_by_label(items, app: 'foo')).to eq(matching_items)
+ expect(filter_by_project_environment(items, 'my-cool-app', 'production')).to eq(matching_items)
end
end
diff --git a/spec/lib/gitlab/middleware/basic_health_check_spec.rb b/spec/lib/gitlab/middleware/basic_health_check_spec.rb
index 187d903a5e1..86bdc479b66 100644
--- a/spec/lib/gitlab/middleware/basic_health_check_spec.rb
+++ b/spec/lib/gitlab/middleware/basic_health_check_spec.rb
@@ -28,6 +28,35 @@ describe Gitlab::Middleware::BasicHealthCheck do
end
end
+ context 'with X-Forwarded-For headers' do
+ let(:load_balancer_ip) { '1.2.3.4' }
+
+ before do
+ env['HTTP_X_FORWARDED_FOR'] = "#{load_balancer_ip}, 127.0.0.1"
+ env['REMOTE_ADDR'] = '127.0.0.1'
+ env['PATH_INFO'] = described_class::HEALTH_PATH
+ end
+
+ it 'returns 200 response when endpoint is allowed' do
+ allow(Settings.monitoring).to receive(:ip_whitelist).and_return([load_balancer_ip])
+ expect(app).not_to receive(:call)
+
+ response = middleware.call(env)
+
+ expect(response[0]).to eq(200)
+ expect(response[1]).to eq({ 'Content-Type' => 'text/plain' })
+ expect(response[2]).to eq(['GitLab OK'])
+ end
+
+ it 'returns 404 when whitelist is not configured' do
+ allow(Settings.monitoring).to receive(:ip_whitelist).and_return([])
+
+ response = middleware.call(env)
+
+ expect(response[0]).to eq(404)
+ end
+ end
+
context 'whitelisted IP' do
before do
env['REMOTE_ADDR'] = '127.0.0.1'
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index 6831274d37c..4a41d5cf51e 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -412,4 +412,36 @@ describe Gitlab::ProjectSearchResults do
end
end
end
+
+ describe 'user search' do
+ it 'returns the user belonging to the project matching the search query' do
+ project = create(:project)
+
+ user1 = create(:user, username: 'gob_bluth')
+ create(:project_member, :developer, user: user1, project: project)
+
+ user2 = create(:user, username: 'michael_bluth')
+ create(:project_member, :developer, user: user2, project: project)
+
+ create(:user, username: 'gob_2018')
+
+ result = described_class.new(user, project, 'gob').objects('users')
+
+ expect(result).to eq [user1]
+ end
+
+ it 'returns the user belonging to the group matching the search query' do
+ group = create(:group)
+ project = create(:project, namespace: group)
+
+ user1 = create(:user, username: 'gob_bluth')
+ create(:group_member, :developer, user: user1, group: group)
+
+ create(:user, username: 'gob_2018')
+
+ result = described_class.new(user, project, 'gob').objects('users')
+
+ expect(result).to eq [user1]
+ end
+ end
end
diff --git a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb
index 4139d1c650c..d982053d92e 100644
--- a/spec/lib/gitlab/reference_extractor_spec.rb
+++ b/spec/lib/gitlab/reference_extractor_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Gitlab::ReferenceExtractor do
diff --git a/spec/lib/gitlab/request_context_spec.rb b/spec/lib/gitlab/request_context_spec.rb
index fd443cc1f71..23e45aff1c5 100644
--- a/spec/lib/gitlab/request_context_spec.rb
+++ b/spec/lib/gitlab/request_context_spec.rb
@@ -6,6 +6,31 @@ describe Gitlab::RequestContext do
let(:app) { -> (env) {} }
let(:env) { Hash.new }
+ context 'with X-Forwarded-For headers', :request_store do
+ let(:load_balancer_ip) { '1.2.3.4' }
+ let(:headers) do
+ {
+ 'HTTP_X_FORWARDED_FOR' => "#{load_balancer_ip}, 127.0.0.1",
+ 'REMOTE_ADDR' => '127.0.0.1'
+ }
+ end
+
+ let(:env) { Rack::MockRequest.env_for("/").merge(headers) }
+
+ it 'returns the load balancer IP' do
+ client_ip = nil
+
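+ # Minimal Rack endpoint that captures the client IP the middleware stored for this request.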
+ endpoint = proc do
+ client_ip = Gitlab::SafeRequestStore[:client_ip]
+ [200, {}, ["Hello"]]
+ end
+
+ described_class.new(endpoint).call(env)
+
+ expect(client_ip).to eq(load_balancer_ip)
+ end
+ end
+
context 'when RequestStore::Middleware is used' do
around do |example|
RequestStore::Middleware.new(-> (env) { example.run }).call({})
@@ -15,7 +40,7 @@ describe Gitlab::RequestContext do
let(:ip) { '192.168.1.11' }
before do
- allow_any_instance_of(ActionDispatch::Request).to receive(:ip).and_return(ip)
+ allow_any_instance_of(Rack::Request).to receive(:ip).and_return(ip)
described_class.new(app).call(env)
end
diff --git a/spec/lib/gitlab/route_map_spec.rb b/spec/lib/gitlab/route_map_spec.rb
index d672f7b5675..a39c774429e 100644
--- a/spec/lib/gitlab/route_map_spec.rb
+++ b/spec/lib/gitlab/route_map_spec.rb
@@ -60,7 +60,7 @@ describe Gitlab::RouteMap do
subject do
map = described_class.new(<<-"MAP".strip_heredoc)
- - source: '#{malicious_regexp}'
+ - source: '#{malicious_regexp_re2}'
public: '/'
MAP
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index 87288baedb0..4b57eecff93 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -121,6 +121,22 @@ describe Gitlab::SearchResults do
results.objects('issues')
end
end
+
+ describe '#users' do
+ it 'does not call the UsersFinder when the current_user is not allowed to read users list' do
+ allow(Ability).to receive(:allowed?).and_return(false)
+
+ expect(UsersFinder).not_to receive(:new).with(user, search: 'foo').and_call_original
+
+ results.objects('users')
+ end
+
+ it 'calls the UsersFinder' do
+ expect(UsersFinder).to receive(:new).with(user, search: 'foo').and_call_original
+
+ results.objects('users')
+ end
+ end
end
it 'does not list issues on private projects' do
diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb
index d6aadf0f7de..e2f09de2808 100644
--- a/spec/lib/gitlab/shell_spec.rb
+++ b/spec/lib/gitlab/shell_spec.rb
@@ -8,6 +8,7 @@ describe Gitlab::Shell do
let(:gitlab_shell) { described_class.new }
let(:popen_vars) { { 'GIT_TERMINAL_PROMPT' => ENV['GIT_TERMINAL_PROMPT'] } }
let(:timeout) { Gitlab.config.gitlab_shell.git_timeout }
+ let(:gitlab_authorized_keys) { double }
before do
allow(Project).to receive(:find).and_return(project)
@@ -49,13 +50,38 @@ describe Gitlab::Shell do
describe '#add_key' do
context 'when authorized_keys_enabled is true' do
- it 'removes trailing garbage' do
- allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path)
- expect(gitlab_shell).to receive(:gitlab_shell_fast_execute).with(
- [:gitlab_shell_keys_path, 'add-key', 'key-123', 'ssh-rsa foobar']
- )
+ context 'authorized_keys_file not set' do
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ allow(gitlab_shell)
+ .to receive(:gitlab_shell_keys_path)
+ .and_return(:gitlab_shell_keys_path)
+ end
+
+ it 'calls #gitlab_shell_fast_execute with add-key command' do
+ expect(gitlab_shell)
+ .to receive(:gitlab_shell_fast_execute)
+ .with([
+ :gitlab_shell_keys_path,
+ 'add-key',
+ 'key-123',
+ 'ssh-rsa foobar'
+ ])
+
+ gitlab_shell.add_key('key-123', 'ssh-rsa foobar trailing garbage')
+ end
+ end
+
+ context 'authorized_keys_file set' do
+ it 'calls Gitlab::AuthorizedKeys#add_key with id and key' do
+ expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
- gitlab_shell.add_key('key-123', 'ssh-rsa foobar trailing garbage')
+ expect(gitlab_authorized_keys)
+ .to receive(:add_key)
+ .with('key-123', 'ssh-rsa foobar')
+
+ gitlab_shell.add_key('key-123', 'ssh-rsa foobar')
+ end
end
end
@@ -64,10 +90,24 @@ describe Gitlab::Shell do
stub_application_setting(authorized_keys_enabled: false)
end
- it 'does nothing' do
- expect(gitlab_shell).not_to receive(:gitlab_shell_fast_execute)
+ context 'authorized_keys_file not set' do
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ end
+
+ it 'does nothing' do
+ expect(gitlab_shell).not_to receive(:gitlab_shell_fast_execute)
+
+ gitlab_shell.add_key('key-123', 'ssh-rsa foobar trailing garbage')
+ end
+ end
+
+ context 'authorized_keys_file set' do
+ it 'does nothing' do
+ expect(Gitlab::AuthorizedKeys).not_to receive(:new)
- gitlab_shell.add_key('key-123', 'ssh-rsa foobar trailing garbage')
+ gitlab_shell.add_key('key-123', 'ssh-rsa foobar trailing garbage')
+ end
end
end
@@ -76,24 +116,89 @@ describe Gitlab::Shell do
stub_application_setting(authorized_keys_enabled: nil)
end
- it 'removes trailing garbage' do
- allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path)
- expect(gitlab_shell).to receive(:gitlab_shell_fast_execute).with(
- [:gitlab_shell_keys_path, 'add-key', 'key-123', 'ssh-rsa foobar']
- )
+ context 'authorized_keys_file not set' do
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ allow(gitlab_shell)
+ .to receive(:gitlab_shell_keys_path)
+ .and_return(:gitlab_shell_keys_path)
+ end
+
+ it 'calls #gitlab_shell_fast_execute with add-key command' do
+ expect(gitlab_shell)
+ .to receive(:gitlab_shell_fast_execute)
+ .with([
+ :gitlab_shell_keys_path,
+ 'add-key',
+ 'key-123',
+ 'ssh-rsa foobar'
+ ])
+
+ gitlab_shell.add_key('key-123', 'ssh-rsa foobar trailing garbage')
+ end
+ end
+
+ context 'authorized_keys_file set' do
+ it 'calls Gitlab::AuthorizedKeys#add_key with id and key' do
+ expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
+
+ expect(gitlab_authorized_keys)
+ .to receive(:add_key)
+ .with('key-123', 'ssh-rsa foobar')
- gitlab_shell.add_key('key-123', 'ssh-rsa foobar trailing garbage')
+ gitlab_shell.add_key('key-123', 'ssh-rsa foobar')
+ end
end
end
end
describe '#batch_add_keys' do
+ let(:keys) { [double(shell_id: 'key-123', key: 'ssh-rsa foobar')] }
+
context 'when authorized_keys_enabled is true' do
- it 'instantiates KeyAdder' do
- expect_any_instance_of(Gitlab::Shell::KeyAdder).to receive(:add_key).with('key-123', 'ssh-rsa foobar')
+ context 'authorized_keys_file not set' do
+ let(:io) { double }
+
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ end
+
+ context 'valid keys' do
+ before do
+ allow(gitlab_shell)
+ .to receive(:gitlab_shell_keys_path)
+ .and_return(:gitlab_shell_keys_path)
+ end
+
+ it 'calls gitlab-keys with batch-add-keys command' do
+ expect(IO)
+ .to receive(:popen)
+ .with("gitlab_shell_keys_path batch-add-keys", 'w')
+ .and_yield(io)
+
+ expect(io).to receive(:puts).with("key-123\tssh-rsa foobar")
+ expect(gitlab_shell.batch_add_keys(keys)).to be_truthy
+ end
+ end
+
+ context 'invalid keys' do
+ let(:keys) { [double(shell_id: 'key-123', key: "ssh-rsa A\tSDFA\nSGADG")] }
+
+ it 'catches failure and returns false' do
+ expect(gitlab_shell.batch_add_keys(keys)).to be_falsey
+ end
+ end
+ end
- gitlab_shell.batch_add_keys do |adder|
- adder.add_key('key-123', 'ssh-rsa foobar')
+ context 'authorized_keys_file set' do
+ it 'calls Gitlab::AuthorizedKeys#batch_add_keys with keys to be added' do
+ expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
+
+ expect(gitlab_authorized_keys)
+ .to receive(:batch_add_keys)
+ .with(keys)
+
+ gitlab_shell.batch_add_keys(keys)
end
end
end
@@ -103,11 +208,23 @@ describe Gitlab::Shell do
stub_application_setting(authorized_keys_enabled: false)
end
- it 'does nothing' do
- expect_any_instance_of(Gitlab::Shell::KeyAdder).not_to receive(:add_key)
+ context 'authorized_keys_file not set' do
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ end
+
+ it 'does nothing' do
+ expect(IO).not_to receive(:popen)
+
+ gitlab_shell.batch_add_keys(keys)
+ end
+ end
+
+ context 'authorized_keys_file set' do
+ it 'does nothing' do
+ expect(Gitlab::AuthorizedKeys).not_to receive(:new)
- gitlab_shell.batch_add_keys do |adder|
- adder.add_key('key-123', 'ssh-rsa foobar')
+ gitlab_shell.batch_add_keys(keys)
end
end
end
@@ -117,11 +234,37 @@ describe Gitlab::Shell do
stub_application_setting(authorized_keys_enabled: nil)
end
- it 'instantiates KeyAdder' do
- expect_any_instance_of(Gitlab::Shell::KeyAdder).to receive(:add_key).with('key-123', 'ssh-rsa foobar')
+ context 'authorized_keys_file not set' do
+ let(:io) { double }
- gitlab_shell.batch_add_keys do |adder|
- adder.add_key('key-123', 'ssh-rsa foobar')
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ allow(gitlab_shell)
+ .to receive(:gitlab_shell_keys_path)
+ .and_return(:gitlab_shell_keys_path)
+ end
+
+ it 'calls gitlab-keys with batch-add-keys command' do
+ expect(IO)
+ .to receive(:popen)
+ .with("gitlab_shell_keys_path batch-add-keys", 'w')
+ .and_yield(io)
+
+ expect(io).to receive(:puts).with("key-123\tssh-rsa foobar")
+
+ gitlab_shell.batch_add_keys(keys)
+ end
+ end
+
+ context 'authorized_keys_file set' do
+ it 'calls Gitlab::AuthorizedKeys#batch_add_keys with keys to be added' do
+ expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
+
+ expect(gitlab_authorized_keys)
+ .to receive(:batch_add_keys)
+ .with(keys)
+
+ gitlab_shell.batch_add_keys(keys)
end
end
end
@@ -129,13 +272,34 @@ describe Gitlab::Shell do
describe '#remove_key' do
context 'when authorized_keys_enabled is true' do
- it 'removes trailing garbage' do
- allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path)
- expect(gitlab_shell).to receive(:gitlab_shell_fast_execute).with(
- [:gitlab_shell_keys_path, 'rm-key', 'key-123', 'ssh-rsa foobar']
- )
+ context 'authorized_keys_file not set' do
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ allow(gitlab_shell)
+ .to receive(:gitlab_shell_keys_path)
+ .and_return(:gitlab_shell_keys_path)
+ end
- gitlab_shell.remove_key('key-123', 'ssh-rsa foobar')
+ it 'calls #gitlab_shell_fast_execute with rm-key command' do
+ expect(gitlab_shell)
+ .to receive(:gitlab_shell_fast_execute)
+ .with([
+ :gitlab_shell_keys_path,
+ 'rm-key',
+ 'key-123'
+ ])
+
+ gitlab_shell.remove_key('key-123')
+ end
+ end
+
+ context 'authorized_keys_file set' do
+ it 'calls Gitlab::AuthorizedKeys#rm_key with the key to be removed' do
+ expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
+ expect(gitlab_authorized_keys).to receive(:rm_key).with('key-123')
+
+ gitlab_shell.remove_key('key-123')
+ end
end
end
@@ -144,10 +308,24 @@ describe Gitlab::Shell do
stub_application_setting(authorized_keys_enabled: false)
end
- it 'does nothing' do
- expect(gitlab_shell).not_to receive(:gitlab_shell_fast_execute)
+ context 'authorized_keys_file not set' do
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ end
+
+ it 'does nothing' do
+ expect(gitlab_shell).not_to receive(:gitlab_shell_fast_execute)
- gitlab_shell.remove_key('key-123', 'ssh-rsa foobar')
+ gitlab_shell.remove_key('key-123')
+ end
+ end
+
+ context 'authorized_keys_file set' do
+ it 'does nothing' do
+ expect(Gitlab::AuthorizedKeys).not_to receive(:new)
+
+ gitlab_shell.remove_key('key-123')
+ end
end
end
@@ -156,232 +334,256 @@ describe Gitlab::Shell do
stub_application_setting(authorized_keys_enabled: nil)
end
- it 'removes trailing garbage' do
- allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path)
- expect(gitlab_shell).to receive(:gitlab_shell_fast_execute).with(
- [:gitlab_shell_keys_path, 'rm-key', 'key-123', 'ssh-rsa foobar']
- )
+ context 'authorized_keys_file not set' do
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ allow(gitlab_shell)
+ .to receive(:gitlab_shell_keys_path)
+ .and_return(:gitlab_shell_keys_path)
+ end
+
+ it 'calls #gitlab_shell_fast_execute with rm-key command' do
+ expect(gitlab_shell)
+ .to receive(:gitlab_shell_fast_execute)
+ .with([
+ :gitlab_shell_keys_path,
+ 'rm-key',
+ 'key-123'
+ ])
- gitlab_shell.remove_key('key-123', 'ssh-rsa foobar')
+ gitlab_shell.remove_key('key-123')
+ end
end
- end
- context 'when key content is not given' do
- it 'calls rm-key with only one argument' do
- allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path)
- expect(gitlab_shell).to receive(:gitlab_shell_fast_execute).with(
- [:gitlab_shell_keys_path, 'rm-key', 'key-123']
- )
+ context 'authorized_keys_file set' do
+ it 'calls Gitlab::AuthorizedKeys#rm_key with the key to be removed' do
+ expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
+ expect(gitlab_authorized_keys).to receive(:rm_key).with('key-123')
- gitlab_shell.remove_key('key-123')
+ gitlab_shell.remove_key('key-123')
+ end
end
end
end
describe '#remove_all_keys' do
context 'when authorized_keys_enabled is true' do
- it 'removes trailing garbage' do
- allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path)
- expect(gitlab_shell).to receive(:gitlab_shell_fast_execute).with([:gitlab_shell_keys_path, 'clear'])
+ context 'authorized_keys_file not set' do
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ allow(gitlab_shell)
+ .to receive(:gitlab_shell_keys_path)
+ .and_return(:gitlab_shell_keys_path)
+ end
- gitlab_shell.remove_all_keys
- end
- end
+ it 'calls #gitlab_shell_fast_execute with clear command' do
+ expect(gitlab_shell)
+ .to receive(:gitlab_shell_fast_execute)
+ .with([:gitlab_shell_keys_path, 'clear'])
- context 'when authorized_keys_enabled is false' do
- before do
- stub_application_setting(authorized_keys_enabled: false)
+ gitlab_shell.remove_all_keys
+ end
end
- it 'does nothing' do
- expect(gitlab_shell).not_to receive(:gitlab_shell_fast_execute)
+ context 'authorized_keys_file set' do
+ it 'calls Gitlab::AuthorizedKeys#clear' do
+ expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
+ expect(gitlab_authorized_keys).to receive(:clear)
- gitlab_shell.remove_all_keys
+ gitlab_shell.remove_all_keys
+ end
end
end
- context 'when authorized_keys_enabled is nil' do
+ context 'when authorized_keys_enabled is false' do
before do
- stub_application_setting(authorized_keys_enabled: nil)
+ stub_application_setting(authorized_keys_enabled: false)
end
- it 'removes trailing garbage' do
- allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path)
- expect(gitlab_shell).to receive(:gitlab_shell_fast_execute).with(
- [:gitlab_shell_keys_path, 'clear']
- )
+ context 'authorized_keys_file not set' do
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ end
- gitlab_shell.remove_all_keys
- end
- end
- end
+ it 'does nothing' do
+ expect(gitlab_shell).not_to receive(:gitlab_shell_fast_execute)
- describe '#remove_keys_not_found_in_db' do
- context 'when keys are in the file that are not in the DB' do
- before do
- gitlab_shell.remove_all_keys
- gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
- gitlab_shell.add_key('key-9876', 'ssh-rsa ASDFASDF')
- @another_key = create(:key) # this one IS in the DB
+ gitlab_shell.remove_all_keys
+ end
end
- it 'removes the keys' do
- expect(find_in_authorized_keys_file(1234)).to be_truthy
- expect(find_in_authorized_keys_file(9876)).to be_truthy
- expect(find_in_authorized_keys_file(@another_key.id)).to be_truthy
- gitlab_shell.remove_keys_not_found_in_db
- expect(find_in_authorized_keys_file(1234)).to be_falsey
- expect(find_in_authorized_keys_file(9876)).to be_falsey
- expect(find_in_authorized_keys_file(@another_key.id)).to be_truthy
+ context 'authorized_keys_file set' do
+ it 'does nothing' do
+ expect(Gitlab::AuthorizedKeys).not_to receive(:new)
+
+ gitlab_shell.remove_all_keys
+ end
end
end
- context 'when keys there are duplicate keys in the file that are not in the DB' do
+ context 'when authorized_keys_enabled is nil' do
before do
- gitlab_shell.remove_all_keys
- gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
- gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
+ stub_application_setting(authorized_keys_enabled: nil)
end
- it 'removes the keys' do
- expect(find_in_authorized_keys_file(1234)).to be_truthy
- gitlab_shell.remove_keys_not_found_in_db
- expect(find_in_authorized_keys_file(1234)).to be_falsey
- end
+ context 'authorized_keys_file not set' do
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ allow(gitlab_shell)
+ .to receive(:gitlab_shell_keys_path)
+ .and_return(:gitlab_shell_keys_path)
+ end
- it 'does not run remove more than once per key (in a batch)' do
- expect(gitlab_shell).to receive(:remove_key).with('key-1234').once
- gitlab_shell.remove_keys_not_found_in_db
- end
- end
+ it 'calls #gitlab_shell_fast_execute with clear command' do
+ expect(gitlab_shell)
+ .to receive(:gitlab_shell_fast_execute)
+ .with([:gitlab_shell_keys_path, 'clear'])
- context 'when keys there are duplicate keys in the file that ARE in the DB' do
- before do
- gitlab_shell.remove_all_keys
- @key = create(:key)
- gitlab_shell.add_key(@key.shell_id, @key.key)
+ gitlab_shell.remove_all_keys
+ end
end
- it 'does not remove the key' do
- gitlab_shell.remove_keys_not_found_in_db
- expect(find_in_authorized_keys_file(@key.id)).to be_truthy
- end
+ context 'authorized_keys_file set' do
+ it 'calls Gitlab::AuthorizedKeys#clear' do
+ expect(Gitlab::AuthorizedKeys).to receive(:new).and_return(gitlab_authorized_keys)
+ expect(gitlab_authorized_keys).to receive(:clear)
- it 'does not need to run a SELECT query for that batch, on account of that key' do
- expect_any_instance_of(ActiveRecord::Relation).not_to receive(:pluck)
- gitlab_shell.remove_keys_not_found_in_db
+ gitlab_shell.remove_all_keys
+ end
end
end
+ end
- unless ENV['CI'] # Skip in CI, it takes 1 minute
- context 'when the first batch can be skipped, but the next batch has keys that are not in the DB' do
+ describe '#remove_keys_not_found_in_db' do
+ context 'when keys are in the file that are not in the DB' do
+ context 'authorized_keys_file not set' do
before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
gitlab_shell.remove_all_keys
- 100.times { |i| create(:key) } # first batch is all in the DB
gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
+ gitlab_shell.add_key('key-9876', 'ssh-rsa ASDFASDF')
+ @another_key = create(:key) # this one IS in the DB
end
- it 'removes the keys not in the DB' do
- expect(find_in_authorized_keys_file(1234)).to be_truthy
+ it 'removes the keys' do
+ expect(gitlab_shell).to receive(:remove_key).with('key-1234')
+ expect(gitlab_shell).to receive(:remove_key).with('key-9876')
+ expect(gitlab_shell).not_to receive(:remove_key).with("key-#{@another_key.id}")
+
gitlab_shell.remove_keys_not_found_in_db
- expect(find_in_authorized_keys_file(1234)).to be_falsey
end
end
- end
- end
- describe '#batch_read_key_ids' do
- context 'when there are keys in the authorized_keys file' do
- before do
- gitlab_shell.remove_all_keys
- (1..4).each do |i|
- gitlab_shell.add_key("key-#{i}", "ssh-rsa ASDFASDF#{i}")
+ context 'authorized_keys_file set' do
+ before do
+ gitlab_shell.remove_all_keys
+ gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
+ gitlab_shell.add_key('key-9876', 'ssh-rsa ASDFASDF')
+ @another_key = create(:key) # this one IS in the DB
end
- end
- it 'iterates over the key IDs in the file, in batches' do
- loop_count = 0
- first_batch = [1, 2]
- second_batch = [3, 4]
+ it 'removes the keys' do
+ expect(gitlab_shell).to receive(:remove_key).with('key-1234')
+ expect(gitlab_shell).to receive(:remove_key).with('key-9876')
+ expect(gitlab_shell).not_to receive(:remove_key).with("key-#{@another_key.id}")
- gitlab_shell.batch_read_key_ids(batch_size: 2) do |batch|
- expected = (loop_count == 0 ? first_batch : second_batch)
- expect(batch).to eq(expected)
- loop_count += 1
+ gitlab_shell.remove_keys_not_found_in_db
end
end
end
- end
- describe '#list_key_ids' do
- context 'when there are keys in the authorized_keys file' do
- before do
- gitlab_shell.remove_all_keys
- (1..4).each do |i|
- gitlab_shell.add_key("key-#{i}", "ssh-rsa ASDFASDF#{i}")
+ context 'when there are duplicate keys in the file that are not in the DB' do
+ context 'authorized_keys_file not set' do
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ gitlab_shell.remove_all_keys
+ gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
+ gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
+ end
+
+ it 'removes the keys' do
+ expect(gitlab_shell).to receive(:remove_key).with('key-1234')
+
+ gitlab_shell.remove_keys_not_found_in_db
end
end
- it 'outputs the key IDs in the file, separated by newlines' do
- ids = []
- gitlab_shell.list_key_ids do |io|
- io.each do |line|
- ids << line
- end
+ context 'authorized_keys_file set' do
+ before do
+ gitlab_shell.remove_all_keys
+ gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
+ gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
end
- expect(ids).to eq(%W{1\n 2\n 3\n 4\n})
- end
- end
+ it 'removes the keys' do
+ expect(gitlab_shell).to receive(:remove_key).with('key-1234')
- context 'when there are no keys in the authorized_keys file' do
- before do
- gitlab_shell.remove_all_keys
+ gitlab_shell.remove_keys_not_found_in_db
+ end
end
+ end
- it 'outputs nothing, not even an empty string' do
- ids = []
- gitlab_shell.list_key_ids do |io|
- io.each do |line|
- ids << line
- end
+ context 'when there are duplicate keys in the file that ARE in the DB' do
+ context 'authorized_keys_file not set' do
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ gitlab_shell.remove_all_keys
+ @key = create(:key)
+ gitlab_shell.add_key(@key.shell_id, @key.key)
end
- expect(ids).to eq([])
+ it 'does not remove the key' do
+ expect(gitlab_shell).not_to receive(:remove_key).with("key-#{@key.id}")
+
+ gitlab_shell.remove_keys_not_found_in_db
+ end
end
- end
- end
- describe Gitlab::Shell::KeyAdder do
- describe '#add_key' do
- it 'removes trailing garbage' do
- io = spy(:io)
- adder = described_class.new(io)
+ context 'authorized_keys_file set' do
+ before do
+ gitlab_shell.remove_all_keys
+ @key = create(:key)
+ gitlab_shell.add_key(@key.shell_id, @key.key)
+ end
- adder.add_key('key-42', "ssh-rsa foo bar\tbaz")
+ it 'does not remove the key' do
+ expect(gitlab_shell).not_to receive(:remove_key).with("key-#{@key.id}")
- expect(io).to have_received(:puts).with("key-42\tssh-rsa foo")
+ gitlab_shell.remove_keys_not_found_in_db
+ end
end
+ end
- it 'handles multiple spaces in the key' do
- io = spy(:io)
- adder = described_class.new(io)
+ unless ENV['CI'] # Skip in CI, it takes 1 minute
+ context 'when the first batch can be skipped, but the next batch has keys that are not in the DB' do
+ context 'authorized_keys_file not set' do
+ before do
+ stub_gitlab_shell_setting(authorized_keys_file: nil)
+ gitlab_shell.remove_all_keys
+ 100.times { |i| create(:key) } # first batch is all in the DB
+ gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
+ end
- adder.add_key('key-42', "ssh-rsa foo")
+ it 'removes the keys not in the DB' do
+ expect(gitlab_shell).to receive(:remove_key).with('key-1234')
- expect(io).to have_received(:puts).with("key-42\tssh-rsa foo")
- end
+ gitlab_shell.remove_keys_not_found_in_db
+ end
+ end
- it 'raises an exception if the key contains a tab' do
- expect do
- described_class.new(StringIO.new).add_key('key-42', "ssh-rsa\tfoobar")
- end.to raise_error(Gitlab::Shell::Error)
- end
+ context 'authorized_keys_file set' do
+ before do
+ gitlab_shell.remove_all_keys
+ 100.times { |i| create(:key) } # first batch is all in the DB
+ gitlab_shell.add_key('key-1234', 'ssh-rsa ASDFASDF')
+ end
+
+ it 'removes the keys not in the DB' do
+ expect(gitlab_shell).to receive(:remove_key).with('key-1234')
- it 'raises an exception if the key contains a newline' do
- expect do
- described_class.new(StringIO.new).add_key('key-42', "ssh-rsa foobar\nssh-rsa pawned")
- end.to raise_error(Gitlab::Shell::Error)
+ gitlab_shell.remove_keys_not_found_in_db
+ end
+ end
end
end
end
@@ -566,12 +768,4 @@ describe Gitlab::Shell do
end
end
end
-
- def find_in_authorized_keys_file(key_id)
- gitlab_shell.batch_read_key_ids do |ids|
- return true if ids.include?(key_id) # rubocop:disable Cop/AvoidReturnFromBlocks
- end
-
- false
- end
end
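
A rough sketch (not the actual implementation) of the dispatch these shell specs exercise: when an authorized_keys_file path is configured, key management goes through Gitlab::AuthorizedKeys, otherwise it shells out to gitlab-keys via #gitlab_shell_fast_execute. Helper names such as authorized_keys_enabled? and strip_key are assumptions for illustration only.

def add_key(key_id, key_content)
  return unless authorized_keys_enabled? # application setting checked in the specs above

  if Gitlab.config.gitlab_shell.authorized_keys_file.present?
    # file path configured: manage the authorized_keys file directly
    Gitlab::AuthorizedKeys.new.add_key(key_id, strip_key(key_content))
  else
    # no path configured: fall back to the gitlab-keys executable
    gitlab_shell_fast_execute(
      [gitlab_shell_keys_path, 'add-key', key_id, strip_key(key_content)]
    )
  end
end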
diff --git a/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb b/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb
index ff8c0825ee4..1a5a38b5d99 100644
--- a/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb
@@ -54,7 +54,7 @@ describe Gitlab::SidekiqMiddleware::MemoryKiller do
expect(Process).to receive(:kill).with('SIGTSTP', pid).ordered
expect(Process).to receive(:kill).with('SIGTERM', pid).ordered
- expect(Process).to receive(:kill).with('SIGKILL', "-#{pid}").ordered
+ expect(Process).to receive(:kill).with('SIGKILL', 0).ordered
run
end
diff --git a/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb b/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb
new file mode 100644
index 00000000000..005d41580de
--- /dev/null
+++ b/spec/lib/gitlab/untrusted_regexp/ruby_syntax_spec.rb
@@ -0,0 +1,72 @@
+require 'fast_spec_helper'
+require 'support/shared_examples/malicious_regexp_shared_examples'
+
+describe Gitlab::UntrustedRegexp::RubySyntax do
+ describe '.matches_syntax?' do
+ it 'returns true if regexp is valid' do
+ expect(described_class.matches_syntax?('/some .* thing/'))
+ .to be true
+ end
+
+ it 'returns true if regexp is invalid, but resembles regexp' do
+ expect(described_class.matches_syntax?('/some ( thing/'))
+ .to be true
+ end
+ end
+
+ describe '.valid?' do
+ it 'returns true if regexp is valid' do
+ expect(described_class.valid?('/some .* thing/'))
+ .to be true
+ end
+
+ it 'returns false if regexp is invalid' do
+ expect(described_class.valid?('/some ( thing/'))
+ .to be false
+ end
+ end
+
+ describe '.fabricate' do
+ context 'when regexp is valid' do
+ it 'fabricates regexp without flags' do
+ expect(described_class.fabricate('/some .* thing/')).not_to be_nil
+ end
+ end
+
+ context 'when regexp is a raw pattern' do
+ it 'returns nil' do
+ expect(described_class.fabricate('some .* thing')).to be_nil
+ end
+ end
+ end
+
+ describe '.fabricate!' do
+ context 'when regexp is using /regexp/ scheme with flags' do
+ it 'fabricates regexp with a single flag' do
+ regexp = described_class.fabricate!('/something/i')
+
+ expect(regexp).to eq Gitlab::UntrustedRegexp.new('(?i)something')
+ expect(regexp.scan('SOMETHING')).to be_one
+ end
+
+ it 'fabricates regexp with multiple flags' do
+ regexp = described_class.fabricate!('/something/im')
+
+ expect(regexp).to eq Gitlab::UntrustedRegexp.new('(?im)something')
+ end
+
+ it 'fabricates regexp without flags' do
+ regexp = described_class.fabricate!('/something/')
+
+ expect(regexp).to eq Gitlab::UntrustedRegexp.new('something')
+ end
+ end
+
+ context 'when regexp is a raw pattern' do
+ it 'raises an error' do
+ expect { described_class.fabricate!('some .* thing') }
+ .to raise_error(RegexpError)
+ end
+ end
+ end
+end
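
A minimal usage sketch consistent with the new spec above (the only_pattern value is hypothetical): .fabricate returns nil for input that is not in /pattern/flags form, while .fabricate! raises RegexpError instead.

only_pattern = '/^feature-.*/i'

if Gitlab::UntrustedRegexp::RubySyntax.matches_syntax?(only_pattern)
  regexp = Gitlab::UntrustedRegexp::RubySyntax.fabricate(only_pattern)
  regexp&.match?('FEATURE-123') # fabricate may return nil, hence the safe navigation
end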
diff --git a/spec/lib/gitlab/untrusted_regexp_spec.rb b/spec/lib/gitlab/untrusted_regexp_spec.rb
index 0a6ac0aa294..9d483f13a5e 100644
--- a/spec/lib/gitlab/untrusted_regexp_spec.rb
+++ b/spec/lib/gitlab/untrusted_regexp_spec.rb
@@ -2,48 +2,6 @@ require 'fast_spec_helper'
require 'support/shared_examples/malicious_regexp_shared_examples'
describe Gitlab::UntrustedRegexp do
- describe '.valid?' do
- it 'returns true if regexp is valid' do
- expect(described_class.valid?('/some ( thing/'))
- .to be false
- end
-
- it 'returns true if regexp is invalid' do
- expect(described_class.valid?('/some .* thing/'))
- .to be true
- end
- end
-
- describe '.fabricate' do
- context 'when regexp is using /regexp/ scheme with flags' do
- it 'fabricates regexp with a single flag' do
- regexp = described_class.fabricate('/something/i')
-
- expect(regexp).to eq described_class.new('(?i)something')
- expect(regexp.scan('SOMETHING')).to be_one
- end
-
- it 'fabricates regexp with multiple flags' do
- regexp = described_class.fabricate('/something/im')
-
- expect(regexp).to eq described_class.new('(?im)something')
- end
-
- it 'fabricates regexp without flags' do
- regexp = described_class.fabricate('/something/')
-
- expect(regexp).to eq described_class.new('something')
- end
- end
-
- context 'when regexp is a raw pattern' do
- it 'raises an error' do
- expect { described_class.fabricate('some .* thing') }
- .to raise_error(RegexpError)
- end
- end
- end
-
describe '#initialize' do
subject { described_class.new(pattern) }
@@ -92,11 +50,41 @@ describe Gitlab::UntrustedRegexp do
end
end
+ describe '#match?' do
+ subject { described_class.new(regexp).match?(text) }
+
+ context 'malicious regexp' do
+ let(:text) { malicious_text }
+ let(:regexp) { malicious_regexp_re2 }
+
+ include_examples 'malicious regexp'
+ end
+
+ context 'matching regexp' do
+ let(:regexp) { 'foo' }
+ let(:text) { 'foo' }
+
+ it 'returns true' do
+ is_expected.to eq(true)
+ end
+ end
+
+ context 'non-matching regexp' do
+ let(:regexp) { 'boo' }
+ let(:text) { 'foo' }
+
+ it 'returns false' do
+ is_expected.to eq(false)
+ end
+ end
+ end
+
describe '#scan' do
subject { described_class.new(regexp).scan(text) }
+
context 'malicious regexp' do
let(:text) { malicious_text }
- let(:regexp) { malicious_regexp }
+ let(:regexp) { malicious_regexp_re2 }
include_examples 'malicious regexp'
end
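
A small sketch of the #match? behaviour added above, assuming the RE2-backed Gitlab::UntrustedRegexp API described by these specs (boolean #match?, array-returning #scan):

regexp = Gitlab::UntrustedRegexp.new('foo')

regexp.match?('foo')    # => true
regexp.match?('boo')    # => false, no match in the text
regexp.scan('foo foo')  # => one entry per occurrence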
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index cd9e4d48cd1..549cc5ac057 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -13,6 +13,8 @@ describe Gitlab::UsageData do
create(:service, project: projects[0], type: 'SlackSlashCommandsService', active: true)
create(:service, project: projects[1], type: 'SlackService', active: true)
create(:service, project: projects[2], type: 'SlackService', active: true)
+ create(:project_error_tracking_setting, project: projects[0])
+ create(:project_error_tracking_setting, project: projects[1], enabled: false)
gcp_cluster = create(:cluster, :provided_by_gcp)
create(:cluster, :provided_by_user)
@@ -117,6 +119,7 @@ describe Gitlab::UsageData do
projects_slack_slash_active
projects_prometheus_active
projects_with_repositories_enabled
+ projects_with_error_tracking_enabled
pages_domains
protected_branches
releases
@@ -146,6 +149,7 @@ describe Gitlab::UsageData do
expect(count_data[:projects_slack_notifications_active]).to eq(2)
expect(count_data[:projects_slack_slash_active]).to eq(1)
expect(count_data[:projects_with_repositories_enabled]).to eq(2)
+ expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
expect(count_data[:clusters_enabled]).to eq(7)
expect(count_data[:project_clusters_enabled]).to eq(6)
diff --git a/spec/lib/gitlab/user_extractor_spec.rb b/spec/lib/gitlab/user_extractor_spec.rb
index fcc05ab3a0c..b86ec5445b8 100644
--- a/spec/lib/gitlab/user_extractor_spec.rb
+++ b/spec/lib/gitlab/user_extractor_spec.rb
@@ -38,6 +38,18 @@ describe Gitlab::UserExtractor do
expect(extractor.users).to include(user)
end
+
+ context 'input as array of strings' do
+ it 'is treated as one string' do
+ extractor = described_class.new(text.lines)
+
+ user_1 = create(:user, username: "USER-1")
+ user_4 = create(:user, username: "USER-4")
+ user_email = create(:user, email: 'user@gitlab.org')
+
+ expect(extractor.users).to contain_exactly(user_1, user_4, user_email)
+ end
+ end
end
describe '#matches' do
@@ -48,6 +60,14 @@ describe Gitlab::UserExtractor do
it 'includes all mentioned usernames' do
expect(extractor.matches[:usernames]).to contain_exactly('user-1', 'user-2', 'user-4')
end
+
+ context 'input has no matching e-mail or usernames' do
+ it 'returns an empty list of users' do
+ extractor = described_class.new('My test')
+
+ expect(extractor.users).to be_empty
+ end
+ end
end
describe '#references' do
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 8f5029b3565..4645339f439 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -213,4 +213,22 @@ describe Gitlab::Utils do
expect(subject[:variables].first[:key]).to eq('VAR1')
end
end
+
+ describe '.try_megabytes_to_bytes' do
+ context 'when the size can be converted to megabytes' do
+ it 'returns the size in megabytes' do
+ size = described_class.try_megabytes_to_bytes(1)
+
+ expect(size).to eq(1.megabytes)
+ end
+ end
+
+ context 'when the size cannot be converted to megabytes' do
+ it 'returns the input size' do
+ size = described_class.try_megabytes_to_bytes('foo')
+
+ expect(size).to eq('foo')
+ end
+ end
+ end
end
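
A minimal sketch consistent with these specs, assuming ActiveSupport's numeric extensions are available; not necessarily the real implementation.

def try_megabytes_to_bytes(size)
  Integer(size).megabytes
rescue ArgumentError, TypeError
  size # return the input unchanged when it cannot be converted
end

try_megabytes_to_bytes(1)     # => 1048576 (1.megabytes)
try_megabytes_to_bytes('foo') # => 'foo'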
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
index e2134dc279c..1fefc947636 100644
--- a/spec/lib/google_api/cloud_platform/client_spec.rb
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -97,6 +97,12 @@ describe GoogleApi::CloudPlatform::Client do
"node_config": {
"machine_type": machine_type
},
+ "master_auth": {
+ "username": "admin",
+ "client_certificate_config": {
+ issue_client_certificate: true
+ }
+ },
"legacy_abac": {
"enabled": true
}
@@ -122,6 +128,12 @@ describe GoogleApi::CloudPlatform::Client do
"node_config": {
"machine_type": machine_type
},
+ "master_auth": {
+ "username": "admin",
+ "client_certificate_config": {
+ issue_client_certificate: true
+ }
+ },
"legacy_abac": {
"enabled": false
}
diff --git a/spec/migrations/add_foreign_keys_to_todos_spec.rb b/spec/migrations/add_foreign_keys_to_todos_spec.rb
index efd87173b9c..2500e2f8333 100644
--- a/spec/migrations/add_foreign_keys_to_todos_spec.rb
+++ b/spec/migrations/add_foreign_keys_to_todos_spec.rb
@@ -36,7 +36,7 @@ describe AddForeignKeysToTodos, :migration do
end
context 'add foreign key on note_id' do
- let(:note) { create(:note) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ let(:note) { table(:notes).create! }
let!(:todo_with_note) { create_todo(note_id: note.id) }
let!(:todo_with_invalid_note) { create_todo(note_id: 4711) }
let!(:todo_without_note) { create_todo(note_id: nil) }
diff --git a/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb b/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb
index d8dd7a2fb83..13dc62595b5 100644
--- a/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb
+++ b/spec/migrations/add_head_pipeline_for_each_merge_request_spec.rb
@@ -1,23 +1,21 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170508170547_add_head_pipeline_for_each_merge_request.rb')
-describe AddHeadPipelineForEachMergeRequest, :delete do
- include ProjectForksHelper
-
+describe AddHeadPipelineForEachMergeRequest, :migration do
let(:migration) { described_class.new }
- let!(:project) { create(:project) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let!(:other_project) { fork_project(project) }
+ let!(:project) { table(:projects).create! }
+ let!(:other_project) { table(:projects).create! }
- let!(:pipeline_1) { create(:ci_pipeline, project: project, ref: "branch_1") } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let!(:pipeline_2) { create(:ci_pipeline, project: other_project, ref: "branch_1") } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let!(:pipeline_3) { create(:ci_pipeline, project: other_project, ref: "branch_1") } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let!(:pipeline_4) { create(:ci_pipeline, project: project, ref: "branch_2") } # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ let!(:pipeline_1) { table(:ci_pipelines).create!(project_id: project.id, ref: "branch_1") }
+ let!(:pipeline_2) { table(:ci_pipelines).create!(project_id: other_project.id, ref: "branch_1") }
+ let!(:pipeline_3) { table(:ci_pipelines).create!(project_id: other_project.id, ref: "branch_1") }
+ let!(:pipeline_4) { table(:ci_pipelines).create!(project_id: project.id, ref: "branch_2") }
- let!(:mr_1) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_1", target_branch: "target_1") } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let!(:mr_2) { create(:merge_request, source_project: other_project, target_project: project, source_branch: "branch_1", target_branch: "target_2") } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let!(:mr_3) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_2", target_branch: "master") } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let!(:mr_4) { create(:merge_request, source_project: project, target_project: project, source_branch: "branch_3", target_branch: "master") } # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ let!(:mr_1) { table(:merge_requests).create!(source_project_id: project.id, target_project_id: project.id, source_branch: "branch_1", target_branch: "target_1") }
+ let!(:mr_2) { table(:merge_requests).create!(source_project_id: other_project.id, target_project_id: project.id, source_branch: "branch_1", target_branch: "target_2") }
+ let!(:mr_3) { table(:merge_requests).create!(source_project_id: project.id, target_project_id: project.id, source_branch: "branch_2", target_branch: "master") }
+ let!(:mr_4) { table(:merge_requests).create!(source_project_id: project.id, target_project_id: project.id, source_branch: "branch_3", target_branch: "master") }
context "#up" do
context "when source_project and source_branch of pipeline are the same of merge request" do
diff --git a/spec/migrations/calculate_conv_dev_index_percentages_spec.rb b/spec/migrations/calculate_conv_dev_index_percentages_spec.rb
index 19f06810e54..09c78d02890 100644
--- a/spec/migrations/calculate_conv_dev_index_percentages_spec.rb
+++ b/spec/migrations/calculate_conv_dev_index_percentages_spec.rb
@@ -3,12 +3,30 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170803090603_calculate_conv_dev_index_percentages.rb')
-describe CalculateConvDevIndexPercentages, :delete do
+describe CalculateConvDevIndexPercentages, :migration do
let(:migration) { described_class.new }
let!(:conv_dev_index) do
- create(:conversational_development_index_metric, # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ table(:conversational_development_index_metrics).create!(
+ leader_issues: 9.256,
leader_notes: 0,
+ leader_milestones: 16.2456,
+ leader_boards: 5.2123,
+ leader_merge_requests: 1.2,
+ leader_ci_pipelines: 12.1234,
+ leader_environments: 3.3333,
+ leader_deployments: 1.200,
+ leader_projects_prometheus_active: 0.111,
+ leader_service_desk_issues: 15.891,
+ instance_issues: 1.234,
+ instance_notes: 28.123,
instance_milestones: 0,
+ instance_boards: 3.254,
+ instance_merge_requests: 0.6,
+ instance_ci_pipelines: 2.344,
+ instance_environments: 2.2222,
+ instance_deployments: 0.771,
+ instance_projects_prometheus_active: 0.109,
+ instance_service_desk_issues: 13.345,
percentage_issues: 0,
percentage_notes: 0,
percentage_milestones: 0,
diff --git a/spec/migrations/cleanup_nonexisting_namespace_pending_delete_projects_spec.rb b/spec/migrations/cleanup_nonexisting_namespace_pending_delete_projects_spec.rb
index 8f40ac3e38b..0e6bded29b4 100644
--- a/spec/migrations/cleanup_nonexisting_namespace_pending_delete_projects_spec.rb
+++ b/spec/migrations/cleanup_nonexisting_namespace_pending_delete_projects_spec.rb
@@ -1,20 +1,17 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170816102555_cleanup_nonexisting_namespace_pending_delete_projects.rb')
-describe CleanupNonexistingNamespacePendingDeleteProjects do
- before do
- # Stub after_save callbacks that will fail when Project has invalid namespace
- allow_any_instance_of(Project).to receive(:ensure_storage_path_exist).and_return(nil)
- allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil)
- end
+describe CleanupNonexistingNamespacePendingDeleteProjects, :migration do
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
describe '#up' do
- set(:some_project) { create(:project) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ let!(:some_project) { projects.create! }
+ let(:namespace) { namespaces.create!(name: 'test', path: 'test') }
it 'only cleans up when namespace does not exist' do
- create(:project, pending_delete: true) # rubocop:disable RSpec/FactoriesInMigrationSpecs
- project = build(:project, pending_delete: true, namespace: nil, namespace_id: Namespace.maximum(:id).to_i.succ) # rubocop:disable RSpec/FactoriesInMigrationSpecs
- project.save(validate: false)
+ projects.create!(pending_delete: true, namespace_id: namespace.id)
+ project = projects.create!(pending_delete: true, namespace_id: 0)
expect(NamespacelessProjectDestroyWorker).to receive(:bulk_perform_async).with([[project.id]])
@@ -22,7 +19,7 @@ describe CleanupNonexistingNamespacePendingDeleteProjects do
end
it 'does nothing when no pending delete projects without namespace found' do
- create(:project, pending_delete: true, namespace: create(:namespace)) # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ projects.create!(pending_delete: true, namespace_id: namespace.id)
expect(NamespacelessProjectDestroyWorker).not_to receive(:bulk_perform_async)
diff --git a/spec/migrations/delete_inconsistent_internal_id_records_spec.rb b/spec/migrations/delete_inconsistent_internal_id_records_spec.rb
index e2ce69a7bb1..58b8b4a16f0 100644
--- a/spec/migrations/delete_inconsistent_internal_id_records_spec.rb
+++ b/spec/migrations/delete_inconsistent_internal_id_records_spec.rb
@@ -1,25 +1,36 @@
# frozen_string_literal: true
-# rubocop:disable RSpec/FactoriesInMigrationSpecs
+
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180723130817_delete_inconsistent_internal_id_records.rb')
describe DeleteInconsistentInternalIdRecords, :migration do
- let!(:project1) { create(:project) }
- let!(:project2) { create(:project) }
- let!(:project3) { create(:project) }
+ let!(:namespace) { table(:namespaces).create!(name: 'test', path: 'test') }
+ let!(:project1) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:project2) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:project3) { table(:projects).create!(namespace_id: namespace.id) }
- let(:internal_id_query) { ->(project) { InternalId.where(usage: InternalId.usages[scope.to_s.tableize], project: project) } }
+ let(:internal_ids) { table(:internal_ids) }
+ let(:internal_id_query) { ->(project) { InternalId.where(usage: InternalId.usages[scope.to_s.tableize], project_id: project.id) } }
let(:create_models) do
- 3.times { create(scope, project: project1) }
- 3.times { create(scope, project: project2) }
- 3.times { create(scope, project: project3) }
+ [project1, project2, project3].each do |project|
+ 3.times do |i|
+ attributes = required_attributes.merge(project_id: project.id,
+ iid: i.succ)
+
+ table(scope.to_s.pluralize).create!(attributes)
+ end
+ end
end
shared_examples_for 'deleting inconsistent internal_id records' do
before do
create_models
+ [project1, project2, project3].each do |project|
+ internal_ids.create!(project_id: project.id, usage: InternalId.usages[scope.to_s.tableize], last_value: 3)
+ end
+
internal_id_query.call(project1).first.tap do |iid|
iid.last_value = iid.last_value - 2
# This is an inconsistent record
@@ -33,11 +44,11 @@ describe DeleteInconsistentInternalIdRecords, :migration do
end
end
- it "deletes inconsistent issues" do
+ it "deletes inconsistent records" do
expect { migrate! }.to change { internal_id_query.call(project1).size }.from(1).to(0)
end
- it "retains consistent issues" do
+ it "retains consistent records" do
expect { migrate! }.not_to change { internal_id_query.call(project2).size }
end
@@ -48,6 +59,8 @@ describe DeleteInconsistentInternalIdRecords, :migration do
context 'for issues' do
let(:scope) { :issue }
+ let(:required_attributes) { {} }
+
it_behaves_like 'deleting inconsistent internal_id records'
end
@@ -55,9 +68,17 @@ describe DeleteInconsistentInternalIdRecords, :migration do
let(:scope) { :merge_request }
let(:create_models) do
- 3.times { |i| create(scope, target_project: project1, source_project: project1, source_branch: i.to_s) }
- 3.times { |i| create(scope, target_project: project2, source_project: project2, source_branch: i.to_s) }
- 3.times { |i| create(scope, target_project: project3, source_project: project3, source_branch: i.to_s) }
+ [project1, project2, project3].each do |project|
+ 3.times do |i|
+ table(:merge_requests).create!(
+ target_project_id: project.id,
+ source_project_id: project.id,
+ target_branch: 'master',
+ source_branch: i.to_s,
+ iid: i.succ
+ )
+ end
+ end
end
it_behaves_like 'deleting inconsistent internal_id records'
@@ -66,13 +87,6 @@ describe DeleteInconsistentInternalIdRecords, :migration do
context 'for deployments' do
let(:scope) { :deployment }
let(:deployments) { table(:deployments) }
- let(:internal_ids) { table(:internal_ids) }
-
- before do
- internal_ids.create!(project_id: project1.id, usage: 2, last_value: 2)
- internal_ids.create!(project_id: project2.id, usage: 2, last_value: 2)
- internal_ids.create!(project_id: project3.id, usage: 2, last_value: 2)
- end
let(:create_models) do
3.times { |i| deployments.create!(project_id: project1.id, iid: i, environment_id: 1, ref: 'master', sha: 'a', tag: false) }
@@ -85,17 +99,14 @@ describe DeleteInconsistentInternalIdRecords, :migration do
context 'for milestones (by project)' do
let(:scope) { :milestone }
+ let(:required_attributes) { { title: 'test' } }
+
it_behaves_like 'deleting inconsistent internal_id records'
end
context 'for ci_pipelines' do
let(:scope) { :ci_pipeline }
-
- let(:create_models) do
- create_list(:ci_empty_pipeline, 3, project: project1)
- create_list(:ci_empty_pipeline, 3, project: project2)
- create_list(:ci_empty_pipeline, 3, project: project3)
- end
+ let(:required_attributes) { { ref: 'test' } }
it_behaves_like 'deleting inconsistent internal_id records'
end
@@ -107,12 +118,20 @@ describe DeleteInconsistentInternalIdRecords, :migration do
let(:group2) { groups.create(name: 'Group 2', type: 'Group', path: 'group_2') }
let(:group3) { groups.create(name: 'Group 2', type: 'Group', path: 'group_3') }
- let(:internal_id_query) { ->(group) { InternalId.where(usage: InternalId.usages['milestones'], namespace: group) } }
+ let(:internal_id_query) { ->(group) { InternalId.where(usage: InternalId.usages['milestones'], namespace_id: group.id) } }
before do
- 3.times { create(:milestone, group_id: group1.id) }
- 3.times { create(:milestone, group_id: group2.id) }
- 3.times { create(:milestone, group_id: group3.id) }
+ [group1, group2, group3].each do |group|
+ 3.times do |i|
+ table(:milestones).create!(
+ group_id: group.id,
+ title: 'test',
+ iid: i.succ
+ )
+ end
+
+ internal_ids.create!(namespace_id: group.id, usage: InternalId.usages['milestones'], last_value: 3)
+ end
internal_id_query.call(group1).first.tap do |iid|
iid.last_value = iid.last_value - 2
@@ -127,11 +146,11 @@ describe DeleteInconsistentInternalIdRecords, :migration do
end
end
- it "deletes inconsistent issues" do
+ it "deletes inconsistent records" do
expect { migrate! }.to change { internal_id_query.call(group1).size }.from(1).to(0)
end
- it "retains consistent issues" do
+ it "retains consistent records" do
expect { migrate! }.not_to change { internal_id_query.call(group2).size }
end
diff --git a/spec/migrations/issues_moved_to_id_foreign_key_spec.rb b/spec/migrations/issues_moved_to_id_foreign_key_spec.rb
index 495e86ee888..71a4e71ac8a 100644
--- a/spec/migrations/issues_moved_to_id_foreign_key_spec.rb
+++ b/spec/migrations/issues_moved_to_id_foreign_key_spec.rb
@@ -1,20 +1,19 @@
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20171106151218_issues_moved_to_id_foreign_key.rb')
-# The schema version has to be far enough in advance to have the
-# only_mirror_protected_branches column in the projects table to create a
-# project via FactoryBot.
-describe IssuesMovedToIdForeignKey, :migration, schema: 20171114150259 do
- let!(:issue_first) { create(:issue, moved_to_id: issue_second.id) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let!(:issue_second) { create(:issue, moved_to_id: issue_third.id) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let!(:issue_third) { create(:issue) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
+describe IssuesMovedToIdForeignKey, :migration do
+ let(:issues) { table(:issues) }
+
+ let!(:issue_third) { issues.create! }
+ let!(:issue_second) { issues.create!(moved_to_id: issue_third.id) }
+ let!(:issue_first) { issues.create!(moved_to_id: issue_second.id) }
subject { described_class.new }
it 'removes the orphaned moved_to_id' do
subject.down
- issue_third.update(moved_to_id: 100000)
+ issue_third.update!(moved_to_id: 0)
subject.up
diff --git a/spec/migrations/migrate_old_artifacts_spec.rb b/spec/migrations/migrate_old_artifacts_spec.rb
index af77d64fdbf..79e21514506 100644
--- a/spec/migrations/migrate_old_artifacts_spec.rb
+++ b/spec/migrations/migrate_old_artifacts_spec.rb
@@ -3,7 +3,9 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170523083112_migrate_old_artifacts.rb')
-describe MigrateOldArtifacts do
+# Adding the ci_job_artifacts table (from the 20170918072948 schema)
+# makes the use of model code below easier.
+describe MigrateOldArtifacts, :migration, schema: 20170918072948 do
let(:migration) { described_class.new }
let!(:directory) { Dir.mktmpdir }
@@ -16,18 +18,22 @@ describe MigrateOldArtifacts do
end
context 'with migratable data' do
- set(:project1) { create(:project, ci_id: 2) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- set(:project2) { create(:project, ci_id: 3) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- set(:project3) { create(:project) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
-
- set(:pipeline1) { create(:ci_empty_pipeline, project: project1) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- set(:pipeline2) { create(:ci_empty_pipeline, project: project2) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- set(:pipeline3) { create(:ci_empty_pipeline, project: project3) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
-
- let!(:build_with_legacy_artifacts) { create(:ci_build, pipeline: pipeline1) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let!(:build_without_artifacts) { create(:ci_build, pipeline: pipeline1) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let!(:build2) { create(:ci_build, pipeline: pipeline2) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let!(:build3) { create(:ci_build, pipeline: pipeline3) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ let(:projects) { table(:projects) }
+ let(:ci_pipelines) { table(:ci_pipelines) }
+ let(:ci_builds) { table(:ci_builds) }
+
+ let!(:project1) { projects.create!(ci_id: 2) }
+ let!(:project2) { projects.create!(ci_id: 3) }
+ let!(:project3) { projects.create! }
+
+ let!(:pipeline1) { ci_pipelines.create!(project_id: project1.id) }
+ let!(:pipeline2) { ci_pipelines.create!(project_id: project2.id) }
+ let!(:pipeline3) { ci_pipelines.create!(project_id: project3.id) }
+
+ let!(:build_with_legacy_artifacts) { ci_builds.create!(commit_id: pipeline1.id, project_id: project1.id, type: 'Ci::Build').becomes(Ci::Build) }
+ let!(:build_without_artifacts) { ci_builds.create!(commit_id: pipeline1.id, project_id: project1.id, type: 'Ci::Build').becomes(Ci::Build) }
+ let!(:build2) { ci_builds.create!(commit_id: pipeline2.id, project_id: project2.id, type: 'Ci::Build').becomes(Ci::Build) }
+ let!(:build3) { ci_builds.create!(commit_id: pipeline3.id, project_id: project3.id, type: 'Ci::Build').becomes(Ci::Build) }
before do
setup_builds(build2, build3)
diff --git a/spec/migrations/migrate_user_activities_to_users_last_activity_on_spec.rb b/spec/migrations/migrate_user_activities_to_users_last_activity_on_spec.rb
index 99173708190..88aef3b70b4 100644
--- a/spec/migrations/migrate_user_activities_to_users_last_activity_on_spec.rb
+++ b/spec/migrations/migrate_user_activities_to_users_last_activity_on_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170324160416_migrate_user_activities_to_users_last_activity_on.rb')
-describe MigrateUserActivitiesToUsersLastActivityOn, :clean_gitlab_redis_shared_state, :delete do
+describe MigrateUserActivitiesToUsersLastActivityOn, :clean_gitlab_redis_shared_state, :migration do
let(:migration) { described_class.new }
- let!(:user_active_1) { create(:user) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
- let!(:user_active_2) { create(:user) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ let!(:user_active_1) { table(:users).create!(email: 'test1', username: 'test1') }
+ let!(:user_active_2) { table(:users).create!(email: 'test2', username: 'test2') }
def record_activity(user, time)
Gitlab::Redis::SharedState.with do |redis|
diff --git a/spec/migrations/migrate_user_project_view_spec.rb b/spec/migrations/migrate_user_project_view_spec.rb
index 80468b9d01e..a0179ab3ceb 100644
--- a/spec/migrations/migrate_user_project_view_spec.rb
+++ b/spec/migrations/migrate_user_project_view_spec.rb
@@ -3,15 +3,15 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170406142253_migrate_user_project_view.rb')
-describe MigrateUserProjectView, :delete do
+describe MigrateUserProjectView, :migration do
let(:migration) { described_class.new }
- let!(:user) { create(:user, project_view: 'readme') } # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ let!(:user) { table(:users).create!(project_view: User.project_views['readme']) }
describe '#up' do
it 'updates project view setting with new value' do
migration.up
- expect(user.reload.project_view).to eq('files')
+ expect(user.reload.project_view).to eq(User.project_views['files'])
end
end
end
diff --git a/spec/migrations/move_personal_snippets_files_spec.rb b/spec/migrations/move_personal_snippets_files_spec.rb
index 1f39ad98fb8..d94ae1e52f5 100644
--- a/spec/migrations/move_personal_snippets_files_spec.rb
+++ b/spec/migrations/move_personal_snippets_files_spec.rb
@@ -1,12 +1,19 @@
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170612071012_move_personal_snippets_files.rb')
-describe MovePersonalSnippetsFiles do
+describe MovePersonalSnippetsFiles, :migration do
let(:migration) { described_class.new }
let(:test_dir) { File.join(Rails.root, "tmp", "tests", "move_snippet_files_test") }
let(:uploads_dir) { File.join(test_dir, 'uploads') }
let(:new_uploads_dir) { File.join(uploads_dir, '-', 'system') }
+ let(:notes) { table(:notes) }
+ let(:snippets) { table(:snippets) }
+ let(:uploads) { table(:uploads) }
+
+ let(:user) { table(:users).create!(email: 'user@example.com', projects_limit: 10) }
+ let(:project) { table(:projects).create!(name: 'gitlab', namespace_id: 1) }
+
before do
allow(CarrierWave).to receive(:root).and_return(test_dir)
allow(migration).to receive(:base_directory).and_return(test_dir)
@@ -16,14 +23,14 @@ describe MovePersonalSnippetsFiles do
describe "#up" do
let(:snippet) do
- snippet = create(:personal_snippet) # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ snippet = snippets.create!(author_id: user.id)
create_upload('picture.jpg', snippet)
snippet.update(description: markdown_linking_file('picture.jpg', snippet))
snippet
end
let(:snippet_with_missing_file) do
- snippet = create(:snippet) # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ snippet = snippets.create!(author_id: user.id, project_id: project.id)
create_upload('picture.jpg', snippet, create_file: false)
snippet.update(description: markdown_linking_file('picture.jpg', snippet))
snippet
@@ -62,7 +69,10 @@ describe MovePersonalSnippetsFiles do
secret = "secret#{snippet.id}"
file_location = "/uploads/-/system/personal_snippet/#{snippet.id}/#{secret}/picture.jpg"
markdown = markdown_linking_file('picture.jpg', snippet)
- note = create(:note_on_personal_snippet, noteable: snippet, note: "with #{markdown}") # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ note = notes.create!(noteable_id: snippet.id,
+ noteable_type: Snippet,
+ note: "with #{markdown}",
+ author_id: user.id)
migration.up
@@ -73,14 +83,14 @@ describe MovePersonalSnippetsFiles do
describe "#down" do
let(:snippet) do
- snippet = create(:personal_snippet) # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ snippet = snippets.create!(author_id: user.id)
create_upload('picture.jpg', snippet, in_new_path: true)
snippet.update(description: markdown_linking_file('picture.jpg', snippet, in_new_path: true))
snippet
end
let(:snippet_with_missing_file) do
- snippet = create(:personal_snippet) # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ snippet = snippets.create!(author_id: user.id)
create_upload('picture.jpg', snippet, create_file: false, in_new_path: true)
snippet.update(description: markdown_linking_file('picture.jpg', snippet, in_new_path: true))
snippet
@@ -119,7 +129,10 @@ describe MovePersonalSnippetsFiles do
markdown = markdown_linking_file('picture.jpg', snippet, in_new_path: true)
secret = "secret#{snippet.id}"
file_location = "/uploads/personal_snippet/#{snippet.id}/#{secret}/picture.jpg"
- note = create(:note_on_personal_snippet, noteable: snippet, note: "with #{markdown}") # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ note = notes.create!(noteable_id: snippet.id,
+ noteable_type: Snippet,
+ note: "with #{markdown}",
+ author_id: user.id)
migration.down
@@ -135,7 +148,7 @@ describe MovePersonalSnippetsFiles do
secret = '123456789'
filename = 'hello.jpg'
- snippet = create(:personal_snippet) # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ snippet = snippets.create!(author_id: user.id)
path_before = "/uploads/personal_snippet/#{snippet.id}/#{secret}/#{filename}"
path_after = "/uploads/system/personal_snippet/#{snippet.id}/#{secret}/#{filename}"
@@ -161,7 +174,11 @@ describe MovePersonalSnippetsFiles do
FileUtils.touch(absolute_path)
end
- create(:upload, model: snippet, path: "#{secret}/#{filename}", uploader: PersonalFileUploader) # rubocop:disable RSpec/FactoriesInMigrationSpecs
+ uploads.create!(model_id: snippet.id,
+ model_type: snippet.class,
+ path: "#{secret}/#{filename}",
+ uploader: PersonalFileUploader,
+ size: 100.kilobytes)
end
def markdown_linking_file(filename, snippet, in_new_path: false)
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 789e14e8a20..314f0728b8e 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -117,14 +117,6 @@ describe ApplicationSetting do
it { expect(setting.repository_storages).to eq(['default']) }
end
- context '#commit_email_hostname' do
- it 'returns configured gitlab hostname if commit_email_hostname is not defined' do
- setting.update(commit_email_hostname: nil)
-
- expect(setting.commit_email_hostname).to eq("users.noreply.#{Gitlab.config.gitlab.host}")
- end
- end
-
context 'auto_devops_domain setting' do
context 'when auto_devops_enabled? is true' do
before do
@@ -182,15 +174,6 @@ describe ApplicationSetting do
it { is_expected.not_to allow_value("").for(:repository_storages) }
it { is_expected.not_to allow_value(nil).for(:repository_storages) }
end
-
- describe '.pick_repository_storage' do
- it 'uses Array#sample to pick a random storage' do
- array = double('array', sample: 'random')
- expect(setting).to receive(:repository_storages).and_return(array)
-
- expect(setting.pick_repository_storage).to eq('random')
- end
- end
end
context 'housekeeping settings' do
@@ -367,65 +350,6 @@ describe ApplicationSetting do
end
end
- context 'restricted signup domains' do
- it 'sets single domain' do
- setting.domain_whitelist_raw = 'example.com'
- expect(setting.domain_whitelist).to eq(['example.com'])
- end
-
- it 'sets multiple domains with spaces' do
- setting.domain_whitelist_raw = 'example.com *.example.com'
- expect(setting.domain_whitelist).to eq(['example.com', '*.example.com'])
- end
-
- it 'sets multiple domains with newlines and a space' do
- setting.domain_whitelist_raw = "example.com\n *.example.com"
- expect(setting.domain_whitelist).to eq(['example.com', '*.example.com'])
- end
-
- it 'sets multiple domains with commas' do
- setting.domain_whitelist_raw = "example.com, *.example.com"
- expect(setting.domain_whitelist).to eq(['example.com', '*.example.com'])
- end
- end
-
- context 'blacklisted signup domains' do
- it 'sets single domain' do
- setting.domain_blacklist_raw = 'example.com'
- expect(setting.domain_blacklist).to contain_exactly('example.com')
- end
-
- it 'sets multiple domains with spaces' do
- setting.domain_blacklist_raw = 'example.com *.example.com'
- expect(setting.domain_blacklist).to contain_exactly('example.com', '*.example.com')
- end
-
- it 'sets multiple domains with newlines and a space' do
- setting.domain_blacklist_raw = "example.com\n *.example.com"
- expect(setting.domain_blacklist).to contain_exactly('example.com', '*.example.com')
- end
-
- it 'sets multiple domains with commas' do
- setting.domain_blacklist_raw = "example.com, *.example.com"
- expect(setting.domain_blacklist).to contain_exactly('example.com', '*.example.com')
- end
-
- it 'sets multiple domains with semicolon' do
- setting.domain_blacklist_raw = "example.com; *.example.com"
- expect(setting.domain_blacklist).to contain_exactly('example.com', '*.example.com')
- end
-
- it 'sets multiple domains with mixture of everything' do
- setting.domain_blacklist_raw = "example.com; *.example.com\n test.com\sblock.com yes.com"
- expect(setting.domain_blacklist).to contain_exactly('example.com', '*.example.com', 'test.com', 'block.com', 'yes.com')
- end
-
- it 'sets multiple domain with file' do
- setting.domain_blacklist_file = File.open(Rails.root.join('spec/fixtures/', 'domain_blacklist.txt'))
- expect(setting.domain_blacklist).to contain_exactly('example.com', 'test.com', 'foo.bar')
- end
- end
-
describe 'performance bar settings' do
describe 'performance_bar_allowed_group' do
context 'with no performance_bar_allowed_group_id saved' do
@@ -462,142 +386,6 @@ describe ApplicationSetting do
end
end
- describe 'usage ping settings' do
- context 'when the usage ping is disabled in gitlab.yml' do
- before do
- allow(Settings.gitlab).to receive(:usage_ping_enabled).and_return(false)
- end
-
- it 'does not allow the usage ping to be configured' do
- expect(setting.usage_ping_can_be_configured?).to be_falsey
- end
-
- context 'when the usage ping is disabled in the DB' do
- before do
- setting.usage_ping_enabled = false
- end
-
- it 'returns false for usage_ping_enabled' do
- expect(setting.usage_ping_enabled).to be_falsey
- end
- end
-
- context 'when the usage ping is enabled in the DB' do
- before do
- setting.usage_ping_enabled = true
- end
-
- it 'returns false for usage_ping_enabled' do
- expect(setting.usage_ping_enabled).to be_falsey
- end
- end
- end
-
- context 'when the usage ping is enabled in gitlab.yml' do
- before do
- allow(Settings.gitlab).to receive(:usage_ping_enabled).and_return(true)
- end
-
- it 'allows the usage ping to be configured' do
- expect(setting.usage_ping_can_be_configured?).to be_truthy
- end
-
- context 'when the usage ping is disabled in the DB' do
- before do
- setting.usage_ping_enabled = false
- end
-
- it 'returns false for usage_ping_enabled' do
- expect(setting.usage_ping_enabled).to be_falsey
- end
- end
-
- context 'when the usage ping is enabled in the DB' do
- before do
- setting.usage_ping_enabled = true
- end
-
- it 'returns true for usage_ping_enabled' do
- expect(setting.usage_ping_enabled).to be_truthy
- end
- end
- end
- end
-
- describe '#allowed_key_types' do
- it 'includes all key types by default' do
- expect(setting.allowed_key_types).to contain_exactly(*described_class::SUPPORTED_KEY_TYPES)
- end
-
- it 'excludes disabled key types' do
- expect(setting.allowed_key_types).to include(:ed25519)
-
- setting.ed25519_key_restriction = described_class::FORBIDDEN_KEY_VALUE
-
- expect(setting.allowed_key_types).not_to include(:ed25519)
- end
- end
-
- describe '#key_restriction_for' do
- it 'returns the restriction value for recognised types' do
- setting.rsa_key_restriction = 1024
-
- expect(setting.key_restriction_for(:rsa)).to eq(1024)
- end
-
- it 'allows types to be passed as a string' do
- setting.rsa_key_restriction = 1024
-
- expect(setting.key_restriction_for('rsa')).to eq(1024)
- end
-
- it 'returns forbidden for unrecognised type' do
- expect(setting.key_restriction_for(:foo)).to eq(described_class::FORBIDDEN_KEY_VALUE)
- end
- end
-
- describe '#allow_signup?' do
- it 'returns true' do
- expect(setting.allow_signup?).to be_truthy
- end
-
- it 'returns false if signup is disabled' do
- allow(setting).to receive(:signup_enabled?).and_return(false)
-
- expect(setting.allow_signup?).to be_falsey
- end
-
- it 'returns false if password authentication is disabled for the web interface' do
- allow(setting).to receive(:password_authentication_enabled_for_web?).and_return(false)
-
- expect(setting.allow_signup?).to be_falsey
- end
- end
-
- describe '#user_default_internal_regex_enabled?' do
- using RSpec::Parameterized::TableSyntax
-
- where(:user_default_external, :user_default_internal_regex, :result) do
- false | nil | false
- false | '' | false
- false | '^(?:(?!\.ext@).)*$\r?\n?' | false
- true | '' | false
- true | nil | false
- true | '^(?:(?!\.ext@).)*$\r?\n?' | true
- end
-
- with_them do
- before do
- setting.update(user_default_external: user_default_external)
- setting.update(user_default_internal_regex: user_default_internal_regex)
- end
-
- subject { setting.user_default_internal_regex_enabled? }
-
- it { is_expected.to eq(result) }
- end
- end
-
context 'diff limit settings' do
describe '#diff_max_patch_bytes' do
context 'validations' do
@@ -613,23 +401,5 @@ describe ApplicationSetting do
end
end
- describe '#archive_builds_older_than' do
- subject { setting.archive_builds_older_than }
-
- context 'when the archive_builds_in_seconds is set' do
- before do
- setting.archive_builds_in_seconds = 3600
- end
-
- it { is_expected.to be_within(1.minute).of(1.hour.ago) }
- end
-
- context 'when the archive_builds_in_seconds is set' do
- before do
- setting.archive_builds_in_seconds = nil
- end
-
- it { is_expected.to be_nil }
- end
- end
+ it_behaves_like 'application settings examples'
end
diff --git a/spec/models/broadcast_message_spec.rb b/spec/models/broadcast_message_spec.rb
index 89839709131..30ca07d5d2c 100644
--- a/spec/models/broadcast_message_spec.rb
+++ b/spec/models/broadcast_message_spec.rb
@@ -95,6 +95,12 @@ describe BroadcastMessage do
end
end
+ describe '#attributes' do
+ it 'includes message_html field' do
+ expect(subject.attributes.keys).to include("cached_markdown_version", "message_html")
+ end
+ end
+
describe '#active?' do
it 'is truthy when started and not ended' do
message = build(:broadcast_message)
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index dd40b968bc1..7500e6ae5b1 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -186,6 +186,37 @@ describe Ci::Build do
end
end
+ describe '#enqueue' do
+ let(:build) { create(:ci_build, :created) }
+
+ subject { build.enqueue }
+
+ before do
+ allow(build).to receive(:any_unmet_prerequisites?).and_return(has_prerequisites)
+ allow(Ci::PrepareBuildService).to receive(:perform_async)
+ end
+
+ context 'build has unmet prerequisites' do
+ let(:has_prerequisites) { true }
+
+ it 'transitions to preparing' do
+ subject
+
+ expect(build).to be_preparing
+ end
+ end
+
+ context 'build has no prerequisites' do
+ let(:has_prerequisites) { false }
+
+ it 'transitions to pending' do
+ subject
+
+ expect(build).to be_pending
+ end
+ end
+ end
+
describe '#actionize' do
context 'when build is a created' do
before do
@@ -344,6 +375,18 @@ describe Ci::Build do
expect(build).to be_pending
end
+
+ context 'build has unmet prerequisites' do
+ before do
+ allow(build).to receive(:prerequisites).and_return([double])
+ end
+
+ it 'transitions to preparing' do
+ subject
+
+ expect(build).to be_preparing
+ end
+ end
end
end
@@ -2772,7 +2815,7 @@ describe Ci::Build do
end
context 'when ref is merge request' do
- let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
let(:pipeline) { merge_request.merge_request_pipelines.first }
let(:build) { create(:ci_build, ref: merge_request.source_branch, tag: false, pipeline: pipeline, project: project) }
@@ -2830,7 +2873,7 @@ describe Ci::Build do
end
context 'when ref is merge request' do
- let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
let(:pipeline) { merge_request.merge_request_pipelines.first }
let(:build) { create(:ci_build, ref: merge_request.source_branch, tag: false, pipeline: pipeline, project: project) }
@@ -2876,6 +2919,36 @@ describe Ci::Build do
end
end
+ describe '#any_unmet_prerequisites?' do
+ let(:build) { create(:ci_build, :created) }
+
+ subject { build.any_unmet_prerequisites? }
+
+ context 'build has prerequisites' do
+ before do
+ allow(build).to receive(:prerequisites).and_return([double])
+ end
+
+ it { is_expected.to be_truthy }
+
+ context 'and the ci_preparing_state feature is disabled' do
+ before do
+ stub_feature_flags(ci_preparing_state: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'build does not have prerequisites' do
+ before do
+ allow(build).to receive(:prerequisites).and_return([])
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
describe '#yaml_variables' do
let(:build) { create(:ci_build, pipeline: pipeline, yaml_variables: variables) }
@@ -2928,6 +3001,20 @@ describe Ci::Build do
end
end
+ describe 'state transition: any => [:preparing]' do
+ let(:build) { create(:ci_build, :created) }
+
+ before do
+ allow(build).to receive(:prerequisites).and_return([double])
+ end
+
+ it 'queues BuildPrepareWorker' do
+ expect(Ci::BuildPrepareWorker).to receive(:perform_async).with(build.id)
+
+ build.enqueue
+ end
+ end
+
describe 'state transition: any => [:pending]' do
let(:build) { create(:ci_build, :created) }
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index d0b42d103a5..5b8097621e0 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -130,22 +130,118 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '.for_sha' do
+ subject { described_class.for_sha(sha) }
+
+ let(:sha) { 'abc' }
+ let!(:pipeline) { create(:ci_pipeline, sha: 'abc') }
+
+ it 'returns the pipeline' do
+ is_expected.to contain_exactly(pipeline)
+ end
+
+ context 'when argument is array' do
+ let(:sha) { %w[abc def] }
+ let!(:pipeline_2) { create(:ci_pipeline, sha: 'def') }
+
+ it 'returns the pipelines' do
+ is_expected.to contain_exactly(pipeline, pipeline_2)
+ end
+ end
+
+ context 'when sha is empty' do
+ let(:sha) { nil }
+
+ it 'does not return anything' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ describe '.for_source_sha' do
+ subject { described_class.for_source_sha(source_sha) }
+
+ let(:source_sha) { 'abc' }
+ let!(:pipeline) { create(:ci_pipeline, source_sha: 'abc') }
+
+ it 'returns the pipeline' do
+ is_expected.to contain_exactly(pipeline)
+ end
+
+ context 'when argument is array' do
+ let(:source_sha) { %w[abc def] }
+ let!(:pipeline_2) { create(:ci_pipeline, source_sha: 'def') }
+
+ it 'returns the pipelines' do
+ is_expected.to contain_exactly(pipeline, pipeline_2)
+ end
+ end
+
+ context 'when source_sha is empty' do
+ let(:source_sha) { nil }
+
+ it 'does not return anything' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
+ describe '.for_sha_or_source_sha' do
+ subject { described_class.for_sha_or_source_sha(sha) }
+
+ let(:sha) { 'abc' }
+
+ context 'when sha is matched' do
+ let!(:pipeline) { create(:ci_pipeline, sha: sha) }
+
+ it 'returns the pipeline' do
+ is_expected.to contain_exactly(pipeline)
+ end
+ end
+
+ context 'when source sha is matched' do
+ let!(:pipeline) { create(:ci_pipeline, source_sha: sha) }
+
+ it 'returns the pipeline' do
+ is_expected.to contain_exactly(pipeline)
+ end
+ end
+
+ context 'when both sha and source sha are not matched' do
+ let!(:pipeline) { create(:ci_pipeline, sha: 'bcd', source_sha: 'bcd') }
+
+ it 'does not return anything' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
describe '.detached_merge_request_pipelines' do
- subject { described_class.detached_merge_request_pipelines(merge_request) }
+ subject { described_class.detached_merge_request_pipelines(merge_request, sha) }
let!(:pipeline) do
- create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, target_sha: target_sha)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, sha: merge_request.diff_head_sha)
end
let(:merge_request) { create(:merge_request) }
- let(:target_sha) { nil }
+ let(:sha) { merge_request.diff_head_sha }
it 'returns detached merge request pipelines' do
is_expected.to eq([pipeline])
end
- context 'when target sha exists' do
- let(:target_sha) { merge_request.target_branch_sha }
+ context 'when sha does not exist' do
+ let(:sha) { 'abc' }
+
+ it 'returns empty array' do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'when pipeline is merge request pipeline' do
+ let!(:pipeline) do
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, source_sha: merge_request.diff_head_sha)
+ end
it 'returns empty array' do
is_expected.to be_empty
@@ -173,21 +269,31 @@ describe Ci::Pipeline, :mailer do
end
describe '.merge_request_pipelines' do
- subject { described_class.merge_request_pipelines(merge_request) }
+ subject { described_class.merge_request_pipelines(merge_request, source_sha) }
let!(:pipeline) do
- create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, target_sha: target_sha)
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, source_sha: merge_request.diff_head_sha)
end
let(:merge_request) { create(:merge_request) }
- let(:target_sha) { merge_request.target_branch_sha }
+ let(:source_sha) { merge_request.diff_head_sha }
it 'returns merge pipelines' do
is_expected.to eq([pipeline])
end
- context 'when target sha is empty' do
- let(:target_sha) { nil }
+ context 'when source sha is empty' do
+ let(:source_sha) { nil }
+
+ it 'returns empty array' do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'when pipeline is detached merge request pipeline' do
+ let!(:pipeline) do
+ create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, sha: merge_request.diff_head_sha)
+ end
it 'returns empty array' do
is_expected.to be_empty
@@ -256,6 +362,74 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '#matches_sha_or_source_sha?' do
+ subject { pipeline.matches_sha_or_source_sha?(sample_sha) }
+
+ let(:sample_sha) { Digest::SHA1.hexdigest(SecureRandom.hex) }
+
+ context 'when sha matches' do
+ let(:pipeline) { build(:ci_pipeline, sha: sample_sha) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when source_sha matches' do
+ let(:pipeline) { build(:ci_pipeline, source_sha: sample_sha) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when both sha and source_sha do not match' do
+ let(:pipeline) { build(:ci_pipeline, sha: 'test', source_sha: 'test') }
+
+ it { is_expected.to be_falsy }
+ end
+ end
+
+ describe '.triggered_for_branch' do
+ subject { described_class.triggered_for_branch(ref) }
+
+ let(:project) { create(:project, :repository) }
+ let(:ref) { 'feature' }
+ let!(:pipeline) { create(:ci_pipeline, ref: ref) }
+
+ it 'returns the pipeline' do
+ is_expected.to eq([pipeline])
+ end
+
+ context 'when sha is not specified' do
+ it 'returns the pipeline' do
+ expect(described_class.triggered_for_branch(ref)).to eq([pipeline])
+ end
+ end
+
+ context 'when pipeline is triggered for tag' do
+ let(:ref) { 'v1.1.0' }
+ let!(:pipeline) { create(:ci_pipeline, ref: ref, tag: true) }
+
+ it 'does not return the pipeline' do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'when pipeline is triggered for merge_request' do
+ let!(:merge_request) do
+ create(:merge_request,
+ :with_merge_request_pipeline,
+ source_project: project,
+ source_branch: ref,
+ target_project: project,
+ target_branch: 'master')
+ end
+
+ let(:pipeline) { merge_request.merge_request_pipelines.first }
+
+ it 'does not return the pipeline' do
+ is_expected.to be_empty
+ end
+ end
+ end
+
describe '.merge_request_event' do
subject { described_class.merge_request_event }
@@ -1051,16 +1225,28 @@ describe Ci::Pipeline, :mailer do
end
describe '#started_at' do
- it 'updates on transitioning to running' do
- build.run
+ let(:pipeline) { create(:ci_empty_pipeline, status: from_status) }
+
+ %i[created preparing pending].each do |status|
+ context "from #{status}" do
+ let(:from_status) { status }
- expect(pipeline.reload.started_at).not_to be_nil
+ it 'updates on transitioning to running' do
+ pipeline.run
+
+ expect(pipeline.started_at).not_to be_nil
+ end
+ end
end
- it 'does not update on transitioning to success' do
- build.success
+ context 'from created' do
+ let(:from_status) { :created }
- expect(pipeline.reload.started_at).to be_nil
+ it 'does not update on transitioning to success' do
+ pipeline.succeed
+
+ expect(pipeline.started_at).to be_nil
+ end
end
end
@@ -1079,27 +1265,49 @@ describe Ci::Pipeline, :mailer do
end
describe 'merge request metrics' do
- let(:project) { create(:project, :repository) }
- let(:pipeline) { FactoryBot.create(:ci_empty_pipeline, status: 'created', project: project, ref: 'master', sha: project.repository.commit('master').id) }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: pipeline.ref) }
+ let(:pipeline) { create(:ci_empty_pipeline, status: from_status) }
before do
expect(PipelineMetricsWorker).to receive(:perform_async).with(pipeline.id)
end
context 'when transitioning to running' do
- it 'schedules metrics workers' do
- pipeline.run
+ %i[created preparing pending].each do |status|
+ context "from #{status}" do
+ let(:from_status) { status }
+
+ it 'schedules metrics workers' do
+ pipeline.run
+ end
+ end
end
end
context 'when transitioning to success' do
+ let(:from_status) { 'created' }
+
it 'schedules metrics workers' do
pipeline.succeed
end
end
end
+ describe 'merge on success' do
+ let(:pipeline) { create(:ci_empty_pipeline, status: from_status) }
+
+ %i[created preparing pending running].each do |status|
+ context "from #{status}" do
+ let(:from_status) { status }
+
+ it 'schedules pipeline success worker' do
+ expect(PipelineSuccessWorker).to receive(:perform_async).with(pipeline.id)
+
+ pipeline.succeed
+ end
+ end
+ end
+ end
+
describe 'pipeline caching' do
it 'performs ExpirePipelinesCacheWorker' do
expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id)
@@ -1618,6 +1826,18 @@ describe Ci::Pipeline, :mailer do
subject { pipeline.reload.status }
+ context 'on prepare' do
+ before do
+ # Prevent skipping directly to 'pending'
+ allow(build).to receive(:prerequisites).and_return([double])
+ allow(Ci::BuildPrepareWorker).to receive(:perform_async)
+
+ build.enqueue
+ end
+
+ it { is_expected.to eq('preparing') }
+ end
+
context 'on queuing' do
before do
build.enqueue
diff --git a/spec/models/clusters/applications/runner_spec.rb b/spec/models/clusters/applications/runner_spec.rb
index 6972fc03415..06d9bc076cd 100644
--- a/spec/models/clusters/applications/runner_spec.rb
+++ b/spec/models/clusters/applications/runner_spec.rb
@@ -22,7 +22,7 @@ describe Clusters::Applications::Runner do
it 'should be initialized with 4 arguments' do
expect(subject.name).to eq('runner')
expect(subject.chart).to eq('runner/gitlab-runner')
- expect(subject.version).to eq('0.2.0')
+ expect(subject.version).to eq('0.3.0')
expect(subject).to be_rbac
expect(subject.repository).to eq('https://charts.gitlab.io')
expect(subject.files).to eq(gitlab_runner.files)
@@ -40,7 +40,7 @@ describe Clusters::Applications::Runner do
let(:gitlab_runner) { create(:clusters_applications_runner, :errored, runner: ci_runner, version: '0.1.13') }
it 'should be initialized with the locked version' do
- expect(subject.version).to eq('0.2.0')
+ expect(subject.version).to eq('0.3.0')
end
end
end
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index cc93a1b4965..af65530e663 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -375,14 +375,14 @@ describe Clusters::Platforms::Kubernetes, :use_clean_rails_memory_store_caching
end
context 'with valid pods' do
- let(:pod) { kube_pod(app: environment.slug) }
- let(:pod_with_no_terminal) { kube_pod(app: environment.slug, status: "Pending") }
+ let(:pod) { kube_pod(environment_slug: environment.slug, project_slug: project.full_path_slug) }
+ let(:pod_with_no_terminal) { kube_pod(environment_slug: environment.slug, project_slug: project.full_path_slug, status: "Pending") }
let(:terminals) { kube_terminals(service, pod) }
before do
stub_reactive_cache(
service,
- pods: [pod, pod, pod_with_no_terminal, kube_pod(app: "should-be-filtered-out")]
+ pods: [pod, pod, pod_with_no_terminal, kube_pod(environment_slug: "should-be-filtered-out")]
)
end
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index baad8352185..9d4e18534ae 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -542,7 +542,7 @@ eos
end
end
- describe '#uri_type' do
+ shared_examples '#uri_type' do
it 'returns the URI type at the given path' do
expect(commit.uri_type('files/html')).to be(:tree)
expect(commit.uri_type('files/images/logo-black.png')).to be(:raw)
@@ -561,6 +561,20 @@ eos
end
end
+ describe '#uri_type with Gitaly enabled' do
+ it_behaves_like "#uri_type"
+ end
+
+ describe '#uri_type with Rugged enabled', :enable_rugged do
+ it 'calls out to the Rugged implementation' do
+ expect_any_instance_of(Rugged::Tree).to receive(:path).with('files/html').and_call_original
+
+ commit.uri_type('files/html')
+ end
+
+ it_behaves_like '#uri_type'
+ end
+
describe '.from_hash' do
let(:new_commit) { described_class.from_hash(commit.to_hash, project) }
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 8b7c88805c1..e2b7f5c6ee2 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -49,6 +49,16 @@ describe CommitStatus do
commit_status.success!
end
+
+ describe 'transitioning to running' do
+ let(:commit_status) { create(:commit_status, :pending, started_at: nil) }
+
+ it 'records the started at time' do
+ commit_status.run!
+
+ expect(commit_status.started_at).to be_present
+ end
+ end
end
describe '#started?' do
@@ -479,6 +489,12 @@ describe CommitStatus do
it { is_expected.to be_script_failure }
end
+
+ context 'when failure_reason is unmet_prerequisites' do
+ let(:reason) { :unmet_prerequisites }
+
+ it { is_expected.to be_unmet_prerequisites }
+ end
end
describe 'ensure stage assignment' do
@@ -555,6 +571,7 @@ describe CommitStatus do
before do
allow(Time).to receive(:now).and_return(current_time)
+ expect(commit_status.any_unmet_prerequisites?).to eq false
end
shared_examples 'commit status enqueued' do
@@ -569,6 +586,12 @@ describe CommitStatus do
it_behaves_like 'commit status enqueued'
end
+ context 'when initial state is :preparing' do
+ let(:commit_status) { create(:commit_status, :preparing) }
+
+ it_behaves_like 'commit status enqueued'
+ end
+
context 'when initial state is :skipped' do
let(:commit_status) { create(:commit_status, :skipped) }
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index 447279f19a8..7d555f15e39 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -23,6 +23,7 @@ describe CacheMarkdownField do
include CacheMarkdownField
cache_markdown_field :foo
cache_markdown_field :baz, pipeline: :single_line
+ cache_markdown_field :zoo, whitelisted: true
def self.add_attr(name)
self.attribute_names += [name]
@@ -35,7 +36,7 @@ describe CacheMarkdownField do
add_attr :cached_markdown_version
- [:foo, :foo_html, :bar, :baz, :baz_html].each do |name|
+ [:foo, :foo_html, :bar, :baz, :baz_html, :zoo, :zoo_html].each do |name|
add_attr(name)
end
@@ -84,8 +85,8 @@ describe CacheMarkdownField do
end
describe '.attributes' do
- it 'excludes cache attributes' do
- expect(thing.attributes.keys.sort).to eq(%w[bar baz foo])
+ it 'excludes cache attributes that are blacklisted by default' do
+ expect(thing.attributes.keys.sort).to eq(%w[bar baz cached_markdown_version foo zoo zoo_html])
end
end
@@ -297,7 +298,12 @@ describe CacheMarkdownField do
it 'saves the changes using #update_columns' do
expect(thing).to receive(:persisted?).and_return(true)
expect(thing).to receive(:update_columns)
- .with("foo_html" => updated_html, "baz_html" => "", "cached_markdown_version" => cache_version)
+ .with(
+ "foo_html" => updated_html,
+ "baz_html" => "",
+ "zoo_html" => "",
+ "cached_markdown_version" => cache_version
+ )
thing.refresh_markdown_cache!
end
diff --git a/spec/models/concerns/has_ref_spec.rb b/spec/models/concerns/has_ref_spec.rb
index 8aa2fecb18c..6805731fed3 100644
--- a/spec/models/concerns/has_ref_spec.rb
+++ b/spec/models/concerns/has_ref_spec.rb
@@ -18,7 +18,7 @@ describe HasRef do
end
context 'when it was triggered by merge request' do
- let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
let(:pipeline) { merge_request.merge_request_pipelines.first }
let(:build) { create(:ci_build, pipeline: pipeline) }
@@ -67,7 +67,7 @@ describe HasRef do
end
context 'when it is triggered by a merge request' do
- let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
let(:pipeline) { merge_request.merge_request_pipelines.first }
let(:build) { create(:ci_build, tag: false, pipeline: pipeline) }
diff --git a/spec/models/concerns/has_status_spec.rb b/spec/models/concerns/has_status_spec.rb
index 6b1038cb8fd..e8b1eba67cc 100644
--- a/spec/models/concerns/has_status_spec.rb
+++ b/spec/models/concerns/has_status_spec.rb
@@ -34,6 +34,22 @@ describe HasStatus do
it { is_expected.to eq 'running' }
end
+ context 'all preparing' do
+ let!(:statuses) do
+ [create(type, status: :preparing), create(type, status: :preparing)]
+ end
+
+ it { is_expected.to eq 'preparing' }
+ end
+
+ context 'at least one preparing' do
+ let!(:statuses) do
+ [create(type, status: :success), create(type, status: :preparing)]
+ end
+
+ it { is_expected.to eq 'preparing' }
+ end
+
context 'success and failed but allowed to fail' do
let!(:statuses) do
[create(type, status: :success),
@@ -188,7 +204,7 @@ describe HasStatus do
end
end
- %i[created running pending success
+ %i[created preparing running pending success
failed canceled skipped].each do |status|
it_behaves_like 'having a job', status
end
@@ -234,7 +250,7 @@ describe HasStatus do
describe '.alive' do
subject { CommitStatus.alive }
- %i[running pending created].each do |status|
+ %i[running pending preparing created].each do |status|
it_behaves_like 'containing the job', status
end
@@ -270,7 +286,7 @@ describe HasStatus do
describe '.cancelable' do
subject { CommitStatus.cancelable }
- %i[running pending created scheduled].each do |status|
+ %i[running pending preparing created scheduled].each do |status|
it_behaves_like 'containing the job', status
end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index a8d53cfcd7d..5fce9504334 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -356,4 +356,32 @@ describe Deployment do
end
end
end
+
+ describe '#cluster' do
+ let(:deployment) { create(:deployment) }
+ let(:project) { deployment.project }
+ let(:environment) { deployment.environment }
+
+ subject { deployment.cluster }
+
+ before do
+ expect(project).to receive(:deployment_platform)
+ .with(environment: environment.name).and_call_original
+ end
+
+ context 'project has no deployment platform' do
+ before do
+ expect(project.clusters).to be_empty
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'project has a deployment platform' do
+ let!(:cluster) { create(:cluster, projects: [project]) }
+ let!(:platform) { create(:cluster_platform_kubernetes, cluster: cluster) }
+
+ it { is_expected.to eq cluster }
+ end
+ end
end
diff --git a/spec/models/diff_note_spec.rb b/spec/models/diff_note_spec.rb
index fda00a693f0..67e5f4f7e41 100644
--- a/spec/models/diff_note_spec.rb
+++ b/spec/models/diff_note_spec.rb
@@ -336,6 +336,16 @@ describe DiffNote do
end
end
+ describe '#banzai_render_context' do
+ let(:note) { create(:diff_note_on_merge_request) }
+
+ it 'includes expected context' do
+ context = note.banzai_render_context(:note)
+
+ expect(context).to include(suggestions_filter_enabled: true, noteable: note.noteable, project: note.project)
+ end
+ end
+
describe "image diff notes" do
subject { build(:image_diff_note_on_merge_request, project: project, noteable: merge_request) }
diff --git a/spec/models/environment_status_spec.rb b/spec/models/environment_status_spec.rb
index 9da16dea929..2576a9aba06 100644
--- a/spec/models/environment_status_spec.rb
+++ b/spec/models/environment_status_spec.rb
@@ -64,8 +64,8 @@ describe EnvironmentStatus do
end
describe '.for_merge_request' do
- let(:admin) { create(:admin) }
- let(:pipeline) { create(:ci_pipeline, sha: sha) }
+ let(:admin) { create(:admin) }
+ let!(:pipeline) { create(:ci_pipeline, sha: sha, merge_requests_as_head_pipeline: [merge_request]) }
it 'is based on merge_request.diff_head_sha' do
expect(merge_request).to receive(:diff_head_sha)
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 9dc32a815d8..16624ce47d0 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -810,4 +810,125 @@ describe Group do
it { is_expected.to be_truthy }
end
end
+
+ describe '#first_auto_devops_config' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:group) { create(:group) }
+
+ subject { group.first_auto_devops_config }
+
+ where(:instance_value, :group_value, :config) do
+ # Instance level enabled
+ true | nil | { status: true, scope: :instance }
+ true | true | { status: true, scope: :group }
+ true | false | { status: false, scope: :group }
+
+ # Instance level disabled
+ false | nil | { status: false, scope: :instance }
+ false | true | { status: true, scope: :group }
+ false | false | { status: false, scope: :group }
+ end
+
+ with_them do
+ before do
+ stub_application_setting(auto_devops_enabled: instance_value)
+
+ group.update_attribute(:auto_devops_enabled, group_value)
+ end
+
+ it { is_expected.to eq(config) }
+ end
+
+ context 'with parent groups', :nested_groups do
+ where(:instance_value, :parent_value, :group_value, :config) do
+ # Instance level enabled
+ true | nil | nil | { status: true, scope: :instance }
+ true | nil | true | { status: true, scope: :group }
+ true | nil | false | { status: false, scope: :group }
+
+ true | true | nil | { status: true, scope: :group }
+ true | true | true | { status: true, scope: :group }
+ true | true | false | { status: false, scope: :group }
+
+ true | false | nil | { status: false, scope: :group }
+ true | false | true | { status: true, scope: :group }
+ true | false | false | { status: false, scope: :group }
+
+ # Instance level disabled
+ false | nil | nil | { status: false, scope: :instance }
+ false | nil | true | { status: true, scope: :group }
+ false | nil | false | { status: false, scope: :group }
+
+ false | true | nil | { status: true, scope: :group }
+ false | true | true | { status: true, scope: :group }
+ false | true | false | { status: false, scope: :group }
+
+ false | false | nil | { status: false, scope: :group }
+ false | false | true | { status: true, scope: :group }
+ false | false | false | { status: false, scope: :group }
+ end
+
+ with_them do
+ before do
+ stub_application_setting(auto_devops_enabled: instance_value)
+ parent = create(:group, auto_devops_enabled: parent_value)
+
+ group.update!(
+ auto_devops_enabled: group_value,
+ parent: parent
+ )
+ end
+
+ it { is_expected.to eq(config) }
+ end
+ end
+ end
+
+ describe '#auto_devops_enabled?' do
+ subject { group.auto_devops_enabled? }
+
+ context 'when auto devops is explicitly enabled on group' do
+ let(:group) { create(:group, :auto_devops_enabled) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when auto devops is explicitly disabled on group' do
+ let(:group) { create(:group, :auto_devops_disabled) }
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'when auto devops is implicitly enabled or disabled' do
+ before do
+ stub_application_setting(auto_devops_enabled: false)
+
+ group.update!(parent: parent_group)
+ end
+
+ context 'when auto devops is enabled on root group' do
+ let(:root_group) { create(:group, :auto_devops_enabled) }
+ let(:subgroup) { create(:group, parent: root_group) }
+ let(:parent_group) { create(:group, parent: subgroup) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when auto devops is disabled on root group' do
+ let(:root_group) { create(:group, :auto_devops_disabled) }
+ let(:subgroup) { create(:group, parent: root_group) }
+ let(:parent_group) { create(:group, parent: subgroup) }
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'when auto devops is disabled on parent group and enabled on root group' do
+ let(:root_group) { create(:group, :auto_devops_enabled) }
+ let(:parent_group) { create(:group, :auto_devops_disabled, parent: root_group) }
+
+ it { is_expected.to be_falsy }
+ end
+ end
+ end
end
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index e530e0302f5..53f5307ea0b 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe MergeRequestDiff do
+ include RepoHelpers
+
let(:diff_with_commits) { create(:merge_request).merge_request_diff }
describe 'validations' do
@@ -194,6 +196,25 @@ describe MergeRequestDiff do
expect(diff_file).to be_binary
expect(diff_file.diff).to eq(mr_diff.compare.diffs(paths: [path]).to_a.first.diff)
end
+
+ context 'with diffs that contain a null byte' do
+ let(:filename) { 'test-null.txt' }
+ let(:content) { "a" * 10000 + "\x00" }
+ let(:project) { create(:project, :repository) }
+ let(:branch) { 'null-data' }
+ let(:target_branch) { 'master' }
+
+ it 'saves diffs correctly' do
+ create_file_in_repo(project, target_branch, branch, filename, content)
+
+ mr_diff = create(:merge_request, target_project: project, source_project: project, source_branch: branch, target_branch: target_branch).merge_request_diff
+ diff_file = mr_diff.merge_request_diff_files.find_by(new_path: filename)
+
+ expect(diff_file).to be_binary
+ expect(diff_file.diff).to eq(mr_diff.compare.diffs(paths: [filename]).to_a.first.diff)
+ expect(diff_file.diff).to include(content)
+ end
+ end
end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 07cb4c9c1e3..22998bc5b6a 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -270,6 +270,25 @@ describe MergeRequest do
end
end
+ describe '.recent_target_branches' do
+ let(:project) { create(:project) }
+ let!(:merge_request1) { create(:merge_request, :opened, source_project: project, target_branch: 'feature') }
+ let!(:merge_request2) { create(:merge_request, :closed, source_project: project, target_branch: 'merge-test') }
+ let!(:merge_request3) { create(:merge_request, :opened, source_project: project, target_branch: 'fix') }
+ let!(:merge_request4) { create(:merge_request, :closed, source_project: project, target_branch: 'feature') }
+
+ before do
+ merge_request1.update_columns(updated_at: 1.day.since)
+ merge_request2.update_columns(updated_at: 2.days.since)
+ merge_request3.update_columns(updated_at: 3.days.since)
+ merge_request4.update_columns(updated_at: 4.days.since)
+ end
+
+ it 'returns target branches sorted by updated at desc' do
+ expect(described_class.recent_target_branches).to match_array(['feature', 'merge-test', 'fix'])
+ end
+ end
+
describe '#target_branch_sha' do
let(:project) { create(:project, :repository) }
@@ -1168,8 +1187,10 @@ describe MergeRequest do
end
context 'head pipeline' do
+ let(:diff_head_sha) { Digest::SHA1.hexdigest(SecureRandom.hex) }
+
before do
- allow(subject).to receive(:diff_head_sha).and_return('lastsha')
+ allow(subject).to receive(:diff_head_sha).and_return(diff_head_sha)
end
describe '#head_pipeline' do
@@ -1197,7 +1218,15 @@ describe MergeRequest do
end
it 'returns the pipeline for MR with recent pipeline' do
- pipeline = create(:ci_empty_pipeline, sha: 'lastsha')
+ pipeline = create(:ci_empty_pipeline, sha: diff_head_sha)
+ subject.update_attribute(:head_pipeline_id, pipeline.id)
+
+ expect(subject.actual_head_pipeline).to eq(subject.head_pipeline)
+ expect(subject.actual_head_pipeline).to eq(pipeline)
+ end
+
+ it 'returns the pipeline for MR with recent merge request pipeline' do
+ pipeline = create(:ci_empty_pipeline, sha: 'merge-sha', source_sha: diff_head_sha)
subject.update_attribute(:head_pipeline_id, pipeline.id)
expect(subject.actual_head_pipeline).to eq(subject.head_pipeline)
@@ -1331,7 +1360,7 @@ describe MergeRequest do
sha: shas.second)
end
- let!(:merge_request_pipeline) do
+ let!(:detached_merge_request_pipeline) do
create(:ci_pipeline,
source: :merge_request_event,
project: project,
@@ -1357,7 +1386,7 @@ describe MergeRequest do
it 'returns merge request pipeline first' do
expect(merge_request.all_pipelines)
- .to eq([merge_request_pipeline,
+ .to eq([detached_merge_request_pipeline,
branch_pipeline])
end
@@ -1370,7 +1399,7 @@ describe MergeRequest do
sha: shas.first)
end
- let!(:merge_request_pipeline_2) do
+ let!(:detached_merge_request_pipeline_2) do
create(:ci_pipeline,
source: :merge_request_event,
project: project,
@@ -1381,8 +1410,8 @@ describe MergeRequest do
it 'returns merge request pipelines first' do
expect(merge_request.all_pipelines)
- .to eq([merge_request_pipeline_2,
- merge_request_pipeline,
+ .to eq([detached_merge_request_pipeline_2,
+ detached_merge_request_pipeline,
branch_pipeline_2,
branch_pipeline])
end
@@ -1397,7 +1426,7 @@ describe MergeRequest do
sha: shas.first)
end
- let!(:merge_request_pipeline_2) do
+ let!(:detached_merge_request_pipeline_2) do
create(:ci_pipeline,
source: :merge_request_event,
project: project,
@@ -1420,16 +1449,35 @@ describe MergeRequest do
it 'returns only related merge request pipelines' do
expect(merge_request.all_pipelines)
- .to eq([merge_request_pipeline,
+ .to eq([detached_merge_request_pipeline,
branch_pipeline_2,
branch_pipeline])
expect(merge_request_2.all_pipelines)
- .to eq([merge_request_pipeline_2,
+ .to eq([detached_merge_request_pipeline_2,
branch_pipeline_2,
branch_pipeline])
end
end
+
+ context 'when detached merge request pipeline is run on head ref of the merge request' do
+ let!(:detached_merge_request_pipeline) do
+ create(:ci_pipeline,
+ source: :merge_request_event,
+ project: project,
+ ref: merge_request.ref_path,
+ sha: shas.second,
+ merge_request: merge_request)
+ end
+
+ it 'sets the pipeline ref to the head ref of the merge request' do
+ expect(detached_merge_request_pipeline.ref).to match(%r{refs/merge-requests/\d+/head})
+ end
+
+ it 'includes the detached merge request pipeline even though the ref is custom path' do
+ expect(merge_request.all_pipelines).to include(detached_merge_request_pipeline)
+ end
+ end
end
end
@@ -1470,6 +1518,37 @@ describe MergeRequest do
end
end
+ context 'when detached merge request pipeline is run on head ref of the merge request' do
+ let!(:pipeline) do
+ create(:ci_pipeline,
+ source: :merge_request_event,
+ project: merge_request.source_project,
+ ref: merge_request.ref_path,
+ sha: sha,
+ merge_request: merge_request)
+ end
+
+ let(:sha) { merge_request.diff_head_sha }
+
+ it 'sets the pipeline ref to the head ref of the merge request' do
+ expect(pipeline.ref).to match(%r{refs/merge-requests/\d+/head})
+ end
+
+ it 'updates correctly even though the target branch name of the merge request is different from the pipeline ref' do
+ expect { subject }
+ .to change { merge_request.reload.head_pipeline }
+ .from(nil).to(pipeline)
+ end
+
+ context 'when sha is not HEAD of the source branch' do
+ let(:sha) { merge_request.diff_base_sha }
+
+ it 'does not update head pipeline' do
+ expect { subject }.not_to change { merge_request.reload.head_pipeline }
+ end
+ end
+ end
+
context 'when there are no pipelines with the diff head sha' do
it 'does not update the head pipeline' do
expect { subject }
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 475fbe56e4d..aadc298ae0b 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -775,4 +775,28 @@ describe Namespace do
end
end
end
+
+ describe '#auto_devops_enabled' do
+ context 'with users' do
+ let(:user) { create(:user) }
+
+ subject { user.namespace.auto_devops_enabled? }
+
+ before do
+ user.namespace.update!(auto_devops_enabled: auto_devops_enabled)
+ end
+
+ context 'when auto devops is explicitly enabled' do
+ let(:auto_devops_enabled) { true }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when auto devops is explicitly disabled' do
+ let(:auto_devops_enabled) { false }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+ end
end
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 385b8a7959f..eb6f6ff5faf 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -208,6 +208,24 @@ describe Note do
end
end
+ describe "edited?" do
+ let(:note) { build(:note, updated_by_id: nil, created_at: Time.now, updated_at: Time.now + 5.hours) }
+
+ context "with updated_by" do
+ it "returns true" do
+ note.updated_by = build(:user)
+
+ expect(note.edited?).to be_truthy
+ end
+ end
+
+ context "without updated_by" do
+ it "returns false" do
+ expect(note.edited?).to be_falsy
+ end
+ end
+ end
+
describe "confidential?" do
it "delegates to noteable" do
issue_note = build(:note, :on_issue)
diff --git a/spec/models/project_services/kubernetes_service_spec.rb b/spec/models/project_services/kubernetes_service_spec.rb
index 47f70e6648a..56e587262ef 100644
--- a/spec/models/project_services/kubernetes_service_spec.rb
+++ b/spec/models/project_services/kubernetes_service_spec.rb
@@ -323,13 +323,14 @@ describe KubernetesService, :use_clean_rails_memory_store_caching do
end
context 'with valid pods' do
- let(:pod) { kube_pod(app: environment.slug) }
+ let(:pod) { kube_pod(environment_slug: environment.slug, project_slug: project.full_path_slug) }
+ let(:pod_with_no_terminal) { kube_pod(environment_slug: environment.slug, project_slug: project.full_path_slug, status: "Pending") }
let(:terminals) { kube_terminals(service, pod) }
before do
stub_reactive_cache(
service,
- pods: [pod, pod, kube_pod(app: "should-be-filtered-out")]
+ pods: [pod, pod, pod_with_no_terminal, kube_pod(environment_slug: "should-be-filtered-out")]
)
end
@@ -360,14 +361,16 @@ describe KubernetesService, :use_clean_rails_memory_store_caching do
context 'when kubernetes responds with valid pods' do
before do
stub_kubeclient_pods
+ stub_kubeclient_deployments # Used by EE
end
- it { is_expected.to eq(pods: [kube_pod]) }
+ it { is_expected.to include(pods: [kube_pod]) }
end
context 'when kubernetes responds with 500s' do
before do
stub_kubeclient_pods(status: 500)
+ stub_kubeclient_deployments(status: 500) # Used by EE
end
it { expect { subject }.to raise_error(Kubeclient::HttpError) }
@@ -376,9 +379,10 @@ describe KubernetesService, :use_clean_rails_memory_store_caching do
context 'when kubernetes responds with 404s' do
before do
stub_kubeclient_pods(status: 404)
+ stub_kubeclient_deployments(status: 404) # Used by EE
end
- it { is_expected.to eq(pods: []) }
+ it { is_expected.to include(pods: []) }
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 71bd7972436..1ea54eeb4f7 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -136,15 +136,6 @@ describe Project do
end
end
- describe '#boards' do
- it 'raises an error when attempting to add more than one board to the project' do
- subject.boards.build
-
- expect { subject.boards.build }.to raise_error(Project::BoardLimitExceeded, 'Number of permitted boards exceeded')
- expect(subject.boards.size).to eq 1
- end
- end
-
describe 'ci_pipelines association' do
it 'returns only pipelines from ci_sources' do
expect(Ci::Pipeline).to receive(:ci_sources).and_call_original
@@ -2388,6 +2379,12 @@ describe Project do
project.change_head(project.default_branch)
end
+ it 'updates commit count' do
+ expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:commit_count])
+
+ project.change_head(project.default_branch)
+ end
+
it 'copies the gitattributes' do
expect(project.repository).to receive(:copy_gitattributes).with(project.default_branch)
project.change_head(project.default_branch)
@@ -3631,12 +3628,36 @@ describe Project do
subject { project.auto_devops_enabled? }
+ context 'when explicitly enabled' do
+ before do
+ create(:project_auto_devops, project: project)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when explicitly disabled' do
+ before do
+ create(:project_auto_devops, project: project, enabled: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
context 'when enabled in settings' do
before do
stub_application_setting(auto_devops_enabled: true)
end
it { is_expected.to be_truthy }
+ end
+
+ context 'when disabled in settings' do
+ before do
+ stub_application_setting(auto_devops_enabled: false)
+ end
+
+ it { is_expected.to be_falsey }
context 'when explicitly enabled' do
before do
@@ -3648,34 +3669,91 @@ describe Project do
context 'when explicitly disabled' do
before do
- create(:project_auto_devops, project: project, enabled: false)
+ create(:project_auto_devops, :disabled, project: project)
end
it { is_expected.to be_falsey }
end
end
- context 'when disabled in settings' do
+ context 'when force_autodevops_on_by_default is enabled for the project' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with group parents' do
+ let(:instance_enabled) { true }
+
before do
- stub_application_setting(auto_devops_enabled: false)
+ stub_application_setting(auto_devops_enabled: instance_enabled)
+ project.update!(namespace: parent_group)
end
- it { is_expected.to be_falsey }
+ context 'when enabled on parent' do
+ let(:parent_group) { create(:group, :auto_devops_enabled) }
- context 'when explicitly enabled' do
- before do
- create(:project_auto_devops, project: project)
+ context 'when auto devops instance enabled' do
+ it { is_expected.to be_truthy }
end
- it { is_expected.to be_truthy }
+ context 'when auto devops instance disabled' do
+ let(:instance_enabled) { false }
+
+ it { is_expected.to be_truthy }
+ end
end
- context 'when force_autodevops_on_by_default is enabled for the project' do
- before do
- Feature.get(:force_autodevops_on_by_default).enable_percentage_of_actors(100)
+ context 'when disabled on parent' do
+ let(:parent_group) { create(:group, :auto_devops_disabled) }
+
+ context 'when auto devops instance enabled' do
+ it { is_expected.to be_falsy }
end
- it { is_expected.to be_truthy }
+ context 'when auto devops instance disabled' do
+ let(:instance_enabled) { false }
+
+ it { is_expected.to be_falsy }
+ end
+ end
+
+ context 'when enabled on root parent', :nested_groups do
+ let(:parent_group) { create(:group, parent: create(:group, :auto_devops_enabled)) }
+
+ context 'when auto devops instance enabled' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when auto devops instance disabled' do
+ let(:instance_enabled) { false }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when explicitly disabled on parent' do
+ let(:parent_group) { create(:group, :auto_devops_disabled, parent: create(:group, :auto_devops_enabled)) }
+
+ it { is_expected.to be_falsy }
+ end
+ end
+
+ context 'when disabled on root parent', :nested_groups do
+ let(:parent_group) { create(:group, parent: create(:group, :auto_devops_disabled)) }
+
+ context 'when auto devops instance enabled' do
+ it { is_expected.to be_falsy }
+ end
+
+ context 'when auto devops instance disabled' do
+ let(:instance_enabled) { false }
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'when explicitly disabled on parent' do
+ let(:parent_group) { create(:group, :auto_devops_disabled, parent: create(:group, :auto_devops_enabled)) }
+
+ it { is_expected.to be_falsy }
+ end
end
end
end
@@ -3722,15 +3800,52 @@ describe Project do
end
end
end
+
+ context 'when enabled on group' do
+ it 'has auto devops implicitly enabled' do
+ project.update(namespace: create(:group, :auto_devops_enabled))
+
+ expect(project).to have_auto_devops_implicitly_enabled
+ end
+ end
+
+ context 'when enabled on parent group' do
+ it 'has auto devops implicitly enabled' do
+ subgroup = create(:group, parent: create(:group, :auto_devops_enabled))
+ project.update(namespace: subgroup)
+
+ expect(project).to have_auto_devops_implicitly_enabled
+ end
+ end
end
describe '#has_auto_devops_implicitly_disabled?' do
+ set(:project) { create(:project) }
+
before do
allow(Feature).to receive(:enabled?).and_call_original
Feature.get(:force_autodevops_on_by_default).enable_percentage_of_actors(0)
end
- set(:project) { create(:project) }
+ context 'when explicitly disabled' do
+ before do
+ create(:project_auto_devops, project: project, enabled: false)
+ end
+
+ it 'does not have auto devops implicitly disabled' do
+ expect(project).not_to have_auto_devops_implicitly_disabled
+ end
+ end
+
+ context 'when explicitly enabled' do
+ before do
+ create(:project_auto_devops, project: project, enabled: true)
+ end
+
+ it 'does not have auto devops implicitly disabled' do
+ expect(project).not_to have_auto_devops_implicitly_disabled
+ end
+ end
context 'when enabled in settings' do
before do
@@ -3753,6 +3868,8 @@ describe Project do
context 'when force_autodevops_on_by_default is enabled for the project' do
before do
+ create(:project_auto_devops, project: project, enabled: false)
+
Feature.get(:force_autodevops_on_by_default).enable_percentage_of_actors(100)
end
@@ -3761,23 +3878,20 @@ describe Project do
end
end
- context 'when explicitly disabled' do
- before do
- create(:project_auto_devops, project: project, enabled: false)
- end
+ context 'when disabled on group' do
+ it 'has auto devops implicitly disabled' do
+ project.update!(namespace: create(:group, :auto_devops_disabled))
- it 'does not have auto devops implicitly disabled' do
- expect(project).not_to have_auto_devops_implicitly_disabled
+ expect(project).to have_auto_devops_implicitly_disabled
end
end
- context 'when explicitly enabled' do
- before do
- create(:project_auto_devops, project: project, enabled: true)
- end
+ context 'when disabled on parent group' do
+ it 'has auto devops implicitly disabled' do
+ subgroup = create(:group, parent: create(:group, :auto_devops_disabled))
+ project.update!(namespace: subgroup)
- it 'does not have auto devops implicitly disabled' do
- expect(project).not_to have_auto_devops_implicitly_disabled
+ expect(project).to have_auto_devops_implicitly_disabled
end
end
end
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb
index 3ccc706edf2..7be8d67ba9e 100644
--- a/spec/models/project_wiki_spec.rb
+++ b/spec/models/project_wiki_spec.rb
@@ -71,6 +71,14 @@ describe ProjectWiki do
expect(project_wiki.create_page("index", "test content")).to be_truthy
end
+ it "creates a new wiki repo with a default commit message" do
+ expect(project_wiki.create_page("index", "test content", :markdown, "")).to be_truthy
+
+ page = project_wiki.find_page('index')
+
+ expect(page.last_version.message).to eq("#{user.username} created page: index")
+ end
+
it "raises CouldNotCreateWikiError if it can't create the wiki repository" do
# Create a fresh project which will not have a wiki
project_wiki = described_class.new(create(:project), user)
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 70630467d24..6599b4e765a 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -1095,65 +1095,69 @@ describe Repository do
end
end
- describe '#exists?' do
- it 'returns true when a repository exists' do
- expect(repository.exists?).to be(true)
- end
-
- it 'returns false if no full path can be constructed' do
- allow(repository).to receive(:full_path).and_return(nil)
-
- expect(repository.exists?).to be(false)
- end
-
- context 'with broken storage', :broken_storage do
- it 'should raise a storage error' do
- expect_to_raise_storage_error { broken_repository.exists? }
- end
- end
-
+ shared_examples 'asymmetric cached method' do |method|
context 'asymmetric caching', :use_clean_rails_memory_store_caching, :request_store do
let(:cache) { repository.send(:cache) }
let(:request_store_cache) { repository.send(:request_store_cache) }
context 'when it returns true' do
before do
- expect(repository.raw_repository).to receive(:exists?).once.and_return(true)
+ expect(repository.raw_repository).to receive(method).once.and_return(true)
end
it 'caches the output in RequestStore' do
expect do
- repository.exists?
- end.to change { request_store_cache.read(:exists?) }.from(nil).to(true)
+ repository.send(method)
+ end.to change { request_store_cache.read(method) }.from(nil).to(true)
end
it 'caches the output in RepositoryCache' do
expect do
- repository.exists?
- end.to change { cache.read(:exists?) }.from(nil).to(true)
+ repository.send(method)
+ end.to change { cache.read(method) }.from(nil).to(true)
end
end
context 'when it returns false' do
before do
- expect(repository.raw_repository).to receive(:exists?).once.and_return(false)
+ expect(repository.raw_repository).to receive(method).once.and_return(false)
end
it 'caches the output in RequestStore' do
expect do
- repository.exists?
- end.to change { request_store_cache.read(:exists?) }.from(nil).to(false)
+ repository.send(method)
+ end.to change { request_store_cache.read(method) }.from(nil).to(false)
end
it 'does NOT cache the output in RepositoryCache' do
expect do
- repository.exists?
- end.not_to change { cache.read(:exists?) }.from(nil)
+ repository.send(method)
+ end.not_to change { cache.read(method) }.from(nil)
end
end
end
end
+ describe '#exists?' do
+ it 'returns true when a repository exists' do
+ expect(repository.exists?).to be(true)
+ end
+
+ it 'returns false if no full path can be constructed' do
+ allow(repository).to receive(:full_path).and_return(nil)
+
+ expect(repository.exists?).to be(false)
+ end
+
+ context 'with broken storage', :broken_storage do
+ it 'should raise a storage error' do
+ expect_to_raise_storage_error { broken_repository.exists? }
+ end
+ end
+
+ it_behaves_like 'asymmetric cached method', :exists?
+ end
+
describe '#has_visible_content?' do
before do
# If raw_repository.has_visible_content? gets called more than once then
@@ -1271,6 +1275,8 @@ describe Repository do
repository.root_ref
repository.root_ref
end
+
+ it_behaves_like 'asymmetric cached method', :root_ref
end
describe '#expire_root_ref_cache' do
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 85b157a9435..1be29d039a7 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -660,6 +660,68 @@ describe User do
end
end
+ describe '#highest_role' do
+ let(:user) { create(:user) }
+
+ let(:group) { create(:group) }
+
+ it 'returns NO_ACCESS if none has been set' do
+ expect(user.highest_role).to eq(Gitlab::Access::NO_ACCESS)
+ end
+
+ it 'returns MAINTAINER if user is maintainer of a project' do
+ create(:project, group: group) do |project|
+ project.add_maintainer(user)
+ end
+
+ expect(user.highest_role).to eq(Gitlab::Access::MAINTAINER)
+ end
+
+ it 'returns the highest role if user is member of multiple projects' do
+ create(:project, group: group) do |project|
+ project.add_maintainer(user)
+ end
+
+ create(:project, group: group) do |project|
+ project.add_developer(user)
+ end
+
+ expect(user.highest_role).to eq(Gitlab::Access::MAINTAINER)
+ end
+
+ it 'returns MAINTAINER if user is maintainer of a group' do
+ create(:group) do |group|
+ group.add_user(user, GroupMember::MAINTAINER)
+ end
+
+ expect(user.highest_role).to eq(Gitlab::Access::MAINTAINER)
+ end
+
+ it 'returns the highest role if user is member of multiple groups' do
+ create(:group) do |group|
+ group.add_user(user, GroupMember::MAINTAINER)
+ end
+
+ create(:group) do |group|
+ group.add_user(user, GroupMember::DEVELOPER)
+ end
+
+ expect(user.highest_role).to eq(Gitlab::Access::MAINTAINER)
+ end
+
+ it 'returns the highest role if user is member of multiple groups and projects' do
+ create(:group) do |group|
+ group.add_user(user, GroupMember::DEVELOPER)
+ end
+
+ create(:project, group: group) do |project|
+ project.add_maintainer(user)
+ end
+
+ expect(user.highest_role).to eq(Gitlab::Access::MAINTAINER)
+ end
+ end
+
describe '#update_tracked_fields!', :clean_gitlab_redis_shared_state do
let(:request) { OpenStruct.new(remote_ip: "127.0.0.1") }
let(:user) { create(:user) }
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 0ad50c6f91f..92bdaa8b8b8 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -76,15 +76,10 @@ describe GroupPolicy do
context 'with no user and public project' do
let(:project) { create(:project, :public) }
- let(:user) { create(:user) }
let(:current_user) { nil }
before do
- Projects::GroupLinks::CreateService.new(
- project,
- user,
- link_group_access: ProjectGroupLink::DEVELOPER
- ).execute(group)
+ create(:project_group_link, project: project, group: group)
end
it { expect_disallowed(:read_group) }
@@ -96,11 +91,7 @@ describe GroupPolicy do
let(:current_user) { create(:user) }
before do
- Projects::GroupLinks::CreateService.new(
- project,
- user,
- link_group_access: ProjectGroupLink::DEVELOPER
- ).execute(group)
+ create(:project_group_link, project: project, group: group)
end
it { expect_disallowed(:read_group) }
diff --git a/spec/policies/identity_provider_policy_spec.rb b/spec/policies/identity_provider_policy_spec.rb
new file mode 100644
index 00000000000..2520469d4e7
--- /dev/null
+++ b/spec/policies/identity_provider_policy_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe IdentityProviderPolicy do
+ subject(:policy) { described_class.new(user, provider) }
+ let(:user) { User.new }
+ let(:provider) { :a_provider }
+
+ describe '#rules' do
+ it { is_expected.to be_allowed(:link) }
+ it { is_expected.to be_allowed(:unlink) }
+
+ context 'when user is anonymous' do
+ let(:user) { nil }
+
+ it { is_expected.not_to be_allowed(:link) }
+ it { is_expected.not_to be_allowed(:unlink) }
+ end
+
+ %w[saml cas3].each do |provider_name|
+ context "when provider is #{provider_name}" do
+ let(:provider) { provider_name }
+
+ it { is_expected.to be_allowed(:link) }
+ it { is_expected.not_to be_allowed(:unlink) }
+ end
+ end
+ end
+end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 772d1fbee2b..c12c4677af1 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -15,7 +15,7 @@ describe ProjectPolicy do
read_project_for_iids read_issue_iid read_label
read_milestone read_project_snippet read_project_member read_note
create_project create_issue create_note upload_file create_merge_request_in
- award_emoji read_release
+ award_emoji
]
end
@@ -24,7 +24,7 @@ describe ProjectPolicy do
download_code fork_project create_project_snippet update_issue
admin_issue admin_label admin_list read_commit_status read_build
read_container_image read_pipeline read_environment read_deployment
- read_merge_request download_wiki_code read_sentry_issue
+ read_merge_request download_wiki_code read_sentry_issue read_release
]
end
diff --git a/spec/presenters/ci/pipeline_presenter_spec.rb b/spec/presenters/ci/pipeline_presenter_spec.rb
index f7ceaf844be..cda07a0ae09 100644
--- a/spec/presenters/ci/pipeline_presenter_spec.rb
+++ b/spec/presenters/ci/pipeline_presenter_spec.rb
@@ -1,6 +1,9 @@
require 'spec_helper'
describe Ci::PipelinePresenter do
+ include Gitlab::Routing
+
+ let(:user) { create(:user) }
let(:project) { create(:project) }
let(:pipeline) { create(:ci_pipeline, project: project) }
@@ -8,6 +11,11 @@ describe Ci::PipelinePresenter do
described_class.new(pipeline)
end
+ before do
+ project.add_developer(user)
+ allow(presenter).to receive(:current_user) { user }
+ end
+
it 'inherits from Gitlab::View::Presenter::Delegated' do
expect(described_class.superclass).to eq(Gitlab::View::Presenter::Delegated)
end
@@ -68,4 +76,130 @@ describe Ci::PipelinePresenter do
end
end
end
+
+ describe '#ref_text' do
+ subject { presenter.ref_text }
+
+ context 'when pipeline is detached merge request pipeline' do
+ let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
+ let(:pipeline) { merge_request.all_pipelines.last }
+
+ it 'returns a correct ref text' do
+ is_expected.to eq("for <a class=\"mr-iid\" href=\"#{project_merge_request_path(merge_request.project, merge_request)}\">#{merge_request.to_reference}</a> " \
+ "with <a class=\"ref-name\" href=\"#{project_commits_path(merge_request.source_project, merge_request.source_branch)}\">#{merge_request.source_branch}</a>")
+ end
+ end
+
+ context 'when pipeline is merge request pipeline' do
+ let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ let(:pipeline) { merge_request.all_pipelines.last }
+
+ it 'returns a correct ref text' do
+ is_expected.to eq("for <a class=\"mr-iid\" href=\"#{project_merge_request_path(merge_request.project, merge_request)}\">#{merge_request.to_reference}</a> " \
+ "with <a class=\"ref-name\" href=\"#{project_commits_path(merge_request.source_project, merge_request.source_branch)}\">#{merge_request.source_branch}</a> " \
+ "into <a class=\"ref-name\" href=\"#{project_commits_path(merge_request.target_project, merge_request.target_branch)}\">#{merge_request.target_branch}</a>")
+ end
+ end
+
+ context 'when pipeline is branch pipeline' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ context 'when ref exists in the repository' do
+ before do
+ allow(pipeline).to receive(:ref_exists?) { true }
+ end
+
+ it 'returns a correct ref text' do
+ is_expected.to eq("for <a class=\"ref-name\" href=\"#{project_commits_path(pipeline.project, pipeline.ref)}\">#{pipeline.ref}</a>")
+ end
+
+ context 'when ref contains malicious script' do
+ let(:pipeline) { create(:ci_pipeline, ref: "<script>alter('1')</script>", project: project) }
+
+ it 'does not include the malicious script' do
+ is_expected.not_to include("<script>alter('1')</script>")
+ end
+ end
+ end
+
+      context 'when ref does not exist in the repository' do
+ before do
+ allow(pipeline).to receive(:ref_exists?) { false }
+ end
+
+ it 'returns a correct ref text' do
+ is_expected.to eq("for <span class=\"ref-name\">#{pipeline.ref}</span>")
+ end
+
+ context 'when ref contains malicious script' do
+ let(:pipeline) { create(:ci_pipeline, ref: "<script>alter('1')</script>", project: project) }
+
+ it 'does not include the malicious script' do
+ is_expected.not_to include("<script>alter('1')</script>")
+ end
+ end
+ end
+ end
+ end
+
+ describe '#link_to_merge_request' do
+ subject { presenter.link_to_merge_request }
+
+ let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
+ let(:pipeline) { merge_request.all_pipelines.last }
+
+ it 'returns a correct link' do
+ is_expected
+ .to include(project_merge_request_path(merge_request.project, merge_request))
+ end
+
+ context 'when pipeline is branch pipeline' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ it 'returns nothing' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#link_to_merge_request_source_branch' do
+ subject { presenter.link_to_merge_request_source_branch }
+
+ let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
+ let(:pipeline) { merge_request.all_pipelines.last }
+
+ it 'returns a correct link' do
+ is_expected
+ .to include(project_commits_path(merge_request.source_project,
+ merge_request.source_branch))
+ end
+
+ context 'when pipeline is branch pipeline' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ it 'returns nothing' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#link_to_merge_request_target_branch' do
+ subject { presenter.link_to_merge_request_target_branch }
+
+ let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ let(:pipeline) { merge_request.all_pipelines.last }
+
+ it 'returns a correct link' do
+ is_expected
+ .to include(project_commits_path(merge_request.target_project, merge_request.target_branch))
+ end
+
+ context 'when pipeline is branch pipeline' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ it 'returns nothing' do
+ is_expected.to be_nil
+ end
+ end
+ end
end
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index 02cefcbc916..4a0f91c4c7a 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -1,8 +1,8 @@
require 'spec_helper'
describe MergeRequestPresenter do
- let(:resource) { create :merge_request, source_project: project }
- let(:project) { create :project }
+ let(:resource) { create(:merge_request, source_project: project) }
+ let(:project) { create(:project) }
let(:user) { create(:user) }
describe '#ci_status' do
@@ -345,6 +345,30 @@ describe MergeRequestPresenter do
end
end
+ describe '#source_branch_commits_path' do
+ subject do
+ described_class.new(resource, current_user: user)
+ .source_branch_commits_path
+ end
+
+ context 'when source branch exists' do
+ it 'returns path' do
+ allow(resource).to receive(:source_branch_exists?) { true }
+
+ is_expected
+ .to eq("/#{resource.source_project.full_path}/commits/#{resource.source_branch}")
+ end
+ end
+
+ context 'when source branch does not exist' do
+ it 'returns nil' do
+ allow(resource).to receive(:source_branch_exists?) { false }
+
+ is_expected.to be_nil
+ end
+ end
+ end
+
describe '#target_branch_tree_path' do
subject do
described_class.new(resource, current_user: user)
@@ -499,4 +523,46 @@ describe MergeRequestPresenter do
end
end
end
+
+ describe '#can_push_to_source_branch' do
+ before do
+ allow(resource).to receive(:source_branch_exists?) { source_branch_exists }
+
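+      # Stub the request-cached UserAccess check so each context below controls
+      # whether the current user can push to the source branch.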
+ allow_any_instance_of(Gitlab::UserAccess::RequestCacheExtension)
+ .to receive(:can_push_to_branch?)
+ .with(resource.source_branch)
+ .and_return(can_push_to_branch)
+ end
+
+ subject do
+ described_class.new(resource, current_user: user).can_push_to_source_branch?
+ end
+
+ context 'when source branch exists AND user can push to source branch' do
+ let(:source_branch_exists) { true }
+ let(:can_push_to_branch) { true }
+
+ it 'returns true' do
+ is_expected.to eq(true)
+ end
+ end
+
+    context 'when source branch does not exist' do
+ let(:source_branch_exists) { false }
+ let(:can_push_to_branch) { true }
+
+ it 'returns false' do
+ is_expected.to eq(false)
+ end
+ end
+
+ context 'when user cannot push to source branch' do
+ let(:source_branch_exists) { true }
+ let(:can_push_to_branch) { false }
+
+ it 'returns false' do
+ is_expected.to eq(false)
+ end
+ end
+ end
end
diff --git a/spec/rack_servers/puma_spec.rb b/spec/rack_servers/puma_spec.rb
index 431fab87857..891df4f1a66 100644
--- a/spec/rack_servers/puma_spec.rb
+++ b/spec/rack_servers/puma_spec.rb
@@ -44,11 +44,9 @@ describe 'Puma' do
end
after(:all) do
- begin
- WebMock.disable_net_connect!(allow_localhost: true)
- Process.kill('TERM', @puma_master_pid)
- rescue Errno::ESRCH
- end
+ WebMock.disable_net_connect!(allow_localhost: true)
+ Process.kill('TERM', @puma_master_pid)
+ rescue Errno::ESRCH
end
def wait_puma_boot!(master_pid, ready_file)
diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb
index cd85151ec1b..b184c92824a 100644
--- a/spec/requests/api/internal_spec.rb
+++ b/spec/requests/api/internal_spec.rb
@@ -26,6 +26,21 @@ describe API::Internal do
expect(json_response['redis']).to be(false)
end
+
+ context 'authenticating' do
+ it 'authenticates using a header' do
+ get api("/internal/check"),
+ headers: { API::Helpers::GITLAB_SHARED_SECRET_HEADER => Base64.encode64(secret_token) }
+
+ expect(response).to have_gitlab_http_status(200)
+ end
+
+ it 'returns 401 when no credentials provided' do
+ get(api("/internal/check"))
+
+ expect(response).to have_gitlab_http_status(401)
+ end
+ end
end
describe 'GET /internal/broadcast_message' do
@@ -237,6 +252,14 @@ describe API::Internal do
expect(json_response['name']).to eq(user.name)
end
+
+ it 'responds successfully when a user is not found' do
+ get(api("/internal/discover"), params: { username: 'noone', secret_token: secret_token })
+
+ expect(response).to have_gitlab_http_status(200)
+
+ expect(response.body).to eq('null')
+ end
end
describe "GET /internal/authorized_keys" do
@@ -324,7 +347,6 @@ describe API::Internal do
expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy
- expect(json_response["repository_path"]).to eq('/')
expect(json_response["gl_project_path"]).to eq(project.wiki.full_path)
expect(json_response["gl_repository"]).to eq("wiki-#{project.id}")
expect(user.reload.last_activity_on).to be_nil
@@ -337,7 +359,6 @@ describe API::Internal do
expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy
- expect(json_response["repository_path"]).to eq('/')
expect(json_response["gl_project_path"]).to eq(project.wiki.full_path)
expect(json_response["gl_repository"]).to eq("wiki-#{project.id}")
expect(user.reload.last_activity_on).to eql(Date.today)
@@ -350,7 +371,6 @@ describe API::Internal do
expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy
- expect(json_response["repository_path"]).to eq('/')
expect(json_response["gl_repository"]).to eq("project-#{project.id}")
expect(json_response["gl_project_path"]).to eq(project.full_path)
expect(json_response["gitaly"]).not_to be_nil
@@ -370,7 +390,6 @@ describe API::Internal do
expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy
- expect(json_response["repository_path"]).to eq('/')
expect(json_response["gl_repository"]).to eq("project-#{project.id}")
expect(json_response["gl_project_path"]).to eq(project.full_path)
expect(json_response["gitaly"]).not_to be_nil
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index fee6312a9c7..4259fda7f04 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -4,32 +4,406 @@ describe API::MergeRequests do
include ProjectForksHelper
let(:base_time) { Time.now }
- let(:user) { create(:user) }
- let(:admin) { create(:user, :admin) }
- let(:non_member) { create(:user) }
- let!(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace, only_allow_merge_if_pipeline_succeeds: false) }
+ set(:user) { create(:user) }
+ set(:admin) { create(:user, :admin) }
+ let(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace, only_allow_merge_if_pipeline_succeeds: false) }
let(:milestone) { create(:milestone, title: '1.0.0', project: project) }
- let(:pipeline) { create(:ci_empty_pipeline) }
- let(:milestone1) { create(:milestone, title: '0.9', project: project) }
+ let(:milestone1) { create(:milestone, title: '0.9', project: project) }
let!(:merge_request) { create(:merge_request, :simple, milestone: milestone1, author: user, assignee: user, source_project: project, target_project: project, title: "Test", created_at: base_time) }
let!(:merge_request_closed) { create(:merge_request, state: "closed", milestone: milestone1, author: user, assignee: user, source_project: project, target_project: project, title: "Closed test", created_at: base_time + 1.second) }
let!(:merge_request_merged) { create(:merge_request, state: "merged", author: user, assignee: user, source_project: project, target_project: project, title: "Merged test", created_at: base_time + 2.seconds, merge_commit_sha: '9999999999999999999999999999999999999999') }
let!(:merge_request_locked) { create(:merge_request, state: "locked", milestone: milestone1, author: user, assignee: user, source_project: project, target_project: project, title: "Locked test", created_at: base_time + 1.second) }
let!(:note) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "a comment on a MR") }
let!(:note2) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "another comment on a MR") }
- let!(:label) do
- create(:label, title: 'label', color: '#FFAABB', project: project)
- end
- let!(:label2) { create(:label, title: 'a-test', color: '#FFFFFF', project: project) }
- let!(:label_link) { create(:label_link, label: label, target: merge_request) }
- let!(:label_link2) { create(:label_link, label: label2, target: merge_request) }
- let!(:downvote) { create(:award_emoji, :downvote, awardable: merge_request) }
- let!(:upvote) { create(:award_emoji, :upvote, awardable: merge_request) }
+ let(:label) { create(:label, title: 'label', color: '#FFAABB', project: project) }
+ let(:label2) { create(:label, title: 'a-test', color: '#FFFFFF', project: project) }
before do
project.add_reporter(user)
end
+ shared_context 'with labels' do
+ before do
+ create(:label_link, label: label, target: merge_request)
+ create(:label_link, label: label2, target: merge_request)
+ end
+ end
+
+ shared_examples 'merge requests list' do
+ context 'when unauthenticated' do
+ it 'returns merge requests for public projects' do
+ get api(endpoint_path)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ end
+ end
+
+ context 'when authenticated' do
+ it 'avoids N+1 queries' do
+ control = ActiveRecord::QueryRecorder.new do
+ get api(endpoint_path, user)
+ end
+
+ create(:merge_request, state: 'closed', milestone: milestone1, author: user, assignee: user, source_project: project, target_project: project, title: 'Test', created_at: base_time)
+
+ merge_request = create(:merge_request, milestone: milestone1, author: user, assignee: user, source_project: project, target_project: project, title: 'Test', created_at: base_time)
+
+ merge_request.metrics.update!(merged_by: user,
+ latest_closed_by: user,
+ latest_closed_at: 1.hour.ago,
+ merged_at: 2.hours.ago)
+
+ expect do
+ get api(endpoint_path, user)
+ end.not_to exceed_query_limit(control)
+ end
+
+ context 'with labels' do
+ include_context 'with labels'
+
+ it 'returns an array of all merge_requests' do
+ get api(endpoint_path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(4)
+ expect(json_response.last['title']).to eq(merge_request.title)
+ expect(json_response.last).to have_key('web_url')
+ expect(json_response.last['sha']).to eq(merge_request.diff_head_sha)
+ expect(json_response.last['merge_commit_sha']).to be_nil
+ expect(json_response.last['merge_commit_sha']).to eq(merge_request.merge_commit_sha)
+ expect(json_response.last['downvotes']).to eq(0)
+ expect(json_response.last['upvotes']).to eq(0)
+ expect(json_response.last['labels']).to eq([label2.title, label.title])
+ expect(json_response.first['title']).to eq(merge_request_merged.title)
+ expect(json_response.first['sha']).to eq(merge_request_merged.diff_head_sha)
+ expect(json_response.first['merge_commit_sha']).not_to be_nil
+ expect(json_response.first['merge_commit_sha']).to eq(merge_request_merged.merge_commit_sha)
+ end
+ end
+
+ it 'returns an array of all merge_requests using simple mode' do
+ path = endpoint_path + '?view=simple'
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response.last.keys).to match_array(%w(id iid title web_url created_at description project_id state updated_at))
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(4)
+ expect(json_response.last['iid']).to eq(merge_request.iid)
+ expect(json_response.last['title']).to eq(merge_request.title)
+ expect(json_response.last).to have_key('web_url')
+ expect(json_response.first['iid']).to eq(merge_request_merged.iid)
+ expect(json_response.first['title']).to eq(merge_request_merged.title)
+ expect(json_response.first).to have_key('web_url')
+ end
+
+      it 'returns an array of all merge_requests when state param is blank' do
+ path = endpoint_path + '?state'
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(4)
+ expect(json_response.last['title']).to eq(merge_request.title)
+ end
+
+ it 'returns an array of open merge_requests' do
+ path = endpoint_path + '?state=opened'
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.last['title']).to eq(merge_request.title)
+ end
+
+ it 'returns an array of closed merge_requests' do
+ path = endpoint_path + '?state=closed'
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['title']).to eq(merge_request_closed.title)
+ end
+
+ it 'returns an array of merged merge_requests' do
+ path = endpoint_path + '?state=merged'
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['title']).to eq(merge_request_merged.title)
+ end
+
+ it 'matches V4 response schema' do
+ get api(endpoint_path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to match_response_schema('public_api/v4/merge_requests')
+ end
+
+      it 'returns an empty array if no merge request matches milestone' do
+ get api(endpoint_path, user), params: { milestone: '1.0.0' }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(0)
+ end
+
+ it 'returns an empty array if milestone does not exist' do
+ get api(endpoint_path, user), params: { milestone: 'foo' }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(0)
+ end
+
+ it 'returns an array of merge requests in given milestone' do
+ get api(endpoint_path, user), params: { milestone: '0.9' }
+
+        closed_mrs = json_response.select { |mr| mr['id'] == merge_request_closed.id }
+        expect(closed_mrs.length).to eq(1)
+        expect(closed_mrs.first['title']).to eq merge_request_closed.title
+ end
+
+ it 'returns an array of merge requests matching state in milestone' do
+ get api(endpoint_path, user), params: { milestone: '0.9', state: 'closed' }
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['id']).to eq(merge_request_closed.id)
+ end
+
+ context 'with labels' do
+ include_context 'with labels'
+
+ it 'returns an array of labeled merge requests' do
+ path = endpoint_path + "?labels=#{label.title}"
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['labels']).to eq([label2.title, label.title])
+ end
+
+ it 'returns an array of labeled merge requests where all labels match' do
+ path = endpoint_path + "?labels=#{label.title},foo,bar"
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(0)
+ end
+
+ it 'returns an empty array if no merge request matches labels' do
+ path = endpoint_path + '?labels=foo,bar'
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(0)
+ end
+
+        it 'returns an array of labeled merge requests where all labels match when labels are passed as array parameters' do
+ path = endpoint_path + "?labels[]=#{label.title}&labels[]=#{label2.title}"
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['labels']).to eq([label2.title, label.title])
+ end
+
+        it 'returns an array of labeled merge requests when label titles include surrounding whitespace' do
+ get api(endpoint_path, user), params: { labels: [" #{label.title} ", " #{label2.title} "] }
+
+ expect_paginated_array_response
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['labels']).to eq([label2.title, label.title])
+ expect(json_response.first['id']).to eq(merge_request.id)
+ end
+
+        it 'returns an array of labeled merge requests when labels are passed as a comma-separated string' do
+ get api(endpoint_path, user), params: { labels: ["#{label.title} , #{label2.title}"] }
+
+ expect_paginated_array_response
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['labels']).to eq([label2.title, label.title])
+ expect(json_response.first['id']).to eq(merge_request.id)
+ end
+
+ it 'returns an array of merge requests with any label when filtering by any label' do
+ get api(endpoint_path, user), params: { labels: IssuesFinder::FILTER_ANY }
+
+ expect_paginated_array_response
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['id']).to eq(merge_request.id)
+ end
+
+ it 'returns an array of merge requests without a label when filtering by no label' do
+ get api(endpoint_path, user), params: { labels: IssuesFinder::FILTER_NONE }
+
+ response_ids = json_response.map { |merge_request| merge_request['id'] }
+
+ expect_paginated_array_response
+ expect(response_ids).to contain_exactly(merge_request_closed.id, merge_request_merged.id, merge_request_locked.id)
+ end
+ end
+
+ it 'returns an array of labeled merge requests that are merged for a milestone' do
+ bug_label = create(:label, title: 'bug', color: '#FFAABB', project: project)
+
+ mr1 = create(:merge_request, state: 'merged', source_project: project, target_project: project, milestone: milestone)
+ mr2 = create(:merge_request, state: 'merged', source_project: project, target_project: project, milestone: milestone1)
+ mr3 = create(:merge_request, state: 'closed', source_project: project, target_project: project, milestone: milestone1)
+ _mr = create(:merge_request, state: 'merged', source_project: project, target_project: project, milestone: milestone1)
+
+ create(:label_link, label: bug_label, target: mr1)
+ create(:label_link, label: bug_label, target: mr2)
+ create(:label_link, label: bug_label, target: mr3)
+
+ path = endpoint_path + "?labels=#{bug_label.title}&milestone=#{milestone1.title}&state=merged"
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(1)
+ expect(json_response.first['id']).to eq(mr2.id)
+ end
+
+ context 'with ordering' do
+ before do
+ @mr_later = mr_with_later_created_and_updated_at_time
+ @mr_earlier = mr_with_earlier_created_and_updated_at_time
+ end
+
+ it 'returns an array of merge_requests in ascending order' do
+ path = endpoint_path + '?sort=asc'
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(4)
+ response_dates = json_response.map { |merge_request| merge_request['created_at'] }
+ expect(response_dates).to eq(response_dates.sort)
+ end
+
+ it 'returns an array of merge_requests in descending order' do
+ path = endpoint_path + '?sort=desc'
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(4)
+ response_dates = json_response.map { |merge_request| merge_request['created_at'] }
+ expect(response_dates).to eq(response_dates.sort.reverse)
+ end
+
+ context '2 merge requests with equal created_at' do
+ let!(:closed_mr2) do
+ create :merge_request,
+ state: 'closed',
+ milestone: milestone1,
+ author: user,
+ assignee: user,
+ source_project: project,
+ target_project: project,
+ title: "Test",
+ created_at: @mr_earlier.created_at
+ end
+
+          it 'paginates the first page correctly' do
+ get api("#{endpoint_path}?sort=desc&per_page=4", user)
+
+ response_ids = json_response.map { |merge_request| merge_request['id'] }
+
+ expect(response_ids).to include(closed_mr2.id)
+ expect(response_ids).not_to include(@mr_earlier.id)
+ end
+
+          it 'paginates the second page correctly' do
+ get api("#{endpoint_path}?sort=desc&per_page=4&page=2", user)
+
+ response_ids = json_response.map { |merge_request| merge_request['id'] }
+
+ expect(response_ids).not_to include(closed_mr2.id)
+ expect(response_ids).to include(@mr_earlier.id)
+ end
+ end
+
+ it 'returns an array of merge_requests ordered by updated_at' do
+ path = endpoint_path + '?order_by=updated_at'
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(4)
+ response_dates = json_response.map { |merge_request| merge_request['updated_at'] }
+ expect(response_dates).to eq(response_dates.sort.reverse)
+ end
+
+ it 'returns an array of merge_requests ordered by created_at' do
+ path = endpoint_path + '?order_by=created_at&sort=asc'
+
+ get api(path, user)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(4)
+ response_dates = json_response.map { |merge_request| merge_request['created_at'] }
+ expect(response_dates).to eq(response_dates.sort)
+ end
+ end
+
+ context 'source_branch param' do
+ it 'returns merge requests with the given source branch' do
+ get api(endpoint_path, user), params: { source_branch: merge_request_closed.source_branch, state: 'all' }
+
+ expect_response_contain_exactly(merge_request_closed, merge_request_merged, merge_request_locked)
+ end
+ end
+
+ context 'target_branch param' do
+ it 'returns merge requests with the given target branch' do
+ get api(endpoint_path, user), params: { target_branch: merge_request_closed.target_branch, state: 'all' }
+
+ expect_response_contain_exactly(merge_request_closed, merge_request_merged, merge_request_locked)
+ end
+ end
+ end
+ end
+
describe 'route shadowing' do
include GrapePathHelpers::NamedRouteMatcher
@@ -356,6 +730,9 @@ describe API::MergeRequests do
describe "GET /projects/:id/merge_requests/:merge_request_iid" do
it 'exposes known attributes' do
+ create(:award_emoji, :downvote, awardable: merge_request)
+ create(:award_emoji, :upvote, awardable: merge_request)
+
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user)
expect(response).to have_gitlab_http_status(200)
@@ -405,6 +782,8 @@ describe API::MergeRequests do
end
context 'merge_request_metrics' do
+ let(:pipeline) { create(:ci_empty_pipeline) }
+
before do
merge_request.metrics.update!(merged_by: user,
latest_closed_by: user,
diff --git a/spec/requests/api/project_clusters_spec.rb b/spec/requests/api/project_clusters_spec.rb
index 9bab1f95150..4e42e233b4c 100644
--- a/spec/requests/api/project_clusters_spec.rb
+++ b/spec/requests/api/project_clusters_spec.rb
@@ -331,7 +331,6 @@ describe API::ProjectClusters do
it 'should update cluster attributes' do
expect(cluster.platform_kubernetes.namespace).to eq('new-namespace')
- expect(cluster.kubernetes_namespace.namespace).to eq('new-namespace')
end
end
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 1f317971a66..71ec091c42c 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -4,12 +4,14 @@ describe API::Releases do
let(:project) { create(:project, :repository, :private) }
let(:maintainer) { create(:user) }
let(:reporter) { create(:user) }
+ let(:guest) { create(:user) }
let(:non_project_member) { create(:user) }
let(:commit) { create(:commit, project: project) }
before do
project.add_maintainer(maintainer)
project.add_reporter(reporter)
+ project.add_guest(guest)
project.repository.add_tag(maintainer, 'v0.1', commit.id)
project.repository.add_tag(maintainer, 'v0.2', commit.id)
@@ -66,6 +68,24 @@ describe API::Releases do
end
end
+ context 'when user is a guest' do
+ it 'responds 403 Forbidden' do
+ get api("/projects/#{project.id}/releases", guest)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'when project is public' do
+ let(:project) { create(:project, :repository, :public) }
+
+ it 'responds 200 OK' do
+ get api("/projects/#{project.id}/releases", guest)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
context 'when user is not a project member' do
it 'cannot find the project' do
get api("/projects/#{project.id}/releases", non_project_member)
@@ -189,6 +209,24 @@ describe API::Releases do
end
end
end
+
+ context 'when user is a guest' do
+ it 'responds 403 Forbidden' do
+ get api("/projects/#{project.id}/releases/v0.1", guest)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'when project is public' do
+ let(:project) { create(:project, :repository, :public) }
+
+ it 'responds 200 OK' do
+ get api("/projects/#{project.id}/releases/v0.1", guest)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
end
context 'when specified tag is not found in the project' do
diff --git a/spec/requests/api/runner_spec.rb b/spec/requests/api/runner_spec.rb
index 9087cccb759..3ccedd8dd06 100644
--- a/spec/requests/api/runner_spec.rb
+++ b/spec/requests/api/runner_spec.rb
@@ -918,6 +918,15 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
it { expect(job).to be_job_execution_timeout }
end
+
+ context 'when failure_reason is unmet_prerequisites' do
+ before do
+ update_job(state: 'failed', failure_reason: 'unmet_prerequisites')
+ job.reload
+ end
+
+ it { expect(job).to be_unmet_prerequisites }
+ end
end
context 'when trace is given' do
diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb
index c48ca832c85..49672591b3b 100644
--- a/spec/requests/api/search_spec.rb
+++ b/spec/requests/api/search_spec.rb
@@ -77,6 +77,28 @@ describe API::Search do
it_behaves_like 'response is correct', schema: 'public_api/v4/milestones'
end
+ context 'for users scope' do
+ before do
+ create(:user, name: 'billy')
+
+ get api('/search', user), params: { scope: 'users', search: 'billy' }
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/user/basics'
+
+ context 'when users search feature is disabled' do
+ before do
+ allow(Feature).to receive(:disabled?).with(:users_search, default_enabled: true).and_return(true)
+
+ get api('/search', user), params: { scope: 'users', search: 'billy' }
+ end
+
+ it 'returns 400 error' do
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+ end
+
context 'for snippet_titles scope' do
before do
create(:snippet, :public, title: 'awesome snippet', content: 'snippet content')
@@ -192,6 +214,40 @@ describe API::Search do
it_behaves_like 'response is correct', schema: 'public_api/v4/milestones'
end
+
+ context 'for users scope' do
+ before do
+ user = create(:user, name: 'billy')
+ create(:group_member, :developer, user: user, group: group)
+
+ get api("/groups/#{group.id}/search", user), params: { scope: 'users', search: 'billy' }
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/user/basics'
+
+ context 'when users search feature is disabled' do
+ before do
+ allow(Feature).to receive(:disabled?).with(:users_search, default_enabled: true).and_return(true)
+
+ get api("/groups/#{group.id}/search", user), params: { scope: 'users', search: 'billy' }
+ end
+
+ it 'returns 400 error' do
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+ end
+
+ context 'for users scope with group path as id' do
+ before do
+ user1 = create(:user, name: 'billy')
+ create(:group_member, :developer, user: user1, group: group)
+
+ get api("/groups/#{CGI.escape(group.full_path)}/search", user), params: { scope: 'users', search: 'billy' }
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/user/basics'
+ end
end
end
@@ -269,6 +325,29 @@ describe API::Search do
it_behaves_like 'response is correct', schema: 'public_api/v4/milestones'
end
+ context 'for users scope' do
+ before do
+ user1 = create(:user, name: 'billy')
+ create(:project_member, :developer, user: user1, project: project)
+
+ get api("/projects/#{project.id}/search", user), params: { scope: 'users', search: 'billy' }
+ end
+
+ it_behaves_like 'response is correct', schema: 'public_api/v4/user/basics'
+
+ context 'when users search feature is disabled' do
+ before do
+ allow(Feature).to receive(:disabled?).with(:users_search, default_enabled: true).and_return(true)
+
+ get api("/projects/#{project.id}/search", user), params: { scope: 'users', search: 'billy' }
+ end
+
+ it 'returns 400 error' do
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+ end
+
context 'for notes scope' do
before do
create(:note_on_merge_request, project: project, note: 'awesome note')
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index a879426589d..b84202364e1 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -68,6 +68,13 @@ describe API::Users do
expect(json_response.size).to eq(0)
end
+ it "does not return the highest role" do
+ get api("/users"), params: { username: user.username }
+
+ expect(response).to match_response_schema('public_api/v4/user/basics')
+ expect(json_response.first.keys).not_to include 'highest_role'
+ end
+
context "when public level is restricted" do
before do
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
@@ -286,6 +293,13 @@ describe API::Users do
expect(json_response.keys).not_to include 'is_admin'
end
+ it "does not return the user's `highest_role`" do
+ get api("/users/#{user.id}", user)
+
+ expect(response).to match_response_schema('public_api/v4/user/basic')
+ expect(json_response.keys).not_to include 'highest_role'
+ end
+
context 'when authenticated as admin' do
it 'includes the `is_admin` field' do
get api("/users/#{user.id}", admin)
@@ -300,6 +314,12 @@ describe API::Users do
expect(response).to match_response_schema('public_api/v4/user/admin')
expect(json_response.keys).to include 'created_at'
end
+ it 'includes the `highest_role` field' do
+ get api("/users/#{user.id}", admin)
+
+ expect(response).to match_response_schema('public_api/v4/user/admin')
+ expect(json_response['highest_role']).to be(0)
+ end
end
context 'for an anonymous user' do
diff --git a/spec/routing/api_routing_spec.rb b/spec/routing/api_routing_spec.rb
index 5fde4bd885b..3c48ead4ff2 100644
--- a/spec/routing/api_routing_spec.rb
+++ b/spec/routing/api_routing_spec.rb
@@ -7,25 +7,17 @@ describe 'api', 'routing' do
end
it 'does not route to the GraphqlController' do
- expect(get('/api/graphql')).not_to route_to('graphql#execute')
- end
-
- it 'does not expose graphiql' do
- expect(get('/-/graphql-explorer')).not_to route_to('graphiql/rails/editors#show')
+ expect(post('/api/graphql')).not_to route_to('graphql#execute')
end
end
- context 'when graphql is disabled' do
+ context 'when graphql is enabled' do
before do
stub_feature_flags(graphql: true)
end
it 'routes to the GraphqlController' do
- expect(get('/api/graphql')).not_to route_to('graphql#execute')
- end
-
- it 'exposes graphiql' do
- expect(get('/-/graphql-explorer')).not_to route_to('graphiql/rails/editors#show')
+ expect(post('/api/graphql')).to route_to('graphql#execute')
end
end
end
diff --git a/spec/routing/group_routing_spec.rb b/spec/routing/group_routing_spec.rb
index 71788028cbf..53271550e8b 100644
--- a/spec/routing/group_routing_spec.rb
+++ b/spec/routing/group_routing_spec.rb
@@ -17,6 +17,10 @@ describe "Groups", "routing" do
expect(get("/#{group_path}")).to route_to('groups#show', id: group_path)
end
+ it "to #details" do
+ expect(get("/groups/#{group_path}/-/details")).to route_to('groups#details', id: group_path)
+ end
+
it "to #activity" do
expect(get("/groups/#{group_path}/-/activity")).to route_to('groups#activity', id: group_path)
end
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
index 11040862129..1d992e8a483 100644
--- a/spec/serializers/pipeline_entity_spec.rb
+++ b/spec/serializers/pipeline_entity_spec.rb
@@ -3,13 +3,16 @@ require 'spec_helper'
describe PipelineEntity do
include Gitlab::Routing
+ set(:project) { create(:project) }
set(:user) { create(:user) }
+ set(:project) { create(:project) }
let(:request) { double('request') }
before do
stub_not_protect_default_branch
allow(request).to receive(:current_user).and_return(user)
+ allow(request).to receive(:project).and_return(project)
end
let(:entity) do
@@ -132,12 +135,12 @@ describe PipelineEntity do
end
context 'when pipeline is detached merge request pipeline' do
- let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
+ let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
let(:project) { merge_request.target_project }
let(:pipeline) { merge_request.merge_request_pipelines.first }
it 'makes detached flag true' do
- expect(subject[:flags][:detached]).to be_truthy
+ expect(subject[:flags][:detached_merge_request_pipeline]).to be_truthy
end
context 'when user is a developer' do
@@ -156,13 +159,13 @@ describe PipelineEntity do
expect(subject[:merge_request][:source_branch])
.to eq(merge_request.source_branch)
- expect(project_branch_path(project, merge_request.source_branch))
+ expect(project_commits_path(project, merge_request.source_branch))
.to include(subject[:merge_request][:source_branch_path])
expect(subject[:merge_request][:target_branch])
.to eq(merge_request.target_branch)
- expect(project_branch_path(project, merge_request.target_branch))
+ expect(project_commits_path(project, merge_request.target_branch))
.to include(subject[:merge_request][:target_branch_path])
end
end
@@ -173,5 +176,19 @@ describe PipelineEntity do
end
end
end
+
+ context 'when pipeline is merge request pipeline' do
+ let(:merge_request) { create(:merge_request, :with_merge_request_pipeline, merge_sha: 'abc') }
+ let(:project) { merge_request.target_project }
+ let(:pipeline) { merge_request.merge_request_pipelines.first }
+
+ it 'makes detached flag false' do
+ expect(subject[:flags][:detached_merge_request_pipeline]).to be_falsy
+ end
+
+    it 'makes attached flag true' do
+ expect(subject[:flags][:merge_request_pipeline]).to be_truthy
+ end
+ end
end
end
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index a21487938a0..0fdd675aa01 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -5,7 +5,7 @@ describe PipelineSerializer do
set(:user) { create(:user) }
let(:serializer) do
- described_class.new(current_user: user)
+ described_class.new(current_user: user, project: project)
end
before do
@@ -102,20 +102,20 @@ describe PipelineSerializer do
let!(:merge_request_1) do
create(:merge_request,
- :with_merge_request_pipeline,
+ :with_detached_merge_request_pipeline,
target_project: project,
target_branch: 'master',
source_project: project,
- source_branch: 'feature-1')
+ source_branch: 'feature')
end
let!(:merge_request_2) do
create(:merge_request,
- :with_merge_request_pipeline,
+ :with_detached_merge_request_pipeline,
target_project: project,
target_branch: 'master',
source_project: project,
- source_branch: 'feature-2')
+ source_branch: '2-mb-file')
end
before do
diff --git a/spec/services/auth/container_registry_authentication_service_spec.rb b/spec/services/auth/container_registry_authentication_service_spec.rb
index 8021bd338e0..c9d85e96750 100644
--- a/spec/services/auth/container_registry_authentication_service_spec.rb
+++ b/spec/services/auth/container_registry_authentication_service_spec.rb
@@ -88,6 +88,12 @@ describe Auth::ContainerRegistryAuthenticationService do
end
end
+ shared_examples 'a deletable since registry 2.7' do
+ it_behaves_like 'an accessible' do
+ let(:actions) { ['delete'] }
+ end
+ end
+
shared_examples 'a pullable' do
it_behaves_like 'an accessible' do
let(:actions) { ['pull'] }
@@ -184,6 +190,19 @@ describe Auth::ContainerRegistryAuthenticationService do
it_behaves_like 'not a container repository factory'
end
+ context 'disallow developer to delete images since registry 2.7' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ let(:current_params) do
+ { scopes: ["repository:#{project.full_path}:delete"] }
+ end
+
+ it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
+ end
+
context 'allow reporter to pull images' do
before do
project.add_reporter(current_user)
@@ -212,6 +231,19 @@ describe Auth::ContainerRegistryAuthenticationService do
it_behaves_like 'not a container repository factory'
end
+ context 'disallow reporter to delete images since registry 2.7' do
+ before do
+ project.add_reporter(current_user)
+ end
+
+ let(:current_params) do
+ { scopes: ["repository:#{project.full_path}:delete"] }
+ end
+
+ it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
+ end
+
context 'return a least of privileges' do
before do
project.add_reporter(current_user)
@@ -250,6 +282,19 @@ describe Auth::ContainerRegistryAuthenticationService do
it_behaves_like 'an inaccessible'
it_behaves_like 'not a container repository factory'
end
+
+    context 'disallow guest to delete images since registry 2.7' do
+ before do
+ project.add_guest(current_user)
+ end
+
+ let(:current_params) do
+ { scopes: ["repository:#{project.full_path}:delete"] }
+ end
+
+ it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
+ end
end
context 'for public project' do
@@ -282,6 +327,15 @@ describe Auth::ContainerRegistryAuthenticationService do
it_behaves_like 'not a container repository factory'
end
+ context 'disallow anyone to delete images since registry 2.7' do
+ let(:current_params) do
+ { scopes: ["repository:#{project.full_path}:delete"] }
+ end
+
+ it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
+ end
+
context 'when repository name is invalid' do
let(:current_params) do
{ scopes: ['repository:invalid:push'] }
@@ -322,6 +376,15 @@ describe Auth::ContainerRegistryAuthenticationService do
it_behaves_like 'an inaccessible'
it_behaves_like 'not a container repository factory'
end
+
+ context 'disallow anyone to delete images since registry 2.7' do
+ let(:current_params) do
+ { scopes: ["repository:#{project.full_path}:delete"] }
+ end
+
+ it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
+ end
end
context 'for external user' do
@@ -344,6 +407,16 @@ describe Auth::ContainerRegistryAuthenticationService do
it_behaves_like 'an inaccessible'
it_behaves_like 'not a container repository factory'
end
+
+ context 'disallow anyone to delete images since registry 2.7' do
+ let(:current_user) { create(:user, external: true) }
+ let(:current_params) do
+ { scopes: ["repository:#{project.full_path}:delete"] }
+ end
+
+ it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
+ end
end
end
end
@@ -371,6 +444,16 @@ describe Auth::ContainerRegistryAuthenticationService do
let(:project) { current_project }
end
end
+
+ context 'allow to delete images since registry 2.7' do
+ let(:current_params) do
+ { scopes: ["repository:#{current_project.full_path}:delete"] }
+ end
+
+ it_behaves_like 'a deletable since registry 2.7' do
+ let(:project) { current_project }
+ end
+ end
end
context 'build authorized as user' do
@@ -419,6 +502,16 @@ describe Auth::ContainerRegistryAuthenticationService do
end
end
+ context 'disallow to delete images since registry 2.7' do
+ let(:current_params) do
+ { scopes: ["repository:#{current_project.full_path}:delete"] }
+ end
+
+ it_behaves_like 'an inaccessible' do
+ let(:project) { current_project }
+ end
+ end
+
context 'for other projects' do
context 'when pulling' do
let(:current_params) do
diff --git a/spec/services/ci/prepare_build_service_spec.rb b/spec/services/ci/prepare_build_service_spec.rb
new file mode 100644
index 00000000000..1797f8f964f
--- /dev/null
+++ b/spec/services/ci/prepare_build_service_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::PrepareBuildService do
+ describe '#execute' do
+ let(:build) { create(:ci_build, :preparing) }
+
+ subject { described_class.new(build).execute }
+
+ before do
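+      # Each context below defines the prerequisites list this build reports.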
+ allow(build).to receive(:prerequisites).and_return(prerequisites)
+ end
+
+ shared_examples 'build enqueueing' do
+ it 'enqueues the build' do
+ expect(build).to receive(:enqueue).once
+
+ subject
+ end
+ end
+
+ context 'build has unmet prerequisites' do
+ let(:prerequisite) { double(complete!: true) }
+ let(:prerequisites) { [prerequisite] }
+
+ it 'completes each prerequisite' do
+ expect(prerequisites).to all(receive(:complete!))
+
+ subject
+ end
+
+ include_examples 'build enqueueing'
+
+ context 'prerequisites fail to complete' do
+ before do
+ allow(build).to receive(:enqueue).and_return(false)
+ end
+
+ it 'drops the build' do
+ expect(build).to receive(:drop!).with(:unmet_prerequisites).once
+
+ subject
+ end
+ end
+ end
+
+ context 'build has no prerequisites' do
+ let(:prerequisites) { [] }
+
+ include_examples 'build enqueueing'
+ end
+ end
+end
diff --git a/spec/services/emails/create_service_spec.rb b/spec/services/emails/create_service_spec.rb
index 54692c88623..87f93ec97c9 100644
--- a/spec/services/emails/create_service_spec.rb
+++ b/spec/services/emails/create_service_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Emails::CreateService do
diff --git a/spec/services/emails/destroy_service_spec.rb b/spec/services/emails/destroy_service_spec.rb
index c3204fac3df..5abe8da2529 100644
--- a/spec/services/emails/destroy_service_spec.rb
+++ b/spec/services/emails/destroy_service_spec.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
require 'spec_helper'
describe Emails::DestroyService do
diff --git a/spec/services/groups/auto_devops_service_spec.rb b/spec/services/groups/auto_devops_service_spec.rb
new file mode 100644
index 00000000000..7f8ab517cef
--- /dev/null
+++ b/spec/services/groups/auto_devops_service_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe Groups::AutoDevopsService, '#execute' do
+ set(:group) { create(:group) }
+ set(:user) { create(:user) }
+ let(:group_params) { { auto_devops_enabled: '0' } }
+ let(:service) { described_class.new(group, user, group_params) }
+
+ context 'when user does not have enough privileges' do
+ it 'raises exception' do
+ group.add_developer(user)
+
+ expect do
+ service.execute
+ end.to raise_exception(Gitlab::Access::AccessDeniedError)
+ end
+ end
+
+ context 'when user has enough privileges' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'updates group auto devops enabled accordingly' do
+ service.execute
+
+ expect(group.auto_devops_enabled).to eq(false)
+ end
+
+ context 'when group has projects' do
+ it 'reflects changes on projects' do
+ project_1 = create(:project, namespace: group)
+
+ service.execute
+
+ expect(project_1).not_to have_auto_devops_implicitly_enabled
+ end
+ end
+
+ context 'when group has subgroups' do
+ it 'reflects changes on subgroups' do
+ subgroup_1 = create(:group, parent: group)
+
+ service.execute
+
+ expect(subgroup_1.auto_devops_enabled?).to eq(false)
+ end
+
+ context 'when subgroups have projects', :nested_groups do
+ it 'reflects changes on projects' do
+ subgroup_1 = create(:group, parent: group)
+ project_1 = create(:project, namespace: subgroup_1)
+
+ service.execute
+
+ expect(project_1).not_to have_auto_devops_implicitly_enabled
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/labels/available_labels_service_spec.rb b/spec/services/labels/available_labels_service_spec.rb
new file mode 100644
index 00000000000..4d5c87ecc53
--- /dev/null
+++ b/spec/services/labels/available_labels_service_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+describe Labels::AvailableLabelsService do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, group: group) }
+ let(:group) { create(:group) }
+
+ let(:project_label) { create(:label, project: project) }
+ let(:other_project_label) { create(:label) }
+ let(:group_label) { create(:group_label, group: group) }
+ let(:other_group_label) { create(:group_label) }
+ let(:labels) { [project_label, other_project_label, group_label, other_group_label] }
+
+ context '#find_or_create_by_titles' do
+ let(:label_titles) { labels.map(&:title).push('non existing title') }
+
+ context 'when parent is a project' do
+ context 'when a user is not a project member' do
+        it 'returns only relevant labels' do
+ result = described_class.new(user, project, labels: label_titles).find_or_create_by_titles
+
+ expect(result).to match_array([project_label, group_label])
+ end
+ end
+
+ context 'when a user is a project member' do
+ before do
+ project.add_developer(user)
+ end
+
+        it 'creates new labels for titles that are not found' do
+ result = described_class.new(user, project, labels: label_titles).find_or_create_by_titles
+
+ expect(result.count).to eq(5)
+ expect(result).to include(project_label, group_label)
+ expect(result).not_to include(other_project_label, other_group_label)
+ end
+ end
+ end
+
+ context 'when parent is a group' do
+ context 'when a user is not a group member' do
+        it 'returns only relevant labels' do
+ result = described_class.new(user, group, labels: label_titles).find_or_create_by_titles
+
+ expect(result).to match_array([group_label])
+ end
+ end
+
+ context 'when a user is a group member' do
+ before do
+ group.add_developer(user)
+ end
+
+        it 'creates new labels for titles that are not found' do
+ result = described_class.new(user, group, labels: label_titles).find_or_create_by_titles
+
+ expect(result.count).to eq(5)
+ expect(result).to include(group_label)
+ expect(result).not_to include(project_label, other_project_label, other_group_label)
+ end
+ end
+ end
+ end
+
+ context '#filter_labels_ids_in_param' do
+ let(:label_ids) { labels.map(&:id).push(99999) }
+
+ context 'when parent is a project' do
+ it 'returns only relevant label ids' do
+ result = described_class.new(user, project, ids: label_ids).filter_labels_ids_in_param(:ids)
+
+ expect(result).to match_array([project_label.id, group_label.id])
+ end
+ end
+
+ context 'when parent is a group' do
+ it 'returns only relevant label ids' do
+ result = described_class.new(user, group, ids: label_ids).filter_labels_ids_in_param(:ids)
+
+ expect(result).to match_array([group_label.id])
+ end
+ end
+ end
+end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index d1b110b9806..e8418b09dc2 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -276,6 +276,7 @@ describe Projects::CreateService, '#execute' do
before do
group.add_owner(user)
+ stub_feature_flags(ci_preparing_state: false)
expect(Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService).to receive(:namespace_creator).and_return(service_account_creator)
expect(Clusters::Gcp::Kubernetes::FetchKubernetesTokenService).to receive(:new).and_return(secrets_fetcher)
end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index dfbdfa2ab69..d3a8ee46f85 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -128,10 +128,8 @@ describe Projects::DestroyService do
it 'keeps project team intact upon an error' do
perform_enqueued_jobs do
- begin
- destroy_project(project, user, {})
- rescue ::Redis::CannotConnectError
- end
+ destroy_project(project, user, {})
+ rescue ::Redis::CannotConnectError
end
expect(project.team.members.count).to eq 2
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index aae50d5307f..4efd360cb30 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -83,6 +83,7 @@ describe Projects::TransferService do
subject { transfer_project(project, user, group) }
before do
+ stub_feature_flags(ci_preparing_state: false)
expect(Clusters::Gcp::Kubernetes::CreateOrUpdateServiceAccountService).to receive(:namespace_creator).and_return(service_account_creator)
expect(Clusters::Gcp::Kubernetes::FetchKubernetesTokenService).to receive(:new).and_return(secrets_fetcher)
end
diff --git a/spec/support/api/schema_matcher.rb b/spec/support/api/schema_matcher.rb
index f7d74df0656..4cf34d43117 100644
--- a/spec/support/api/schema_matcher.rb
+++ b/spec/support/api/schema_matcher.rb
@@ -1,10 +1,16 @@
module SchemaPath
- def self.expand(schema, dir = '')
- Rails.root.join(dir, 'spec', "fixtures/api/schemas/#{schema}.json").to_s
+ def self.expand(schema, dir = nil)
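+    # On GitLab EE, when no explicit dir is given, prefer an EE-specific schema
+    # fixture if one exists; otherwise fall back to the CE fixtures path below.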
+ if Gitlab.ee? && dir.nil?
+ ee_path = expand(schema, 'ee')
+
+ return ee_path if File.exist?(ee_path)
+ end
+
+ Rails.root.join(dir.to_s, 'spec', "fixtures/api/schemas/#{schema}.json").to_s
end
end
-RSpec::Matchers.define :match_response_schema do |schema, dir: '', **options|
+RSpec::Matchers.define :match_response_schema do |schema, dir: nil, **options|
match do |response|
@errors = JSON::Validator.fully_validate(
SchemaPath.expand(schema, dir), response.body, options)
@@ -18,8 +24,16 @@ RSpec::Matchers.define :match_response_schema do |schema, dir: '', **options|
end
end
-RSpec::Matchers.define :match_schema do |schema, dir: '', **options|
+RSpec::Matchers.define :match_schema do |schema, dir: nil, **options|
match do |data|
- JSON::Validator.validate!(SchemaPath.expand(schema, dir), data, options)
+ @errors = JSON::Validator.fully_validate(
+ SchemaPath.expand(schema, dir), data, options)
+
+ @errors.empty?
+ end
+
+ failure_message do |response|
+ "didn't match the schema defined by #{SchemaPath.expand(schema, dir)}" \
+ " The validation errors were:\n#{@errors.join("\n")}"
end
end
diff --git a/spec/support/api/time_tracking_shared_examples.rb b/spec/support/api/time_tracking_shared_examples.rb
index e883d33f671..15037222630 100644
--- a/spec/support/api/time_tracking_shared_examples.rb
+++ b/spec/support/api/time_tracking_shared_examples.rb
@@ -3,6 +3,8 @@ shared_examples 'an unauthorized API user' do
end
shared_examples 'time tracking endpoints' do |issuable_name|
+ let(:non_member) { create(:user) }
+
issuable_collection_name = issuable_name.pluralize
describe "POST /projects/:id/#{issuable_collection_name}/:#{issuable_name}_id/time_estimate" do
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 6cdc19ac2e5..ca28325eab9 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -18,12 +18,10 @@ module GraphqlHelpers
# Runs a block inside a BatchLoader::Executor wrapper
def batch(max_queries: nil, &blk)
wrapper = proc do
- begin
- BatchLoader::Executor.ensure_current
- yield
- ensure
- BatchLoader::Executor.clear_current
- end
+ BatchLoader::Executor.ensure_current
+ yield
+ ensure
+ BatchLoader::Executor.clear_current
end
if max_queries
diff --git a/spec/support/helpers/javascript_fixtures_helpers.rb b/spec/support/helpers/javascript_fixtures_helpers.rb
index f525b2f945e..cceb179d53e 100644
--- a/spec/support/helpers/javascript_fixtures_helpers.rb
+++ b/spec/support/helpers/javascript_fixtures_helpers.rb
@@ -5,7 +5,7 @@ module JavaScriptFixturesHelpers
extend ActiveSupport::Concern
include Gitlab::Popen
- FIXTURE_PATHS = %w[spec/javascripts/fixtures ee/spec/javascripts/fixtures].freeze
+ extend self
included do |base|
base.around do |example|
@@ -14,32 +14,32 @@ module JavaScriptFixturesHelpers
end
end
+ def fixture_root_path
+ 'spec/javascripts/fixtures'
+ end
+
# Public: Removes all fixture files from given directory
#
- # directory_name - directory of the fixtures (relative to FIXTURE_PATHS)
+ # directory_name - directory of the fixtures (relative to .fixture_root_path)
#
def clean_frontend_fixtures(directory_name)
- FIXTURE_PATHS.each do |fixture_path|
- directory_name = File.expand_path(directory_name, fixture_path)
- Dir[File.expand_path('*.html.raw', directory_name)].each do |file_name|
- FileUtils.rm(file_name)
- end
+ full_directory_name = File.expand_path(directory_name, fixture_root_path)
+ Dir[File.expand_path('*.html.raw', full_directory_name)].each do |file_name|
+ FileUtils.rm(file_name)
end
end
# Public: Store a response object as fixture file
#
# response - string or response object to store
- # fixture_file_name - file name to store the fixture in (relative to FIXTURE_PATHS)
+ # fixture_file_name - file name to store the fixture in (relative to .fixture_root_path)
#
def store_frontend_fixture(response, fixture_file_name)
- FIXTURE_PATHS.each do |fixture_path|
- fixture_file_name = File.expand_path(fixture_file_name, fixture_path)
- fixture = response.respond_to?(:body) ? parse_response(response) : response
+ full_fixture_path = File.expand_path(fixture_file_name, fixture_root_path)
+ fixture = response.respond_to?(:body) ? parse_response(response) : response
- FileUtils.mkdir_p(File.dirname(fixture_file_name))
- File.write(fixture_file_name, fixture)
- end
+ FileUtils.mkdir_p(File.dirname(full_fixture_path))
+ File.write(full_fixture_path, fixture)
end
def remove_repository(project)
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index cca11e112c9..ac52acb6570 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -250,16 +250,19 @@ module KubernetesHelpers
# This is a partial response, it will have many more elements in reality but
# these are the ones we care about at the moment
- def kube_pod(name: "kube-pod", app: "valid-pod-label", status: "Running", track: nil)
+ def kube_pod(name: "kube-pod", environment_slug: "production", project_slug: "project-path-slug", status: "Running", track: nil)
{
"metadata" => {
"name" => name,
"generate_name" => "generated-name-with-suffix",
"creationTimestamp" => "2016-11-25T19:55:19Z",
+ "annotations" => {
+ "app.gitlab.com/env" => environment_slug,
+ "app.gitlab.com/app" => project_slug
+ },
"labels" => {
- "app" => app,
"track" => track
- }
+ }.compact
},
"spec" => {
"containers" => [
@@ -293,13 +296,16 @@ module KubernetesHelpers
}
end
- def kube_deployment(name: "kube-deployment", app: "valid-deployment-label", track: nil)
+ def kube_deployment(name: "kube-deployment", environment_slug: "production", project_slug: "project-path-slug", track: nil)
{
"metadata" => {
"name" => name,
"generation" => 4,
+ "annotations" => {
+ "app.gitlab.com/env" => environment_slug,
+ "app.gitlab.com/app" => project_slug
+ },
"labels" => {
- "app" => app,
"track" => track
}.compact
},
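
As a quick illustration of the new keyword arguments (the slugs below are made up), stubbed pods and deployments are now identified by the GitLab annotations rather than the app label:

    pod = kube_pod(environment_slug: 'review-feature-abc', project_slug: 'group-project')
    pod.dig('metadata', 'annotations', 'app.gitlab.com/env') # => 'review-feature-abc'
    pod.dig('metadata', 'annotations', 'app.gitlab.com/app') # => 'group-project'

    deployment = kube_deployment(environment_slug: 'production', track: 'canary')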
diff --git a/spec/support/helpers/repo_helpers.rb b/spec/support/helpers/repo_helpers.rb
index 3c6956cf5e0..4af90f4af79 100644
--- a/spec/support/helpers/repo_helpers.rb
+++ b/spec/support/helpers/repo_helpers.rb
@@ -115,4 +115,18 @@ eos
commits: commits
)
end
+
+ def create_file_in_repo(
+ project, start_branch, branch_name, filename, content,
+ commit_message: 'Add new content')
+ Files::CreateService.new(
+ project,
+ project.owner,
+ commit_message: commit_message,
+ start_branch: start_branch,
+ branch_name: branch_name,
+ file_path: filename,
+ file_content: content
+ ).execute
+ end
end
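
A usage sketch for the new helper, with hypothetical branch names, file name, and content:

    # Commits NEW_FILE.md to master as the project owner
    create_file_in_repo(project, 'master', 'master', 'NEW_FILE.md', 'hello world')

    # Or on a new branch with a custom commit message
    create_file_in_repo(project, 'master', 'add-readme', 'README.md', 'docs',
                        commit_message: 'Add README')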
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index ff21bbe28ca..cfa9151b2d7 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -84,6 +84,10 @@ module StubConfiguration
allow(Gitlab.config.kerberos).to receive_messages(to_settings(messages))
end
+ def stub_gitlab_shell_setting(messages)
+ allow(Gitlab.config.gitlab_shell).to receive_messages(to_settings(messages))
+ end
+
private
# Modifies stubbed messages to also stub possible predicate versions
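
A hedged usage sketch for the new stub; the configuration key shown is an assumption about gitlab.yml and not part of this change:

    stub_gitlab_shell_setting(path: '/tmp/tests/gitlab-shell')

    Gitlab.config.gitlab_shell.path # => '/tmp/tests/gitlab-shell'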
diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb
index e0c50e533a6..30c8477f16a 100644
--- a/spec/support/helpers/stub_object_storage.rb
+++ b/spec/support/helpers/stub_object_storage.rb
@@ -23,15 +23,13 @@ module StubObjectStorage
Fog.mock!
::Fog::Storage.new(connection_params).tap do |connection|
- begin
- connection.directories.create(key: remote_directory)
+ connection.directories.create(key: remote_directory)
- # Cleanup remaining files
- connection.directories.each do |directory|
- directory.files.map(&:destroy)
- end
- rescue Excon::Error::Conflict
+ # Cleanup remaining files
+ connection.directories.each do |directory|
+ directory.files.map(&:destroy)
end
+ rescue Excon::Error::Conflict
end
end
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 80a22134021..dc902d373b8 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -202,12 +202,10 @@ module TestEnv
socket = Gitlab::GitalyClient.address('default').sub('unix:', '')
Integer(sleep_time / sleep_interval).times do
- begin
- Socket.unix(socket)
- return
- rescue
- sleep sleep_interval
- end
+ Socket.unix(socket)
+ return
+ rescue
+ sleep sleep_interval
end
raise "could not connect to gitaly at #{socket.inspect} after #{sleep_time} seconds"
diff --git a/spec/support/pg_stat_activity.rb b/spec/support/pg_stat_activity.rb
deleted file mode 100644
index f93fba08a19..00000000000
--- a/spec/support/pg_stat_activity.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.configure do |config|
- config.before do
- if Gitlab::Database.postgresql? && ENV['PG_STAT_WARNING_THRESHOLD']
- warning_threshold = ENV['PG_STAT_WARNING_THRESHOLD'].to_i
- results = ActiveRecord::Base.connection.execute('SELECT * FROM pg_stat_activity')
- ntuples = results.ntuples
-
- warn("pg_stat_activity count: #{ntuples}")
-
- if ntuples > warning_threshold
- results.each do |result|
- warn result.inspect
- end
- end
- end
- end
-end
diff --git a/spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb
new file mode 100644
index 00000000000..a0d994c4d8d
--- /dev/null
+++ b/spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+
+RSpec.shared_context 'GroupProjectsFinder context' do
+ let(:group) { create(:group) }
+ let(:subgroup) { create(:group, parent: group) }
+ let(:current_user) { create(:user) }
+ let(:options) { {} }
+
+ let(:finder) { described_class.new(group: group, current_user: current_user, options: options) }
+
+ let!(:public_project) { create(:project, :public, group: group, path: '1') }
+ let!(:private_project) { create(:project, :private, group: group, path: '2') }
+ let!(:shared_project_1) { create(:project, :public, path: '3') }
+ let!(:shared_project_2) { create(:project, :private, path: '4') }
+ let!(:shared_project_3) { create(:project, :internal, path: '5') }
+ let!(:subgroup_project) { create(:project, :public, path: '6', group: subgroup) }
+ let!(:subgroup_private_project) { create(:project, :private, path: '7', group: subgroup) }
+
+ before do
+ shared_project_1.project_group_links.create(group_access: Gitlab::Access::MAINTAINER, group: group)
+ shared_project_2.project_group_links.create(group_access: Gitlab::Access::MAINTAINER, group: group)
+ shared_project_3.project_group_links.create(group_access: Gitlab::Access::MAINTAINER, group: group)
+ end
+end
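
A finder spec would pull this shared context in via include_context; a minimal, illustrative example:

    describe GroupProjectsFinder do
      include_context 'GroupProjectsFinder context'

      it 'includes the public project owned by the group' do
        expect(finder.execute).to include(public_project)
      end
    end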
diff --git a/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
new file mode 100644
index 00000000000..b8a9554f55f
--- /dev/null
+++ b/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
@@ -0,0 +1,44 @@
+require 'spec_helper'
+
+RSpec.shared_context 'IssuesFinder context' do
+ set(:user) { create(:user) }
+ set(:user2) { create(:user) }
+ set(:group) { create(:group) }
+ set(:subgroup) { create(:group, parent: group) }
+ set(:project1) { create(:project, group: group) }
+ set(:project2) { create(:project) }
+ set(:project3) { create(:project, group: subgroup) }
+ set(:milestone) { create(:milestone, project: project1) }
+ set(:label) { create(:label, project: project2) }
+ set(:issue1) { create(:issue, author: user, assignees: [user], project: project1, milestone: milestone, title: 'gitlab', created_at: 1.week.ago, updated_at: 1.week.ago) }
+ set(:issue2) { create(:issue, author: user, assignees: [user], project: project2, description: 'gitlab', created_at: 1.week.from_now, updated_at: 1.week.from_now) }
+ set(:issue3) { create(:issue, author: user2, assignees: [user2], project: project2, title: 'tanuki', description: 'tanuki', created_at: 2.weeks.from_now, updated_at: 2.weeks.from_now) }
+ set(:issue4) { create(:issue, project: project3) }
+ set(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: issue1) }
+ set(:award_emoji2) { create(:award_emoji, name: 'thumbsup', user: user2, awardable: issue2) }
+ set(:award_emoji3) { create(:award_emoji, name: 'thumbsdown', user: user, awardable: issue3) }
+end
+
+RSpec.shared_context 'IssuesFinder#execute context' do
+ let!(:closed_issue) { create(:issue, author: user2, assignees: [user2], project: project2, state: 'closed') }
+ let!(:label_link) { create(:label_link, label: label, target: issue2) }
+ let(:search_user) { user }
+ let(:params) { {} }
+ let(:issues) { described_class.new(search_user, params.reverse_merge(scope: scope, state: 'opened')).execute }
+
+ before(:context) do
+ project1.add_maintainer(user)
+ project2.add_developer(user)
+ project2.add_developer(user2)
+ project3.add_developer(user)
+
+ issue1
+ issue2
+ issue3
+ issue4
+
+ award_emoji1
+ award_emoji2
+ award_emoji3
+ end
+end
diff --git a/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
new file mode 100644
index 00000000000..4df80b4168a
--- /dev/null
+++ b/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
@@ -0,0 +1,65 @@
+require 'spec_helper'
+
+RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests context' do
+ include ProjectForksHelper
+
+ # We need to explicitly permit Gitaly N+1s because of the specs that use
+ # :request_store. Gitaly N+1 detection is only enabled when :request_store is,
+ # but we don't care about potential N+1s when we're just creating several
+ # projects in the setup phase.
+ def allow_gitaly_n_plus_1
+ Gitlab::GitalyClient.allow_n_plus_1_calls do
+ yield
+ end
+ end
+
+ set(:user) { create(:user) }
+ set(:user2) { create(:user) }
+
+ set(:group) { create(:group) }
+ set(:subgroup) { create(:group, parent: group) }
+ set(:project1) do
+ allow_gitaly_n_plus_1 { create(:project, :public, group: group) }
+ end
+ # We cannot use `set` here otherwise we get:
+ # Failure/Error: allow(RepositoryForkWorker).to receive(:perform_async).and_return(true)
+ # The use of doubles or partial doubles from rspec-mocks outside of the per-test lifecycle is not supported.
+ let(:project2) do
+ allow_gitaly_n_plus_1 do
+ fork_project(project1, user)
+ end
+ end
+ let(:project3) do
+ allow_gitaly_n_plus_1 do
+ fork_project(project1, user).tap do |project|
+ project.update!(archived: true)
+ end
+ end
+ end
+ set(:project4) do
+ allow_gitaly_n_plus_1 { create(:project, :repository, group: subgroup) }
+ end
+ set(:project5) do
+ allow_gitaly_n_plus_1 { create(:project, group: subgroup) }
+ end
+ set(:project6) do
+ allow_gitaly_n_plus_1 { create(:project, group: subgroup) }
+ end
+
+ let!(:merge_request1) { create(:merge_request, author: user, source_project: project2, target_project: project1, target_branch: 'merged-target') }
+ let!(:merge_request2) { create(:merge_request, :conflict, author: user, source_project: project2, target_project: project1, state: 'closed') }
+ let!(:merge_request3) { create(:merge_request, :simple, author: user, source_project: project2, target_project: project2, state: 'locked', title: 'thing WIP thing') }
+ let!(:merge_request4) { create(:merge_request, :simple, author: user, source_project: project3, target_project: project3, title: 'WIP thing') }
+ let!(:merge_request5) { create(:merge_request, :simple, author: user, source_project: project4, target_project: project4, title: '[WIP]') }
+
+ before do
+ project1.add_maintainer(user)
+ project2.add_developer(user)
+ project3.add_developer(user)
+ project4.add_developer(user)
+ project5.add_developer(user)
+ project6.add_developer(user)
+
+ project2.add_developer(user2)
+ end
+end
diff --git a/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb
new file mode 100644
index 00000000000..9e1f89ee0ed
--- /dev/null
+++ b/spec/support/shared_contexts/finders/users_finder_shared_contexts.rb
@@ -0,0 +1,8 @@
+require 'spec_helper'
+
+RSpec.shared_context 'UsersFinder#execute filter by project context' do
+ set(:normal_user) { create(:user, username: 'johndoe') }
+ set(:blocked_user) { create(:user, :blocked, username: 'notsorandom') }
+ set(:external_user) { create(:user, :external) }
+ set(:omniauth_user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
+end
diff --git a/spec/support/shared_examples/application_setting_examples.rb b/spec/support/shared_examples/application_setting_examples.rb
new file mode 100644
index 00000000000..e7ec24c5b7e
--- /dev/null
+++ b/spec/support/shared_examples/application_setting_examples.rb
@@ -0,0 +1,252 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'application settings examples' do
+ context 'restricted signup domains' do
+ it 'sets single domain' do
+ setting.domain_whitelist_raw = 'example.com'
+ expect(setting.domain_whitelist).to eq(['example.com'])
+ end
+
+ it 'sets multiple domains with spaces' do
+ setting.domain_whitelist_raw = 'example.com *.example.com'
+ expect(setting.domain_whitelist).to eq(['example.com', '*.example.com'])
+ end
+
+ it 'sets multiple domains with newlines and a space' do
+ setting.domain_whitelist_raw = "example.com\n *.example.com"
+ expect(setting.domain_whitelist).to eq(['example.com', '*.example.com'])
+ end
+
+ it 'sets multiple domains with commas' do
+ setting.domain_whitelist_raw = "example.com, *.example.com"
+ expect(setting.domain_whitelist).to eq(['example.com', '*.example.com'])
+ end
+ end
+
+ context 'blacklisted signup domains' do
+ it 'sets single domain' do
+ setting.domain_blacklist_raw = 'example.com'
+ expect(setting.domain_blacklist).to contain_exactly('example.com')
+ end
+
+ it 'sets multiple domains with spaces' do
+ setting.domain_blacklist_raw = 'example.com *.example.com'
+ expect(setting.domain_blacklist).to contain_exactly('example.com', '*.example.com')
+ end
+
+ it 'sets multiple domains with newlines and a space' do
+ setting.domain_blacklist_raw = "example.com\n *.example.com"
+ expect(setting.domain_blacklist).to contain_exactly('example.com', '*.example.com')
+ end
+
+ it 'sets multiple domains with commas' do
+ setting.domain_blacklist_raw = "example.com, *.example.com"
+ expect(setting.domain_blacklist).to contain_exactly('example.com', '*.example.com')
+ end
+
+ it 'sets multiple domains with semicolon' do
+ setting.domain_blacklist_raw = "example.com; *.example.com"
+ expect(setting.domain_blacklist).to contain_exactly('example.com', '*.example.com')
+ end
+
+ it 'sets multiple domains with mixture of everything' do
+ setting.domain_blacklist_raw = "example.com; *.example.com\n test.com\sblock.com yes.com"
+ expect(setting.domain_blacklist).to contain_exactly('example.com', '*.example.com', 'test.com', 'block.com', 'yes.com')
+ end
+
+    it 'sets multiple domains with file' do
+ setting.domain_blacklist_file = File.open(Rails.root.join('spec/fixtures/', 'domain_blacklist.txt'))
+ expect(setting.domain_blacklist).to contain_exactly('example.com', 'test.com', 'foo.bar')
+ end
+ end
+
+ describe 'usage ping settings' do
+ context 'when the usage ping is disabled in gitlab.yml' do
+ before do
+ allow(Settings.gitlab).to receive(:usage_ping_enabled).and_return(false)
+ end
+
+ it 'does not allow the usage ping to be configured' do
+ expect(setting.usage_ping_can_be_configured?).to be_falsey
+ end
+
+ context 'when the usage ping is disabled in the DB' do
+ before do
+ setting.usage_ping_enabled = false
+ end
+
+ it 'returns false for usage_ping_enabled' do
+ expect(setting.usage_ping_enabled).to be_falsey
+ end
+ end
+
+ context 'when the usage ping is enabled in the DB' do
+ before do
+ setting.usage_ping_enabled = true
+ end
+
+ it 'returns false for usage_ping_enabled' do
+ expect(setting.usage_ping_enabled).to be_falsey
+ end
+ end
+ end
+
+ context 'when the usage ping is enabled in gitlab.yml' do
+ before do
+ allow(Settings.gitlab).to receive(:usage_ping_enabled).and_return(true)
+ end
+
+ it 'allows the usage ping to be configured' do
+ expect(setting.usage_ping_can_be_configured?).to be_truthy
+ end
+
+ context 'when the usage ping is disabled in the DB' do
+ before do
+ setting.usage_ping_enabled = false
+ end
+
+ it 'returns false for usage_ping_enabled' do
+ expect(setting.usage_ping_enabled).to be_falsey
+ end
+ end
+
+ context 'when the usage ping is enabled in the DB' do
+ before do
+ setting.usage_ping_enabled = true
+ end
+
+ it 'returns true for usage_ping_enabled' do
+ expect(setting.usage_ping_enabled).to be_truthy
+ end
+ end
+ end
+ end
+
+ describe '#allowed_key_types' do
+ it 'includes all key types by default' do
+ expect(setting.allowed_key_types).to contain_exactly(*described_class::SUPPORTED_KEY_TYPES)
+ end
+
+ it 'excludes disabled key types' do
+ expect(setting.allowed_key_types).to include(:ed25519)
+
+ setting.ed25519_key_restriction = described_class::FORBIDDEN_KEY_VALUE
+
+ expect(setting.allowed_key_types).not_to include(:ed25519)
+ end
+ end
+
+ describe '#key_restriction_for' do
+ it 'returns the restriction value for recognised types' do
+ setting.rsa_key_restriction = 1024
+
+ expect(setting.key_restriction_for(:rsa)).to eq(1024)
+ end
+
+ it 'allows types to be passed as a string' do
+ setting.rsa_key_restriction = 1024
+
+ expect(setting.key_restriction_for('rsa')).to eq(1024)
+ end
+
+ it 'returns forbidden for unrecognised type' do
+ expect(setting.key_restriction_for(:foo)).to eq(described_class::FORBIDDEN_KEY_VALUE)
+ end
+ end
+
+ describe '#allow_signup?' do
+ it 'returns true' do
+ expect(setting.allow_signup?).to be_truthy
+ end
+
+ it 'returns false if signup is disabled' do
+ allow(setting).to receive(:signup_enabled?).and_return(false)
+
+ expect(setting.allow_signup?).to be_falsey
+ end
+
+ it 'returns false if password authentication is disabled for the web interface' do
+ allow(setting).to receive(:password_authentication_enabled_for_web?).and_return(false)
+
+ expect(setting.allow_signup?).to be_falsey
+ end
+ end
+
+ describe '#pick_repository_storage' do
+ it 'uses Array#sample to pick a random storage' do
+ array = double('array', sample: 'random')
+ expect(setting).to receive(:repository_storages).and_return(array)
+
+ expect(setting.pick_repository_storage).to eq('random')
+ end
+ end
+
+ describe '#user_default_internal_regex_enabled?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:user_default_external, :user_default_internal_regex, :result) do
+ false | nil | false
+ false | '' | false
+ false | '^(?:(?!\.ext@).)*$\r?\n?' | false
+ true | '' | false
+ true | nil | false
+ true | '^(?:(?!\.ext@).)*$\r?\n?' | true
+ end
+
+ with_them do
+ before do
+ setting.user_default_external = user_default_external
+ setting.user_default_internal_regex = user_default_internal_regex
+ end
+
+ subject { setting.user_default_internal_regex_enabled? }
+
+ it { is_expected.to eq(result) }
+ end
+ end
+
+ describe '#archive_builds_older_than' do
+ subject { setting.archive_builds_older_than }
+
+ context 'when the archive_builds_in_seconds is set' do
+ before do
+ setting.archive_builds_in_seconds = 3600
+ end
+
+ it { is_expected.to be_within(1.minute).of(1.hour.ago) }
+ end
+
+    context 'when the archive_builds_in_seconds is not set' do
+ before do
+ setting.archive_builds_in_seconds = nil
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#commit_email_hostname' do
+ context 'when the value is provided' do
+ before do
+ setting.commit_email_hostname = 'localhost'
+ end
+
+ it 'returns the provided value' do
+ expect(setting.commit_email_hostname).to eq('localhost')
+ end
+ end
+
+ context 'when the value is not provided' do
+ it 'returns the default from the class' do
+ expect(setting.commit_email_hostname)
+ .to eq(described_class.default_commit_email_hostname)
+ end
+ end
+ end
+
+ it 'predicate method changes when value is updated' do
+ setting.password_authentication_enabled_for_web = false
+
+ expect(setting.password_authentication_enabled_for_web?).to be_falsey
+ end
+end
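
These examples expect the including spec to provide `setting`; a hypothetical inclusion (the initializer used here is an assumption, not part of this diff):

    # e.g. in spec/models/application_setting_spec.rb
    it_behaves_like 'application settings examples' do
      let(:setting) { described_class.create_from_defaults }
    end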
diff --git a/spec/support/shared_examples/malicious_regexp_shared_examples.rb b/spec/support/shared_examples/malicious_regexp_shared_examples.rb
index db69b75c0c8..a86050e2cf2 100644
--- a/spec/support/shared_examples/malicious_regexp_shared_examples.rb
+++ b/spec/support/shared_examples/malicious_regexp_shared_examples.rb
@@ -2,7 +2,8 @@ require 'timeout'
shared_examples 'malicious regexp' do
let(:malicious_text) { 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!' }
- let(:malicious_regexp) { '(?i)^(([a-z])+.)+[A-Z]([a-z])+$' }
+ let(:malicious_regexp_re2) { '(?i)^(([a-z])+.)+[A-Z]([a-z])+$' }
+ let(:malicious_regexp_ruby) { '/^(([a-z])+.)+[A-Z]([a-z])+$/i' }
it 'takes under a second' do
expect { Timeout.timeout(1) { subject } }.not_to raise_error
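
An including spec now picks whichever flavour matches the engine it exercises; a hypothetical RE2-backed inclusion (the class and method named here are assumptions, not part of this diff):

    it_behaves_like 'malicious regexp' do
      subject { Gitlab::UntrustedRegexp.new(malicious_regexp_re2).match?(malicious_text) }
    end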
diff --git a/spec/support/shared_examples/requests/api/merge_requests_list.rb b/spec/support/shared_examples/requests/api/merge_requests_list.rb
deleted file mode 100644
index 32e3b81c3c5..00000000000
--- a/spec/support/shared_examples/requests/api/merge_requests_list.rb
+++ /dev/null
@@ -1,366 +0,0 @@
-shared_examples 'merge requests list' do
- context 'when unauthenticated' do
- it 'returns merge requests for public projects' do
- get api(endpoint_path)
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- end
- end
-
- context 'when authenticated' do
- it 'avoids N+1 queries' do
- control = ActiveRecord::QueryRecorder.new do
- get api(endpoint_path, user)
- end
-
- create(:merge_request, state: 'closed', milestone: milestone1, author: user, assignee: user, source_project: project, target_project: project, title: 'Test', created_at: base_time)
-
- merge_request = create(:merge_request, milestone: milestone1, author: user, assignee: user, source_project: project, target_project: project, title: 'Test', created_at: base_time)
-
- merge_request.metrics.update!(merged_by: user,
- latest_closed_by: user,
- latest_closed_at: 1.hour.ago,
- merged_at: 2.hours.ago)
-
- expect do
- get api(endpoint_path, user)
- end.not_to exceed_query_limit(control)
- end
-
- it 'returns an array of all merge_requests' do
- get api(endpoint_path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
- expect(json_response.last['title']).to eq(merge_request.title)
- expect(json_response.last).to have_key('web_url')
- expect(json_response.last['sha']).to eq(merge_request.diff_head_sha)
- expect(json_response.last['merge_commit_sha']).to be_nil
- expect(json_response.last['merge_commit_sha']).to eq(merge_request.merge_commit_sha)
- expect(json_response.last['downvotes']).to eq(1)
- expect(json_response.last['upvotes']).to eq(1)
- expect(json_response.last['labels']).to eq([label2.title, label.title])
- expect(json_response.first['title']).to eq(merge_request_merged.title)
- expect(json_response.first['sha']).to eq(merge_request_merged.diff_head_sha)
- expect(json_response.first['merge_commit_sha']).not_to be_nil
- expect(json_response.first['merge_commit_sha']).to eq(merge_request_merged.merge_commit_sha)
- end
-
- it 'returns an array of all merge_requests using simple mode' do
- path = endpoint_path + '?view=simple'
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response.last.keys).to match_array(%w(id iid title web_url created_at description project_id state updated_at))
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
- expect(json_response.last['iid']).to eq(merge_request.iid)
- expect(json_response.last['title']).to eq(merge_request.title)
- expect(json_response.last).to have_key('web_url')
- expect(json_response.first['iid']).to eq(merge_request_merged.iid)
- expect(json_response.first['title']).to eq(merge_request_merged.title)
- expect(json_response.first).to have_key('web_url')
- end
-
- it 'returns an array of all merge_requests' do
- path = endpoint_path + '?state'
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
- expect(json_response.last['title']).to eq(merge_request.title)
- end
-
- it 'returns an array of open merge_requests' do
- path = endpoint_path + '?state=opened'
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.last['title']).to eq(merge_request.title)
- end
-
- it 'returns an array of closed merge_requests' do
- path = endpoint_path + '?state=closed'
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['title']).to eq(merge_request_closed.title)
- end
-
- it 'returns an array of merged merge_requests' do
- path = endpoint_path + '?state=merged'
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['title']).to eq(merge_request_merged.title)
- end
-
- it 'matches V4 response schema' do
- get api(endpoint_path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(response).to match_response_schema('public_api/v4/merge_requests')
- end
-
- it 'returns an empty array if no issue matches milestone' do
- get api(endpoint_path, user), params: { milestone: '1.0.0' }
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
- end
-
- it 'returns an empty array if milestone does not exist' do
- get api(endpoint_path, user), params: { milestone: 'foo' }
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
- end
-
- it 'returns an array of merge requests in given milestone' do
- get api(endpoint_path, user), params: { milestone: '0.9' }
-
- closed_issues = json_response.select { |mr| mr['id'] == merge_request_closed.id }
- expect(closed_issues.length).to eq(1)
- expect(closed_issues.first['title']).to eq merge_request_closed.title
- end
-
- it 'returns an array of merge requests matching state in milestone' do
- get api(endpoint_path, user), params: { milestone: '0.9', state: 'closed' }
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(merge_request_closed.id)
- end
-
- it 'returns an array of labeled merge requests' do
- path = endpoint_path + "?labels=#{label.title}"
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['labels']).to eq([label2.title, label.title])
- end
-
- it 'returns an array of labeled merge requests where all labels match' do
- path = endpoint_path + "?labels=#{label.title},foo,bar"
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
- end
-
- it 'returns an empty array if no merge request matches labels' do
- path = endpoint_path + '?labels=foo,bar'
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(0)
- end
-
- it 'returns an array of labeled merge requests where all labels match' do
- path = endpoint_path + "?labels[]=#{label.title}&labels[]=#{label2.title}"
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['labels']).to eq([label2.title, label.title])
- end
-
- it 'returns an array of merge requests with any label when filtering by any label' do
- get api(endpoint_path, user), params: { labels: [" #{label.title} ", " #{label2.title} "] }
-
- expect_paginated_array_response
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['labels']).to eq([label2.title, label.title])
- expect(json_response.first['id']).to eq(merge_request.id)
- end
-
- it 'returns an array of merge requests with any label when filtering by any label' do
- get api(endpoint_path, user), params: { labels: ["#{label.title} , #{label2.title}"] }
-
- expect_paginated_array_response
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['labels']).to eq([label2.title, label.title])
- expect(json_response.first['id']).to eq(merge_request.id)
- end
-
- it 'returns an array of merge requests with any label when filtering by any label' do
- get api(endpoint_path, user), params: { labels: IssuesFinder::FILTER_ANY }
-
- expect_paginated_array_response
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(merge_request.id)
- end
-
- it 'returns an array of merge requests without a label when filtering by no label' do
- get api(endpoint_path, user), params: { labels: IssuesFinder::FILTER_NONE }
-
- response_ids = json_response.map { |merge_request| merge_request['id'] }
-
- expect_paginated_array_response
- expect(response_ids).to contain_exactly(merge_request_closed.id, merge_request_merged.id, merge_request_locked.id)
- end
-
- it 'returns an array of labeled merge requests that are merged for a milestone' do
- bug_label = create(:label, title: 'bug', color: '#FFAABB', project: project)
-
- mr1 = create(:merge_request, state: 'merged', source_project: project, target_project: project, milestone: milestone)
- mr2 = create(:merge_request, state: 'merged', source_project: project, target_project: project, milestone: milestone1)
- mr3 = create(:merge_request, state: 'closed', source_project: project, target_project: project, milestone: milestone1)
- _mr = create(:merge_request, state: 'merged', source_project: project, target_project: project, milestone: milestone1)
-
- create(:label_link, label: bug_label, target: mr1)
- create(:label_link, label: bug_label, target: mr2)
- create(:label_link, label: bug_label, target: mr3)
-
- path = endpoint_path + "?labels=#{bug_label.title}&milestone=#{milestone1.title}&state=merged"
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
- expect(json_response.first['id']).to eq(mr2.id)
- end
-
- context 'with ordering' do
- before do
- @mr_later = mr_with_later_created_and_updated_at_time
- @mr_earlier = mr_with_earlier_created_and_updated_at_time
- end
-
- it 'returns an array of merge_requests in ascending order' do
- path = endpoint_path + '?sort=asc'
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
- response_dates = json_response.map { |merge_request| merge_request['created_at'] }
- expect(response_dates).to eq(response_dates.sort)
- end
-
- it 'returns an array of merge_requests in descending order' do
- path = endpoint_path + '?sort=desc'
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
- response_dates = json_response.map { |merge_request| merge_request['created_at'] }
- expect(response_dates).to eq(response_dates.sort.reverse)
- end
-
- context '2 merge requests with equal created_at' do
- let!(:closed_mr2) do
- create :merge_request,
- state: 'closed',
- milestone: milestone1,
- author: user,
- assignee: user,
- source_project: project,
- target_project: project,
- title: "Test",
- created_at: @mr_earlier.created_at
- end
-
- it 'page breaks first page correctly' do
- get api("#{endpoint_path}?sort=desc&per_page=4", user)
-
- response_ids = json_response.map { |merge_request| merge_request['id'] }
-
- expect(response_ids).to include(closed_mr2.id)
- expect(response_ids).not_to include(@mr_earlier.id)
- end
-
- it 'page breaks second page correctly' do
- get api("#{endpoint_path}?sort=desc&per_page=4&page=2", user)
-
- response_ids = json_response.map { |merge_request| merge_request['id'] }
-
- expect(response_ids).not_to include(closed_mr2.id)
- expect(response_ids).to include(@mr_earlier.id)
- end
- end
-
- it 'returns an array of merge_requests ordered by updated_at' do
- path = endpoint_path + '?order_by=updated_at'
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
- response_dates = json_response.map { |merge_request| merge_request['updated_at'] }
- expect(response_dates).to eq(response_dates.sort.reverse)
- end
-
- it 'returns an array of merge_requests ordered by created_at' do
- path = endpoint_path + '?order_by=created_at&sort=asc'
-
- get api(path, user)
-
- expect(response).to have_gitlab_http_status(200)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.length).to eq(4)
- response_dates = json_response.map { |merge_request| merge_request['created_at'] }
- expect(response_dates).to eq(response_dates.sort)
- end
- end
-
- context 'source_branch param' do
- it 'returns merge requests with the given source branch' do
- get api(endpoint_path, user), params: { source_branch: merge_request_closed.source_branch, state: 'all' }
-
- expect_response_contain_exactly(merge_request_closed, merge_request_merged, merge_request_locked)
- end
- end
-
- context 'target_branch param' do
- it 'returns merge requests with the given target branch' do
- get api(endpoint_path, user), params: { target_branch: merge_request_closed.target_branch, state: 'all' }
-
- expect_response_contain_exactly(merge_request_closed, merge_request_merged, merge_request_locked)
- end
- end
- end
-end
diff --git a/spec/support/shared_examples/snippet_visibility.rb b/spec/support/shared_examples/snippet_visibility.rb
deleted file mode 100644
index 3a7c69b7877..00000000000
--- a/spec/support/shared_examples/snippet_visibility.rb
+++ /dev/null
@@ -1,322 +0,0 @@
-RSpec.shared_examples 'snippet visibility' do
- let!(:author) { create(:user) }
- let!(:member) { create(:user) }
- let!(:external) { create(:user, :external) }
-
- let!(:snippet_type_visibilities) do
- {
- public: Snippet::PUBLIC,
- internal: Snippet::INTERNAL,
- private: Snippet::PRIVATE
- }
- end
-
- context "For project snippets" do
- let!(:users) do
- {
- unauthenticated: nil,
- external: external,
- non_member: create(:user),
- member: member,
- author: author
- }
- end
-
- let!(:project_type_visibilities) do
- {
- public: Gitlab::VisibilityLevel::PUBLIC,
- internal: Gitlab::VisibilityLevel::INTERNAL,
- private: Gitlab::VisibilityLevel::PRIVATE
- }
- end
-
- let(:project_feature_visibilities) do
- {
- enabled: ProjectFeature::ENABLED,
- private: ProjectFeature::PRIVATE,
- disabled: ProjectFeature::DISABLED
- }
- end
-
- where(:project_type, :feature_visibility, :user_type, :snippet_type, :outcome) do
- [
- # Public projects
- [:public, :enabled, :unauthenticated, :public, true],
- [:public, :enabled, :unauthenticated, :internal, false],
- [:public, :enabled, :unauthenticated, :private, false],
-
- [:public, :enabled, :external, :public, true],
- [:public, :enabled, :external, :internal, false],
- [:public, :enabled, :external, :private, false],
-
- [:public, :enabled, :non_member, :public, true],
- [:public, :enabled, :non_member, :internal, true],
- [:public, :enabled, :non_member, :private, false],
-
- [:public, :enabled, :member, :public, true],
- [:public, :enabled, :member, :internal, true],
- [:public, :enabled, :member, :private, true],
-
- [:public, :enabled, :author, :public, true],
- [:public, :enabled, :author, :internal, true],
- [:public, :enabled, :author, :private, true],
-
- [:public, :private, :unauthenticated, :public, false],
- [:public, :private, :unauthenticated, :internal, false],
- [:public, :private, :unauthenticated, :private, false],
-
- [:public, :private, :external, :public, false],
- [:public, :private, :external, :internal, false],
- [:public, :private, :external, :private, false],
-
- [:public, :private, :non_member, :public, false],
- [:public, :private, :non_member, :internal, false],
- [:public, :private, :non_member, :private, false],
-
- [:public, :private, :member, :public, true],
- [:public, :private, :member, :internal, true],
- [:public, :private, :member, :private, true],
-
- [:public, :private, :author, :public, true],
- [:public, :private, :author, :internal, true],
- [:public, :private, :author, :private, true],
-
- [:public, :disabled, :unauthenticated, :public, false],
- [:public, :disabled, :unauthenticated, :internal, false],
- [:public, :disabled, :unauthenticated, :private, false],
-
- [:public, :disabled, :external, :public, false],
- [:public, :disabled, :external, :internal, false],
- [:public, :disabled, :external, :private, false],
-
- [:public, :disabled, :non_member, :public, false],
- [:public, :disabled, :non_member, :internal, false],
- [:public, :disabled, :non_member, :private, false],
-
- [:public, :disabled, :member, :public, false],
- [:public, :disabled, :member, :internal, false],
- [:public, :disabled, :member, :private, false],
-
- [:public, :disabled, :author, :public, false],
- [:public, :disabled, :author, :internal, false],
- [:public, :disabled, :author, :private, false],
-
- # Internal projects
- [:internal, :enabled, :unauthenticated, :public, false],
- [:internal, :enabled, :unauthenticated, :internal, false],
- [:internal, :enabled, :unauthenticated, :private, false],
-
- [:internal, :enabled, :external, :public, false],
- [:internal, :enabled, :external, :internal, false],
- [:internal, :enabled, :external, :private, false],
-
- [:internal, :enabled, :non_member, :public, true],
- [:internal, :enabled, :non_member, :internal, true],
- [:internal, :enabled, :non_member, :private, false],
-
- [:internal, :enabled, :member, :public, true],
- [:internal, :enabled, :member, :internal, true],
- [:internal, :enabled, :member, :private, true],
-
- [:internal, :enabled, :author, :public, true],
- [:internal, :enabled, :author, :internal, true],
- [:internal, :enabled, :author, :private, true],
-
- [:internal, :private, :unauthenticated, :public, false],
- [:internal, :private, :unauthenticated, :internal, false],
- [:internal, :private, :unauthenticated, :private, false],
-
- [:internal, :private, :external, :public, false],
- [:internal, :private, :external, :internal, false],
- [:internal, :private, :external, :private, false],
-
- [:internal, :private, :non_member, :public, false],
- [:internal, :private, :non_member, :internal, false],
- [:internal, :private, :non_member, :private, false],
-
- [:internal, :private, :member, :public, true],
- [:internal, :private, :member, :internal, true],
- [:internal, :private, :member, :private, true],
-
- [:internal, :private, :author, :public, true],
- [:internal, :private, :author, :internal, true],
- [:internal, :private, :author, :private, true],
-
- [:internal, :disabled, :unauthenticated, :public, false],
- [:internal, :disabled, :unauthenticated, :internal, false],
- [:internal, :disabled, :unauthenticated, :private, false],
-
- [:internal, :disabled, :external, :public, false],
- [:internal, :disabled, :external, :internal, false],
- [:internal, :disabled, :external, :private, false],
-
- [:internal, :disabled, :non_member, :public, false],
- [:internal, :disabled, :non_member, :internal, false],
- [:internal, :disabled, :non_member, :private, false],
-
- [:internal, :disabled, :member, :public, false],
- [:internal, :disabled, :member, :internal, false],
- [:internal, :disabled, :member, :private, false],
-
- [:internal, :disabled, :author, :public, false],
- [:internal, :disabled, :author, :internal, false],
- [:internal, :disabled, :author, :private, false],
-
- # Private projects
- [:private, :enabled, :unauthenticated, :public, false],
- [:private, :enabled, :unauthenticated, :internal, false],
- [:private, :enabled, :unauthenticated, :private, false],
-
- [:private, :enabled, :external, :public, true],
- [:private, :enabled, :external, :internal, true],
- [:private, :enabled, :external, :private, true],
-
- [:private, :enabled, :non_member, :public, false],
- [:private, :enabled, :non_member, :internal, false],
- [:private, :enabled, :non_member, :private, false],
-
- [:private, :enabled, :member, :public, true],
- [:private, :enabled, :member, :internal, true],
- [:private, :enabled, :member, :private, true],
-
- [:private, :enabled, :author, :public, true],
- [:private, :enabled, :author, :internal, true],
- [:private, :enabled, :author, :private, true],
-
- [:private, :private, :unauthenticated, :public, false],
- [:private, :private, :unauthenticated, :internal, false],
- [:private, :private, :unauthenticated, :private, false],
-
- [:private, :private, :external, :public, true],
- [:private, :private, :external, :internal, true],
- [:private, :private, :external, :private, true],
-
- [:private, :private, :non_member, :public, false],
- [:private, :private, :non_member, :internal, false],
- [:private, :private, :non_member, :private, false],
-
- [:private, :private, :member, :public, true],
- [:private, :private, :member, :internal, true],
- [:private, :private, :member, :private, true],
-
- [:private, :private, :author, :public, true],
- [:private, :private, :author, :internal, true],
- [:private, :private, :author, :private, true],
-
- [:private, :disabled, :unauthenticated, :public, false],
- [:private, :disabled, :unauthenticated, :internal, false],
- [:private, :disabled, :unauthenticated, :private, false],
-
- [:private, :disabled, :external, :public, false],
- [:private, :disabled, :external, :internal, false],
- [:private, :disabled, :external, :private, false],
-
- [:private, :disabled, :non_member, :public, false],
- [:private, :disabled, :non_member, :internal, false],
- [:private, :disabled, :non_member, :private, false],
-
- [:private, :disabled, :member, :public, false],
- [:private, :disabled, :member, :internal, false],
- [:private, :disabled, :member, :private, false],
-
- [:private, :disabled, :author, :public, false],
- [:private, :disabled, :author, :internal, false],
- [:private, :disabled, :author, :private, false]
- ]
- end
-
- with_them do
- let!(:project) { create(:project, visibility_level: project_type_visibilities[project_type]) }
- let!(:project_feature) { project.project_feature.update_column(:snippets_access_level, project_feature_visibilities[feature_visibility]) }
- let!(:user) { users[user_type] }
- let!(:snippet) { create(:project_snippet, visibility_level: snippet_type_visibilities[snippet_type], project: project, author: author) }
- let!(:members) do
- project.add_developer(author)
- project.add_developer(member)
- project.add_developer(external) if project.private?
- end
-
- context "For #{params[:project_type]} project and #{params[:user_type]} users" do
- it 'should agree with the read_project_snippet policy' do
- expect(can?(user, :read_project_snippet, snippet)).to eq(outcome)
- end
-
- it 'should return proper outcome' do
- results = described_class.new(user, project: project).execute
- expect(results.include?(snippet)).to eq(outcome)
- end
- end
-
- context "Without a given project and #{params[:user_type]} users" do
- it 'should return proper outcome' do
- results = described_class.new(user).execute
- expect(results.include?(snippet)).to eq(outcome)
- end
-
- it 'returns no snippets when the user cannot read cross project' do
- allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :read_cross_project) { false }
-
- snippets = described_class.new(user).execute
-
- expect(snippets).to be_empty
- end
- end
- end
- end
-
- context 'For personal snippets' do
- let!(:users) do
- {
- unauthenticated: nil,
- external: external,
- non_member: create(:user),
- author: author
- }
- end
-
- where(:snippet_visibility, :user_type, :outcome) do
- [
- [:public, :unauthenticated, true],
- [:public, :external, true],
- [:public, :non_member, true],
- [:public, :author, true],
-
- [:internal, :unauthenticated, false],
- [:internal, :external, false],
- [:internal, :non_member, true],
- [:internal, :author, true],
-
- [:private, :unauthenticated, false],
- [:private, :external, false],
- [:private, :non_member, false],
- [:private, :author, true]
- ]
- end
-
- with_them do
- let!(:user) { users[user_type] }
- let!(:snippet) { create(:personal_snippet, visibility_level: snippet_type_visibilities[snippet_visibility], author: author) }
-
- context "For personal and #{params[:snippet_visibility]} snippets with #{params[:user_type]} user" do
- it 'should agree with read_personal_snippet policy' do
- expect(can?(user, :read_personal_snippet, snippet)).to eq(outcome)
- end
-
- it 'should return proper outcome' do
- results = described_class.new(user).execute
- expect(results.include?(snippet)).to eq(outcome)
- end
-
- it 'should return personal snippets when the user cannot read cross project' do
- allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :read_cross_project) { false }
-
- results = described_class.new(user).execute
-
- expect(results.include?(snippet)).to eq(outcome)
- end
- end
- end
- end
-end
diff --git a/spec/support/shared_examples/snippet_visibility_shared_examples.rb b/spec/support/shared_examples/snippet_visibility_shared_examples.rb
new file mode 100644
index 00000000000..4f662db2120
--- /dev/null
+++ b/spec/support/shared_examples/snippet_visibility_shared_examples.rb
@@ -0,0 +1,306 @@
+RSpec.shared_examples 'snippet visibility' do
+ using RSpec::Parameterized::TableSyntax
+
+ # Make sure no snippets exist prior to running the test matrix
+ before(:context) do
+ DatabaseCleaner.clean_with(:truncation)
+ end
+
+ set(:author) { create(:user) }
+ set(:member) { create(:user) }
+ set(:external) { create(:user, :external) }
+
+ context "For project snippets" do
+ let!(:users) do
+ {
+ unauthenticated: nil,
+ external: external,
+ non_member: create(:user),
+ member: member,
+ author: author
+ }
+ end
+
+ where(:project_type, :feature_visibility, :user_type, :snippet_type, :outcome) do
+ [
+ # Public projects
+ [:public, ProjectFeature::ENABLED, :unauthenticated, Snippet::PUBLIC, true],
+ [:public, ProjectFeature::ENABLED, :unauthenticated, Snippet::INTERNAL, false],
+ [:public, ProjectFeature::ENABLED, :unauthenticated, Snippet::PRIVATE, false],
+
+ [:public, ProjectFeature::ENABLED, :external, Snippet::PUBLIC, true],
+ [:public, ProjectFeature::ENABLED, :external, Snippet::INTERNAL, false],
+ [:public, ProjectFeature::ENABLED, :external, Snippet::PRIVATE, false],
+
+ [:public, ProjectFeature::ENABLED, :non_member, Snippet::PUBLIC, true],
+ [:public, ProjectFeature::ENABLED, :non_member, Snippet::INTERNAL, true],
+ [:public, ProjectFeature::ENABLED, :non_member, Snippet::PRIVATE, false],
+
+ [:public, ProjectFeature::ENABLED, :member, Snippet::PUBLIC, true],
+ [:public, ProjectFeature::ENABLED, :member, Snippet::INTERNAL, true],
+ [:public, ProjectFeature::ENABLED, :member, Snippet::PRIVATE, true],
+
+ [:public, ProjectFeature::ENABLED, :author, Snippet::PUBLIC, true],
+ [:public, ProjectFeature::ENABLED, :author, Snippet::INTERNAL, true],
+ [:public, ProjectFeature::ENABLED, :author, Snippet::PRIVATE, true],
+
+ [:public, ProjectFeature::PRIVATE, :unauthenticated, Snippet::PUBLIC, false],
+ [:public, ProjectFeature::PRIVATE, :unauthenticated, Snippet::INTERNAL, false],
+ [:public, ProjectFeature::PRIVATE, :unauthenticated, Snippet::PRIVATE, false],
+
+ [:public, ProjectFeature::PRIVATE, :external, Snippet::PUBLIC, false],
+ [:public, ProjectFeature::PRIVATE, :external, Snippet::INTERNAL, false],
+ [:public, ProjectFeature::PRIVATE, :external, Snippet::PRIVATE, false],
+
+ [:public, ProjectFeature::PRIVATE, :non_member, Snippet::PUBLIC, false],
+ [:public, ProjectFeature::PRIVATE, :non_member, Snippet::INTERNAL, false],
+ [:public, ProjectFeature::PRIVATE, :non_member, Snippet::PRIVATE, false],
+
+ [:public, ProjectFeature::PRIVATE, :member, Snippet::PUBLIC, true],
+ [:public, ProjectFeature::PRIVATE, :member, Snippet::INTERNAL, true],
+ [:public, ProjectFeature::PRIVATE, :member, Snippet::PRIVATE, true],
+
+ [:public, ProjectFeature::PRIVATE, :author, Snippet::PUBLIC, true],
+ [:public, ProjectFeature::PRIVATE, :author, Snippet::INTERNAL, true],
+ [:public, ProjectFeature::PRIVATE, :author, Snippet::PRIVATE, true],
+
+ [:public, ProjectFeature::DISABLED, :unauthenticated, Snippet::PUBLIC, false],
+ [:public, ProjectFeature::DISABLED, :unauthenticated, Snippet::INTERNAL, false],
+ [:public, ProjectFeature::DISABLED, :unauthenticated, Snippet::PRIVATE, false],
+
+ [:public, ProjectFeature::DISABLED, :external, Snippet::PUBLIC, false],
+ [:public, ProjectFeature::DISABLED, :external, Snippet::INTERNAL, false],
+ [:public, ProjectFeature::DISABLED, :external, Snippet::PRIVATE, false],
+
+ [:public, ProjectFeature::DISABLED, :non_member, Snippet::PUBLIC, false],
+ [:public, ProjectFeature::DISABLED, :non_member, Snippet::INTERNAL, false],
+ [:public, ProjectFeature::DISABLED, :non_member, Snippet::PRIVATE, false],
+
+ [:public, ProjectFeature::DISABLED, :member, Snippet::PUBLIC, false],
+ [:public, ProjectFeature::DISABLED, :member, Snippet::INTERNAL, false],
+ [:public, ProjectFeature::DISABLED, :member, Snippet::PRIVATE, false],
+
+ [:public, ProjectFeature::DISABLED, :author, Snippet::PUBLIC, false],
+ [:public, ProjectFeature::DISABLED, :author, Snippet::INTERNAL, false],
+ [:public, ProjectFeature::DISABLED, :author, Snippet::PRIVATE, false],
+
+ # Internal projects
+ [:internal, ProjectFeature::ENABLED, :unauthenticated, Snippet::PUBLIC, false],
+ [:internal, ProjectFeature::ENABLED, :unauthenticated, Snippet::INTERNAL, false],
+ [:internal, ProjectFeature::ENABLED, :unauthenticated, Snippet::PRIVATE, false],
+
+ [:internal, ProjectFeature::ENABLED, :external, Snippet::PUBLIC, false],
+ [:internal, ProjectFeature::ENABLED, :external, Snippet::INTERNAL, false],
+ [:internal, ProjectFeature::ENABLED, :external, Snippet::PRIVATE, false],
+
+ [:internal, ProjectFeature::ENABLED, :non_member, Snippet::PUBLIC, true],
+ [:internal, ProjectFeature::ENABLED, :non_member, Snippet::INTERNAL, true],
+ [:internal, ProjectFeature::ENABLED, :non_member, Snippet::PRIVATE, false],
+
+ [:internal, ProjectFeature::ENABLED, :member, Snippet::PUBLIC, true],
+ [:internal, ProjectFeature::ENABLED, :member, Snippet::INTERNAL, true],
+ [:internal, ProjectFeature::ENABLED, :member, Snippet::PRIVATE, true],
+
+ [:internal, ProjectFeature::ENABLED, :author, Snippet::PUBLIC, true],
+ [:internal, ProjectFeature::ENABLED, :author, Snippet::INTERNAL, true],
+ [:internal, ProjectFeature::ENABLED, :author, Snippet::PRIVATE, true],
+
+ [:internal, ProjectFeature::PRIVATE, :unauthenticated, Snippet::PUBLIC, false],
+ [:internal, ProjectFeature::PRIVATE, :unauthenticated, Snippet::INTERNAL, false],
+ [:internal, ProjectFeature::PRIVATE, :unauthenticated, Snippet::PRIVATE, false],
+
+ [:internal, ProjectFeature::PRIVATE, :external, Snippet::PUBLIC, false],
+ [:internal, ProjectFeature::PRIVATE, :external, Snippet::INTERNAL, false],
+ [:internal, ProjectFeature::PRIVATE, :external, Snippet::PRIVATE, false],
+
+ [:internal, ProjectFeature::PRIVATE, :non_member, Snippet::PUBLIC, false],
+ [:internal, ProjectFeature::PRIVATE, :non_member, Snippet::INTERNAL, false],
+ [:internal, ProjectFeature::PRIVATE, :non_member, Snippet::PRIVATE, false],
+
+ [:internal, ProjectFeature::PRIVATE, :member, Snippet::PUBLIC, true],
+ [:internal, ProjectFeature::PRIVATE, :member, Snippet::INTERNAL, true],
+ [:internal, ProjectFeature::PRIVATE, :member, Snippet::PRIVATE, true],
+
+ [:internal, ProjectFeature::PRIVATE, :author, Snippet::PUBLIC, true],
+ [:internal, ProjectFeature::PRIVATE, :author, Snippet::INTERNAL, true],
+ [:internal, ProjectFeature::PRIVATE, :author, Snippet::PRIVATE, true],
+
+ [:internal, ProjectFeature::DISABLED, :unauthenticated, Snippet::PUBLIC, false],
+ [:internal, ProjectFeature::DISABLED, :unauthenticated, Snippet::INTERNAL, false],
+ [:internal, ProjectFeature::DISABLED, :unauthenticated, Snippet::PRIVATE, false],
+
+ [:internal, ProjectFeature::DISABLED, :external, Snippet::PUBLIC, false],
+ [:internal, ProjectFeature::DISABLED, :external, Snippet::INTERNAL, false],
+ [:internal, ProjectFeature::DISABLED, :external, Snippet::PRIVATE, false],
+
+ [:internal, ProjectFeature::DISABLED, :non_member, Snippet::PUBLIC, false],
+ [:internal, ProjectFeature::DISABLED, :non_member, Snippet::INTERNAL, false],
+ [:internal, ProjectFeature::DISABLED, :non_member, Snippet::PRIVATE, false],
+
+ [:internal, ProjectFeature::DISABLED, :member, Snippet::PUBLIC, false],
+ [:internal, ProjectFeature::DISABLED, :member, Snippet::INTERNAL, false],
+ [:internal, ProjectFeature::DISABLED, :member, Snippet::PRIVATE, false],
+
+ [:internal, ProjectFeature::DISABLED, :author, Snippet::PUBLIC, false],
+ [:internal, ProjectFeature::DISABLED, :author, Snippet::INTERNAL, false],
+ [:internal, ProjectFeature::DISABLED, :author, Snippet::PRIVATE, false],
+
+ # Private projects
+ [:private, ProjectFeature::ENABLED, :unauthenticated, Snippet::PUBLIC, false],
+ [:private, ProjectFeature::ENABLED, :unauthenticated, Snippet::INTERNAL, false],
+ [:private, ProjectFeature::ENABLED, :unauthenticated, Snippet::PRIVATE, false],
+
+ [:private, ProjectFeature::ENABLED, :external, Snippet::PUBLIC, true],
+ [:private, ProjectFeature::ENABLED, :external, Snippet::INTERNAL, true],
+ [:private, ProjectFeature::ENABLED, :external, Snippet::PRIVATE, true],
+
+ [:private, ProjectFeature::ENABLED, :non_member, Snippet::PUBLIC, false],
+ [:private, ProjectFeature::ENABLED, :non_member, Snippet::INTERNAL, false],
+ [:private, ProjectFeature::ENABLED, :non_member, Snippet::PRIVATE, false],
+
+ [:private, ProjectFeature::ENABLED, :member, Snippet::PUBLIC, true],
+ [:private, ProjectFeature::ENABLED, :member, Snippet::INTERNAL, true],
+ [:private, ProjectFeature::ENABLED, :member, Snippet::PRIVATE, true],
+
+ [:private, ProjectFeature::ENABLED, :author, Snippet::PUBLIC, true],
+ [:private, ProjectFeature::ENABLED, :author, Snippet::INTERNAL, true],
+ [:private, ProjectFeature::ENABLED, :author, Snippet::PRIVATE, true],
+
+ [:private, ProjectFeature::PRIVATE, :unauthenticated, Snippet::PUBLIC, false],
+ [:private, ProjectFeature::PRIVATE, :unauthenticated, Snippet::INTERNAL, false],
+ [:private, ProjectFeature::PRIVATE, :unauthenticated, Snippet::PRIVATE, false],
+
+ [:private, ProjectFeature::PRIVATE, :external, Snippet::PUBLIC, true],
+ [:private, ProjectFeature::PRIVATE, :external, Snippet::INTERNAL, true],
+ [:private, ProjectFeature::PRIVATE, :external, Snippet::PRIVATE, true],
+
+ [:private, ProjectFeature::PRIVATE, :non_member, Snippet::PUBLIC, false],
+ [:private, ProjectFeature::PRIVATE, :non_member, Snippet::INTERNAL, false],
+ [:private, ProjectFeature::PRIVATE, :non_member, Snippet::PRIVATE, false],
+
+ [:private, ProjectFeature::PRIVATE, :member, Snippet::PUBLIC, true],
+ [:private, ProjectFeature::PRIVATE, :member, Snippet::INTERNAL, true],
+ [:private, ProjectFeature::PRIVATE, :member, Snippet::PRIVATE, true],
+
+ [:private, ProjectFeature::PRIVATE, :author, Snippet::PUBLIC, true],
+ [:private, ProjectFeature::PRIVATE, :author, Snippet::INTERNAL, true],
+ [:private, ProjectFeature::PRIVATE, :author, Snippet::PRIVATE, true],
+
+ [:private, ProjectFeature::DISABLED, :unauthenticated, Snippet::PUBLIC, false],
+ [:private, ProjectFeature::DISABLED, :unauthenticated, Snippet::INTERNAL, false],
+ [:private, ProjectFeature::DISABLED, :unauthenticated, Snippet::PRIVATE, false],
+
+ [:private, ProjectFeature::DISABLED, :external, Snippet::PUBLIC, false],
+ [:private, ProjectFeature::DISABLED, :external, Snippet::INTERNAL, false],
+ [:private, ProjectFeature::DISABLED, :external, Snippet::PRIVATE, false],
+
+ [:private, ProjectFeature::DISABLED, :non_member, Snippet::PUBLIC, false],
+ [:private, ProjectFeature::DISABLED, :non_member, Snippet::INTERNAL, false],
+ [:private, ProjectFeature::DISABLED, :non_member, Snippet::PRIVATE, false],
+
+ [:private, ProjectFeature::DISABLED, :member, Snippet::PUBLIC, false],
+ [:private, ProjectFeature::DISABLED, :member, Snippet::INTERNAL, false],
+ [:private, ProjectFeature::DISABLED, :member, Snippet::PRIVATE, false],
+
+ [:private, ProjectFeature::DISABLED, :author, Snippet::PUBLIC, false],
+ [:private, ProjectFeature::DISABLED, :author, Snippet::INTERNAL, false],
+ [:private, ProjectFeature::DISABLED, :author, Snippet::PRIVATE, false]
+ ]
+ end
+
+ with_them do
+ let!(:project) { create(:project, visibility_level: Gitlab::VisibilityLevel.level_value(project_type.to_s)) }
+ let!(:project_feature) { project.project_feature.update_column(:snippets_access_level, feature_visibility) }
+ let!(:user) { users[user_type] }
+ let!(:snippet) { create(:project_snippet, visibility_level: snippet_type, project: project, author: author) }
+ let!(:members) do
+ project.add_developer(author)
+ project.add_developer(member)
+ project.add_developer(external) if project.private?
+ end
+
+ context "For #{params[:project_type]} project and #{params[:user_type]} users" do
+ it 'agrees with the read_project_snippet policy' do
+ expect(can?(user, :read_project_snippet, snippet)).to eq(outcome)
+ end
+
+ it 'returns the expected outcome' do
+ results = described_class.new(user, project: project).execute
+
+ expect(results.include?(snippet)).to eq(outcome)
+ end
+ end
+
+ context "Without a given project and #{params[:user_type]} users" do
+ it 'returns the expected outcome' do
+ results = described_class.new(user).execute
+ expect(results.include?(snippet)).to eq(outcome)
+ end
+
+ it 'returns no snippets when the user cannot read cross project' do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :read_cross_project) { false }
+
+ snippets = described_class.new(user).execute
+
+ expect(snippets).to be_empty
+ end
+ end
+ end
+ end
+
+ context 'For personal snippets' do
+ let!(:users) do
+ {
+ unauthenticated: nil,
+ external: external,
+ non_member: create(:user),
+ author: author
+ }
+ end
+
+ where(:snippet_visibility, :user_type, :outcome) do
+ [
+ [Snippet::PUBLIC, :unauthenticated, true],
+ [Snippet::PUBLIC, :external, true],
+ [Snippet::PUBLIC, :non_member, true],
+ [Snippet::PUBLIC, :author, true],
+
+ [Snippet::INTERNAL, :unauthenticated, false],
+ [Snippet::INTERNAL, :external, false],
+ [Snippet::INTERNAL, :non_member, true],
+ [Snippet::INTERNAL, :author, true],
+
+ [Snippet::PRIVATE, :unauthenticated, false],
+ [Snippet::PRIVATE, :external, false],
+ [Snippet::PRIVATE, :non_member, false],
+ [Snippet::PRIVATE, :author, true]
+ ]
+ end
+
+ with_them do
+ let!(:user) { users[user_type] }
+ let!(:snippet) { create(:personal_snippet, visibility_level: snippet_visibility, author: author) }
+
+ context "For personal and #{params[:snippet_visibility]} snippets with #{params[:user_type]} user" do
+ it 'agrees with the read_personal_snippet policy' do
+ expect(can?(user, :read_personal_snippet, snippet)).to eq(outcome)
+ end
+
+ it 'returns the expected outcome' do
+ results = described_class.new(user).execute
+ expect(results.include?(snippet)).to eq(outcome)
+ end
+
+ it 'returns personal snippets when the user cannot read cross project' do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :read_cross_project) { false }
+
+ results = described_class.new(user).execute
+
+ expect(results.include?(snippet)).to eq(outcome)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index a8fae4a88a3..bdbd39475b9 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -21,9 +21,6 @@ describe 'gitlab:app namespace rake task' do
# empty task as env is already loaded
Rake::Task.define_task :environment
-
- # We need this directory to run `gitlab:backup:create` task
- FileUtils.mkdir_p('public/uploads')
end
before do
@@ -38,6 +35,7 @@ describe 'gitlab:app namespace rake task' do
end
def run_rake_task(task_name)
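+ # Ensure this directory exists so the `gitlab:backup:create` task can run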
+ FileUtils.mkdir_p('tmp/tests/public/uploads')
Rake::Task[task_name].reenable
Rake.application.invoke_task task_name
end
diff --git a/spec/tasks/gitlab/storage_rake_spec.rb b/spec/tasks/gitlab/storage_rake_spec.rb
index 6b50670c3c0..4b04d9cec39 100644
--- a/spec/tasks/gitlab/storage_rake_spec.rb
+++ b/spec/tasks/gitlab/storage_rake_spec.rb
@@ -1,6 +1,6 @@
require 'rake_helper'
-describe 'rake gitlab:storage:*' do
+describe 'rake gitlab:storage:*', :sidekiq do
before do
Rake.application.rake_require 'tasks/gitlab/storage'
@@ -43,9 +43,7 @@ describe 'rake gitlab:storage:*' do
end
end
- describe 'gitlab:storage:migrate_to_hashed' do
- let(:task) { 'gitlab:storage:migrate_to_hashed' }
-
+ shared_examples "make sure database is writable" do
context 'read-only database' do
it 'does nothing' do
expect(Gitlab::Database).to receive(:read_only?).and_return(true)
@@ -55,48 +53,68 @@ describe 'rake gitlab:storage:*' do
expect { run_rake_task(task) }.to output(/This task requires database write access. Exiting./).to_stderr
end
end
+ end
- context '0 legacy projects' do
- it 'does nothing' do
- expect(::HashedStorage::MigratorWorker).not_to receive(:perform_async)
+ shared_examples "handles custom BATCH env var" do |worker_klass|
+ context 'in batches of 1' do
+ before do
+ stub_env('BATCH' => 1)
+ end
+
+ it "enqueues one #{worker_klass} per project" do
+ projects.each do |project|
+ expect(worker_klass).to receive(:perform_async).with(project.id, project.id)
+ end
run_rake_task(task)
end
end
- context '3 legacy projects' do
- let(:projects) { create_list(:project, 3, :legacy_storage) }
+ context 'in batches of 2' do
+ before do
+ stub_env('BATCH' => 2)
+ end
- context 'in batches of 1' do
- before do
- stub_env('BATCH' => 1)
+ it "enqueues one #{worker_klass} per 2 projects" do
+ projects.map(&:id).sort.each_slice(2) do |first, last|
+ last ||= first
+ expect(worker_klass).to receive(:perform_async).with(first, last)
end
- it 'enqueues one HashedStorage::MigratorWorker per project' do
- projects.each do |project|
- expect(::HashedStorage::MigratorWorker).to receive(:perform_async).with(project.id, project.id)
- end
-
- run_rake_task(task)
- end
+ run_rake_task(task)
end
+ end
+ end
- context 'in batches of 2' do
- before do
- stub_env('BATCH' => 2)
- end
+ describe 'gitlab:storage:migrate_to_hashed' do
+ let(:task) { 'gitlab:storage:migrate_to_hashed' }
- it 'enqueues one HashedStorage::MigratorWorker per 2 projects' do
- projects.map(&:id).sort.each_slice(2) do |first, last|
- last ||= first
- expect(::HashedStorage::MigratorWorker).to receive(:perform_async).with(first, last)
- end
+ context 'with rollback already scheduled', :redis do
+ it 'does nothing' do
+ Sidekiq::Testing.disable! do
+ ::HashedStorage::RollbackerWorker.perform_async(1, 5)
+
+ expect(Project).not_to receive(:with_unmigrated_storage)
- run_rake_task(task)
+ expect { run_rake_task(task) }.to output(/There is already a rollback operation in progress/).to_stderr
end
end
end
+ context 'with 0 legacy projects' do
+ it 'does nothing' do
+ expect(::HashedStorage::MigratorWorker).not_to receive(:perform_async)
+
+ run_rake_task(task)
+ end
+ end
+
+ context 'with 3 legacy projects' do
+ let(:projects) { create_list(:project, 3, :legacy_storage) }
+
+ it_behaves_like "handles custom BATCH env var", ::HashedStorage::MigratorWorker
+ end
+
context 'with same id in range' do
it 'displays message when project cant be found' do
stub_env('ID_FROM', 99999)
@@ -123,6 +141,38 @@ describe 'rake gitlab:storage:*' do
end
end
+ describe 'gitlab:storage:rollback_to_legacy' do
+ let(:task) { 'gitlab:storage:rollback_to_legacy' }
+
+ it_behaves_like 'make sure database is writable'
+
+ context 'with migration already scheduled', :redis do
+ it 'does nothing' do
+ Sidekiq::Testing.disable! do
+ ::HashedStorage::MigratorWorker.perform_async(1, 5)
+
+ expect(Project).not_to receive(:with_unmigrated_storage)
+
+ expect { run_rake_task(task) }.to output(/There is already a migration operation in progress/).to_stderr
+ end
+ end
+ end
+
+ context 'with 0 hashed projects' do
+ it 'does nothing' do
+ expect(::HashedStorage::RollbackerWorker).not_to receive(:perform_async)
+
+ run_rake_task(task)
+ end
+ end
+
+ context 'with 3 hashed projects' do
+ let(:projects) { create_list(:project, 3) }
+
+ it_behaves_like "handles custom BATCH env var", ::HashedStorage::RollbackerWorker
+ end
+ end
+
describe 'gitlab:storage:legacy_projects' do
it_behaves_like 'rake entities summary', 'projects', 'Legacy' do
let(:task) { 'gitlab:storage:legacy_projects' }
diff --git a/spec/validators/devise_email_validator_spec.rb b/spec/validators/devise_email_validator_spec.rb
new file mode 100644
index 00000000000..7860b659bd3
--- /dev/null
+++ b/spec/validators/devise_email_validator_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe DeviseEmailValidator do
+ let!(:user) { build(:user, public_email: 'test@example.com') }
+ subject { validator.validate(user) }
+
+ describe 'validations' do
+ context 'by default' do
+ let(:validator) { described_class.new(attributes: [:public_email]) }
+
+ it 'allows when email is valid' do
+ subject
+
+ expect(user.errors).to be_empty
+ end
+
+ it 'returns error when email is invalid' do
+ user.public_email = 'invalid'
+
+ subject
+
+ expect(user.errors).to be_present
+ expect(user.errors.first[1]).to eq 'is invalid'
+ end
+
+ it 'returns error when email is nil' do
+ user.public_email = nil
+
+ subject
+
+ expect(user.errors).to be_present
+ end
+
+ it 'returns error when email is blank' do
+ user.public_email = ''
+
+ subject
+
+ expect(user.errors).to be_present
+ expect(user.errors.first[1]).to eq 'is invalid'
+ end
+ end
+ end
+
+ context 'when regexp is set as Regexp' do
+ let(:validator) { described_class.new(attributes: [:public_email], regexp: /[0-9]/) }
+
+ it 'allows when value matches' do
+ user.public_email = '1'
+
+ subject
+
+ expect(user.errors).to be_empty
+ end
+
+ it 'returns error when value does not match' do
+ subject
+
+ expect(user.errors).to be_present
+ end
+ end
+
+ context 'when regexp is set as String' do
+ it 'raises an ArgumentError' do
+ expect { described_class.new({ regexp: 'something' }) }.to raise_error ArgumentError
+ end
+ end
+
+ context 'when allow_nil is set to true' do
+ let(:validator) { described_class.new(attributes: [:public_email], allow_nil: true) }
+
+ it 'allows when email is nil' do
+ user.public_email = nil
+
+ subject
+
+ expect(user.errors).to be_empty
+ end
+ end
+
+ context 'when allow_blank is set to true' do
+ let(:validator) { described_class.new(attributes: [:public_email], allow_blank: true) }
+
+ it 'allows when email is blank' do
+ user.public_email = ''
+
+ subject
+
+ expect(user.errors).to be_empty
+ end
+ end
+end
diff --git a/spec/validators/sha_validator_spec.rb b/spec/validators/sha_validator_spec.rb
index dc1539cf318..0a76570f65e 100644
--- a/spec/validators/sha_validator_spec.rb
+++ b/spec/validators/sha_validator_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
describe ShaValidator do
let(:validator) { described_class.new(attributes: [:base_commit_sha]) }
- let(:merge_diff) { build(:merge_request_diff) }
+ let!(:merge_diff) { build(:merge_request_diff) }
subject { validator.validate_each(merge_diff, :base_commit_sha, value) }
@@ -12,6 +12,8 @@ describe ShaValidator do
let(:value) { nil }
it 'does not add any error if value is empty' do
+ expect(Commit).not_to receive(:valid_hash?)
+
subject
expect(merge_diff.errors).to be_empty
@@ -21,7 +23,9 @@ describe ShaValidator do
context 'with valid sha' do
let(:value) { Digest::SHA1.hexdigest(SecureRandom.hex) }
- it 'does not add any error if value is empty' do
+ it 'does not add any error' do
+ expect(Commit).to receive(:valid_hash?).and_call_original
+
subject
expect(merge_diff.errors).to be_empty
@@ -32,6 +36,7 @@ describe ShaValidator do
let(:value) { 'foo' }
it 'adds error to the record' do
+ expect(Commit).to receive(:valid_hash?).and_call_original
expect(merge_diff.errors).to be_empty
subject
diff --git a/spec/views/groups/_home_panel.html.haml_spec.rb b/spec/views/groups/_home_panel.html.haml_spec.rb
new file mode 100644
index 00000000000..91c5ca261b9
--- /dev/null
+++ b/spec/views/groups/_home_panel.html.haml_spec.rb
@@ -0,0 +1,15 @@
+require 'spec_helper'
+
+describe 'groups/_home_panel' do
+ let(:group) { create(:group) }
+
+ before do
+ assign(:group, group)
+ end
+
+ it 'renders the group ID' do
+ render
+
+ expect(rendered).to have_content("Group ID: #{group.id}")
+ end
+end
diff --git a/spec/views/projects/settings/ci_cd/_autodevops_form.html.haml_spec.rb b/spec/views/projects/settings/ci_cd/_autodevops_form.html.haml_spec.rb
index 2a2539c80b5..b52fc719a64 100644
--- a/spec/views/projects/settings/ci_cd/_autodevops_form.html.haml_spec.rb
+++ b/spec/views/projects/settings/ci_cd/_autodevops_form.html.haml_spec.rb
@@ -5,6 +5,7 @@ describe 'projects/settings/ci_cd/_autodevops_form' do
before do
assign :project, project
+ allow(view).to receive(:auto_devops_enabled) { true }
end
it 'shows a warning message about Kubernetes cluster' do
diff --git a/spec/views/projects/settings/operations/show.html.haml_spec.rb b/spec/views/projects/settings/operations/show.html.haml_spec.rb
index 1bca8bba940..6762fe3759b 100644
--- a/spec/views/projects/settings/operations/show.html.haml_spec.rb
+++ b/spec/views/projects/settings/operations/show.html.haml_spec.rb
@@ -18,6 +18,7 @@ describe 'projects/settings/operations/show' do
allow(view).to receive(:error_tracking_setting)
.and_return(error_tracking_setting)
allow(view).to receive(:current_user).and_return(user)
+ allow(view).to receive(:incident_management_available?) { false }
end
let!(:error_tracking_setting) do
diff --git a/spec/workers/ci/build_prepare_worker_spec.rb b/spec/workers/ci/build_prepare_worker_spec.rb
new file mode 100644
index 00000000000..9f76696ee66
--- /dev/null
+++ b/spec/workers/ci/build_prepare_worker_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::BuildPrepareWorker do
+ subject { described_class.new.perform(build_id) }
+
+ context 'build exists' do
+ let(:build) { create(:ci_build) }
+ let(:build_id) { build.id }
+ let(:service) { double(execute: true) }
+
+ it 'calls the prepare build service' do
+ expect(Ci::PrepareBuildService).to receive(:new).with(build).and_return(service)
+ expect(service).to receive(:execute).once
+
+ subject
+ end
+ end
+
+ context 'build does not exist' do
+ let(:build_id) { -1 }
+
+ it 'does not attempt to prepare the build' do
+ expect(Ci::PrepareBuildService).not_to receive(:new)
+
+ subject
+ end
+ end
+end
diff --git a/spec/workers/cluster_configure_worker_spec.rb b/spec/workers/cluster_configure_worker_spec.rb
index 6918ee3d7d8..83f76809435 100644
--- a/spec/workers/cluster_configure_worker_spec.rb
+++ b/spec/workers/cluster_configure_worker_spec.rb
@@ -4,6 +4,11 @@ require 'spec_helper'
describe ClusterConfigureWorker, '#perform' do
let(:worker) { described_class.new }
+ let(:ci_preparing_state_enabled) { false }
+
+ before do
+ stub_feature_flags(ci_preparing_state: ci_preparing_state_enabled)
+ end
context 'when group cluster' do
let(:cluster) { create(:cluster, :group, :provided_by_gcp) }
@@ -66,4 +71,15 @@ describe ClusterConfigureWorker, '#perform' do
described_class.new.perform(123)
end
end
+
+ context 'ci_preparing_state feature is enabled' do
+ let(:cluster) { create(:cluster) }
+ let(:ci_preparing_state_enabled) { true }
+
+ it 'does not configure the cluster' do
+ expect(Clusters::RefreshService).not_to receive(:create_or_update_namespaces_for_cluster)
+
+ described_class.new.perform(cluster.id)
+ end
+ end
end
diff --git a/spec/workers/cluster_project_configure_worker_spec.rb b/spec/workers/cluster_project_configure_worker_spec.rb
new file mode 100644
index 00000000000..afdea55adf4
--- /dev/null
+++ b/spec/workers/cluster_project_configure_worker_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ClusterProjectConfigureWorker, '#perform' do
+ let(:worker) { described_class.new }
+
+ context 'ci_preparing_state feature is enabled' do
+ let(:cluster) { create(:cluster) }
+
+ before do
+ stub_feature_flags(ci_preparing_state: true)
+ end
+
+ it 'does not configure the cluster' do
+ expect(Clusters::RefreshService).not_to receive(:create_or_update_namespaces_for_project)
+
+ described_class.new.perform(cluster.id)
+ end
+ end
+end