-rw-r--r--  app/assets/javascripts/pipelines/components/pipelines_list/empty_state.vue  54
-rw-r--r--  app/controllers/import/bulk_imports_controller.rb  23
-rw-r--r--  app/controllers/projects/pipelines_controller.rb  21
-rw-r--r--  app/helpers/ci/pipelines_helper.rb  4
-rw-r--r--  app/models/project.rb  9
-rw-r--r--  app/services/ci/create_pipeline_service.rb  1
-rw-r--r--  app/services/pages/migrate_legacy_storage_to_deployment_service.rb  52
-rw-r--r--  app/services/pages/zip_directory_service.rb  40
-rw-r--r--  app/views/projects/pipelines/index.html.haml  2
-rw-r--r--  changelogs/unreleased/292680-refactor-users_controller_spec-rb-ssh_keys.yml  5
-rw-r--r--  changelogs/unreleased/expose-resolved_at-via-api.yml  5
-rw-r--r--  config/feature_flags/development/pages_migration_mark_as_not_deployed.yml  8
-rw-r--r--  doc/api/discussions.md  3
-rw-r--r--  doc/api/graphql/reference/gitlab_schema.graphql  5
-rw-r--r--  doc/api/graphql/reference/gitlab_schema.json  14
-rw-r--r--  doc/api/graphql/reference/index.md  1
-rw-r--r--  doc/api/scim.md  10
-rw-r--r--  doc/ci/merge_request_pipelines/img/pipeline-fork_v13_7.png  bin 0 -> 54285 bytes
-rw-r--r--  doc/ci/merge_request_pipelines/index.md  4
-rw-r--r--  doc/development/experiment_guide/index.md  21
-rw-r--r--  doc/topics/autodevops/upgrading_auto_deploy_dependencies.md  4
-rw-r--r--  doc/user/admin_area/settings/continuous_integration.md  2
-rw-r--r--  doc/user/group/saml_sso/scim_setup.md  6
-rw-r--r--  doc/user/infrastructure/terraform_state.md  4
-rw-r--r--  doc/user/project/service_desk.md  5
-rw-r--r--  lib/api/entities/note.rb  1
-rw-r--r--  lib/gitlab/experimentation.rb  3
-rw-r--r--  lib/gitlab/experimentation/controller_concern.rb  5
-rw-r--r--  locale/gitlab.pot  6
-rw-r--r--  spec/controllers/import/bulk_imports_controller_spec.rb  20
-rw-r--r--  spec/controllers/projects/pipelines_controller_spec.rb  66
-rw-r--r--  spec/controllers/users_controller_spec.rb  137
-rw-r--r--  spec/features/groups/import_export/connect_instance_spec.rb  2
-rw-r--r--  spec/fixtures/api/schemas/entities/discussion.json  1
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/notes.json  1
-rw-r--r--  spec/frontend/pipelines/empty_state_spec.js  105
-rw-r--r--  spec/helpers/ci/pipelines_helper_spec.rb  19
-rw-r--r--  spec/lib/gitlab/experimentation/controller_concern_spec.rb  10
-rw-r--r--  spec/models/project_spec.rb  37
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb  1
-rw-r--r--  spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb  101
-rw-r--r--  spec/services/pages/zip_directory_service_spec.rb  286
-rw-r--r--  spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb  2
-rw-r--r--  spec/workers/experiments/record_conversion_event_worker_spec.rb  10
44 files changed, 849 insertions, 267 deletions
diff --git a/app/assets/javascripts/pipelines/components/pipelines_list/empty_state.vue b/app/assets/javascripts/pipelines/components/pipelines_list/empty_state.vue
index 78b69073cd3..ee26ea2f007 100644
--- a/app/assets/javascripts/pipelines/components/pipelines_list/empty_state.vue
+++ b/app/assets/javascripts/pipelines/components/pipelines_list/empty_state.vue
@@ -1,7 +1,25 @@
<script>
import { GlButton } from '@gitlab/ui';
+import { isExperimentEnabled } from '~/lib/utils/experimentation';
+import { s__ } from '~/locale';
+import Tracking from '~/tracking';
export default {
+ i18n: {
+ control: {
+ infoMessage: s__(`Pipelines|Continuous Integration can help
+ catch bugs by running your tests automatically,
+ while Continuous Deployment can help you deliver
+ code to your product environment.`),
+ buttonMessage: s__('Pipelines|Get started with Pipelines'),
+ },
+ experiment: {
+ infoMessage: s__(`Pipelines|GitLab CI/CD can automatically build,
+ test, and deploy your code. Let GitLab take care of time
+ consuming tasks, so you can spend more time creating.`),
+ buttonMessage: s__('Pipelines|Get started with CI/CD'),
+ },
+ },
name: 'PipelinesEmptyState',
components: {
GlButton,
@@ -20,6 +38,23 @@ export default {
required: true,
},
},
+ mounted() {
+ this.track('viewed');
+ },
+ methods: {
+ track(action) {
+ if (!gon.tracking_data) {
+ return;
+ }
+
+ const { category, value, label, property } = gon.tracking_data;
+
+ Tracking.event(category, action, { value, label, property });
+ },
+ isExperimentEnabled() {
+ return isExperimentEnabled('pipelinesEmptyState');
+ },
+ },
};
</script>
<template>
@@ -29,18 +64,16 @@ export default {
</div>
<div class="col-12">
- <div class="gl-text-content">
+ <div class="text-content">
<template v-if="canSetCi">
- <h4 class="gl-text-center" data-testid="header-text">
+ <h4 data-testid="header-text" class="gl-text-center">
{{ s__('Pipelines|Build with confidence') }}
</h4>
-
<p data-testid="info-text">
{{
- s__(`Pipelines|Continuous Integration can help
- catch bugs by running your tests automatically,
- while Continuous Deployment can help you deliver
- code to your product environment.`)
+ isExperimentEnabled()
+ ? $options.i18n.experiment.infoMessage
+ : $options.i18n.control.infoMessage
}}
</p>
@@ -50,8 +83,13 @@ export default {
variant="info"
category="primary"
data-testid="get-started-pipelines"
+ @click="track('documentation_clicked')"
>
- {{ s__('Pipelines|Get started with Pipelines') }}
+ {{
+ isExperimentEnabled()
+ ? $options.i18n.experiment.buttonMessage
+ : $options.i18n.control.buttonMessage
+ }}
</gl-button>
</div>
</template>
diff --git a/app/controllers/import/bulk_imports_controller.rb b/app/controllers/import/bulk_imports_controller.rb
index 4417cfe9098..e5e98dd44a4 100644
--- a/app/controllers/import/bulk_imports_controller.rb
+++ b/app/controllers/import/bulk_imports_controller.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class Import::BulkImportsController < ApplicationController
+ include ActionView::Helpers::SanitizeHelper
+
before_action :ensure_group_import_enabled
before_action :verify_blocked_uri, only: :status
@@ -43,7 +45,22 @@ class Import::BulkImportsController < ApplicationController
end
def importable_data
- client.get('groups', top_level_only: true).parsed_response
+ client.get('groups', query_params).parsed_response
+ end
+
+ # Default query string params used to fetch groups from the GitLab source instance
+ #
+ # top_level_only: fetch only top-level groups (subgroups are fetched during the import itself)
+ # min_access_level: fetch only groups where the user has at least maintainer permissions
+ # search: optional search param to filter the user's groups by a keyword
+ def query_params
+ query_params = {
+ top_level_only: true,
+ min_access_level: Gitlab::Access::MAINTAINER
+ }
+
+ query_params[:search] = sanitized_filter_param if sanitized_filter_param
+ query_params
end
def client
@@ -131,4 +148,8 @@ class Import::BulkImportsController < ApplicationController
access_token: session[access_token_key]
}
end
+
+ def sanitized_filter_param
+ @filter ||= sanitize(params[:filter])&.downcase
+ end
end
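As a hedged illustration (not part of the change), this is roughly the request `#importable_data` issues when a filter is supplied; the filter value is invented for the example:

```ruby
# Sketch only: the query built by #query_params for params[:filter] == 'My Group'.
client.get('groups', {
  top_level_only: true,
  min_access_level: Gitlab::Access::MAINTAINER, # 40
  search: 'my group'                            # sanitized, then downcased
})
```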
diff --git a/app/controllers/projects/pipelines_controller.rb b/app/controllers/projects/pipelines_controller.rb
index 74513da8675..832f850121e 100644
--- a/app/controllers/projects/pipelines_controller.rb
+++ b/app/controllers/projects/pipelines_controller.rb
@@ -21,6 +21,7 @@ class Projects::PipelinesController < Projects::ApplicationController
push_frontend_feature_flag(:new_pipeline_form_prefilled_vars, project, type: :development, default_enabled: true)
end
before_action :ensure_pipeline, only: [:show]
+ before_action :push_experiment_to_gon, only: :index, if: :html_request?
# Will be removed with https://gitlab.com/gitlab-org/gitlab/-/issues/225596
before_action :redirect_for_legacy_scope_filter, only: [:index], if: -> { request.format.html? }
@@ -45,7 +46,11 @@ class Projects::PipelinesController < Projects::ApplicationController
@pipelines_count = limited_pipelines_count(project)
respond_to do |format|
- format.html
+ format.html do
+ record_empty_pipeline_experiment
+
+ render :index
+ end
format.json do
Gitlab::PollingInterval.set_header(response, interval: POLLING_INTERVAL)
@@ -313,6 +318,20 @@ class Projects::PipelinesController < Projects::ApplicationController
def index_params
params.permit(:scope, :username, :ref, :status)
end
+
+ def record_empty_pipeline_experiment
+ return unless @pipelines_count.to_i == 0
+ return if helpers.has_gitlab_ci?(@project)
+
+ record_experiment_user(:pipelines_empty_state)
+ end
+
+ def push_experiment_to_gon
+ return unless current_user
+
+ push_frontend_experiment(:pipelines_empty_state, subject: current_user)
+ frontend_experimentation_tracking_data(:pipelines_empty_state, 'view', project.namespace_id, subject: current_user)
+ end
end
Projects::PipelinesController.prepend_if_ee('EE::Projects::PipelinesController')
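For context, a rails-console-style sketch of what the new `before_action` exposes to the frontend when the experiment is active; the hash shapes follow the controller spec further down and the values are placeholders:

```ruby
# Sketch only, values illustrative.
Gon.experiments   #=> { "pipelinesEmptyState" => true }
Gon.tracking_data #=> { category: 'Growth::Activation::Experiment::PipelinesEmptyState',
                  #     action: 'view', label: '<anonymised id>',
                  #     property: 'experimental_group', value: 1 }
```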
diff --git a/app/helpers/ci/pipelines_helper.rb b/app/helpers/ci/pipelines_helper.rb
index 309aa477108..8a6f0821dbb 100644
--- a/app/helpers/ci/pipelines_helper.rb
+++ b/app/helpers/ci/pipelines_helper.rb
@@ -26,6 +26,10 @@ module Ci
_("%{message} showing first %{warnings_displayed}") % { message: message, warnings_displayed: MAX_LIMIT }
end
+ def has_gitlab_ci?(project)
+ project.has_ci? && project.builds_enabled?
+ end
+
private
def warning_markdown(pipeline)
diff --git a/app/models/project.rb b/app/models/project.rb
index c72cad54834..ddc2277e8d4 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -1829,6 +1829,15 @@ class Project < ApplicationRecord
ensure_pages_metadatum.update!(pages_deployment: deployment)
end
+ def set_first_pages_deployment!(deployment)
+ ensure_pages_metadatum
+
+ # Use where().update_all to perform the update in a single transaction, together with the NULL check
+ ProjectPagesMetadatum
+ .where(project_id: id, pages_deployment_id: nil)
+ .update_all(pages_deployment_id: deployment.id)
+ end
+
def write_repository_config(gl_full_path: full_path)
# We'd need to keep track of project full path otherwise directory tree
# created with hashed storage enabled cannot be usefully imported using
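A small sketch of the intended "first write wins" behaviour of `#set_first_pages_deployment!`; the deployment variables are illustrative:

```ruby
# Sketch only: the guarded update_all makes the second call a no-op.
project.set_first_pages_deployment!(first_deployment) # sets pages_deployment_id
project.set_first_pages_deployment!(other_deployment) # no-op: pages_deployment_id is no longer NULL
```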
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index 44cf8b53d0d..64545e993a6 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -121,6 +121,7 @@ module Ci
def record_conversion_event
Experiments::RecordConversionEventWorker.perform_async(:ci_syntax_templates, current_user.id)
+ Experiments::RecordConversionEventWorker.perform_async(:pipelines_empty_state, current_user.id)
end
def extra_options(content: nil, dry_run: false)
diff --git a/app/services/pages/migrate_legacy_storage_to_deployment_service.rb b/app/services/pages/migrate_legacy_storage_to_deployment_service.rb
new file mode 100644
index 00000000000..7a580e1a097
--- /dev/null
+++ b/app/services/pages/migrate_legacy_storage_to_deployment_service.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+module Pages
+ class MigrateLegacyStorageToDeploymentService
+ ExclusiveLeaseTaken = Class.new(StandardError)
+
+ include ::Pages::LegacyStorageLease
+
+ attr_reader :project
+
+ def initialize(project)
+ @project = project
+ end
+
+ def execute
+ migrated = try_obtain_lease do
+ execute_unsafe
+
+ true
+ end
+
+ raise ExclusiveLeaseTaken, "Can't migrate pages for project #{project.id}: exclusive lease taken" unless migrated
+ end
+
+ private
+
+ def execute_unsafe
+ archive_path, entries_count = ::Pages::ZipDirectoryService.new(project.pages_path).execute
+
+ deployment = nil
+ File.open(archive_path) do |file|
+ deployment = project.pages_deployments.create!(
+ file: file,
+ file_count: entries_count,
+ file_sha256: Digest::SHA256.file(archive_path).hexdigest
+ )
+ end
+
+ project.set_first_pages_deployment!(deployment)
+ rescue ::Pages::ZipDirectoryService::InvalidArchiveError => e
+ Gitlab::ErrorTracking.track_exception(e, project_id: project.id)
+
+ if !project.pages_metadatum&.reload&.pages_deployment &&
+ Feature.enabled?(:pages_migration_mark_as_not_deployed, project)
+ project.mark_pages_as_not_deployed
+ end
+
+ ensure
+ FileUtils.rm_f(archive_path) if archive_path
+ end
+ end
+end
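A hedged usage sketch for the new service, for example from a rails console or a future migration task; the project lookup is illustrative:

```ruby
# Sketch only; relies on the classes introduced in this change.
project = Project.find_by_full_path('group/project')

begin
  Pages::MigrateLegacyStorageToDeploymentService.new(project).execute
rescue Pages::MigrateLegacyStorageToDeploymentService::ExclusiveLeaseTaken => e
  # Another migration for the same project is already holding the lease.
  Rails.logger.warn(e.message)
end
```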
diff --git a/app/services/pages/zip_directory_service.rb b/app/services/pages/zip_directory_service.rb
index a27ad5fda46..5166526dc4c 100644
--- a/app/services/pages/zip_directory_service.rb
+++ b/app/services/pages/zip_directory_service.rb
@@ -2,32 +2,40 @@
module Pages
class ZipDirectoryService
+ include Gitlab::Utils::StrongMemoize
+
InvalidArchiveError = Class.new(RuntimeError)
InvalidEntryError = Class.new(RuntimeError)
PUBLIC_DIR = 'public'
def initialize(input_dir)
- @input_dir = File.realpath(input_dir)
- @output_file = File.join(@input_dir, "@migrated.zip") # '@' to avoid any name collision with groups or projects
+ @input_dir = input_dir
end
def execute
- FileUtils.rm_f(@output_file)
+ raise InvalidArchiveError unless valid_work_directory?
+
+ output_file = File.join(real_dir, "@migrated.zip") # '@' to avoid any name collision with groups or projects
+
+ FileUtils.rm_f(output_file)
count = 0
- ::Zip::File.open(@output_file, ::Zip::File::CREATE) do |zipfile|
+ ::Zip::File.open(output_file, ::Zip::File::CREATE) do |zipfile|
write_entry(zipfile, PUBLIC_DIR)
count = zipfile.entries.count
end
- [@output_file, count]
+ [output_file, count]
+ rescue => e
+ FileUtils.rm_f(output_file) if output_file
+ raise e
end
private
def write_entry(zipfile, zipfile_path)
- disk_file_path = File.join(@input_dir, zipfile_path)
+ disk_file_path = File.join(real_dir, zipfile_path)
unless valid_path?(disk_file_path)
# archive without a public directory is completely unusable
@@ -71,13 +79,27 @@ module Pages
def valid_path?(disk_file_path)
realpath = File.realpath(disk_file_path)
- realpath == File.join(@input_dir, PUBLIC_DIR) ||
- realpath.start_with?(File.join(@input_dir, PUBLIC_DIR + "/"))
+ realpath == File.join(real_dir, PUBLIC_DIR) ||
+ realpath.start_with?(File.join(real_dir, PUBLIC_DIR + "/"))
# happens if target of symlink isn't there
rescue => e
- Gitlab::ErrorTracking.track_exception(e, input_dir: @input_dir, disk_file_path: disk_file_path)
+ Gitlab::ErrorTracking.track_exception(e, input_dir: real_dir, disk_file_path: disk_file_path)
false
end
+
+ def valid_work_directory?
+ Dir.exist?(real_dir)
+ rescue => e
+ Gitlab::ErrorTracking.track_exception(e, input_dir: @input_dir)
+
+ false
+ end
+
+ def real_dir
+ strong_memoize(:real_dir) do
+ File.realpath(@input_dir) rescue nil
+ end
+ end
end
end
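For reference, a hedged sketch of how the service's return value is consumed by the migration service above; `project` is assumed to have a legacy Pages directory on disk:

```ruby
# Sketch only: returns the archive path and the number of zip entries.
archive_path, entries_count = Pages::ZipDirectoryService.new(project.pages_path).execute
# archive_path  #=> "#{project.pages_path}/@migrated.zip"
# entries_count #=> e.g. 2 for a site containing only public/index.html
```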
diff --git a/app/views/projects/pipelines/index.html.haml b/app/views/projects/pipelines/index.html.haml
index 64ae4ff8daf..6a4dd88ae07 100644
--- a/app/views/projects/pipelines/index.html.haml
+++ b/app/views/projects/pipelines/index.html.haml
@@ -17,4 +17,4 @@
"new-pipeline-path" => can?(current_user, :create_pipeline, @project) && new_project_pipeline_path(@project),
"ci-lint-path" => can?(current_user, :create_pipeline, @project) && project_ci_lint_path(@project),
"reset-cache-path" => can?(current_user, :admin_pipeline, @project) && reset_cache_project_settings_ci_cd_path(@project) ,
- "has-gitlab-ci" => (@project.has_ci? && @project.builds_enabled?).to_s } }
+ "has-gitlab-ci" => has_gitlab_ci?(@project).to_s } }
diff --git a/changelogs/unreleased/292680-refactor-users_controller_spec-rb-ssh_keys.yml b/changelogs/unreleased/292680-refactor-users_controller_spec-rb-ssh_keys.yml
new file mode 100644
index 00000000000..8361383aff1
--- /dev/null
+++ b/changelogs/unreleased/292680-refactor-users_controller_spec-rb-ssh_keys.yml
@@ -0,0 +1,5 @@
+---
+title: Refactor specs around ssh_keys in users_controller_spec.rb
+merge_request: 50338
+author: Takuya Noguchi
+type: other
diff --git a/changelogs/unreleased/expose-resolved_at-via-api.yml b/changelogs/unreleased/expose-resolved_at-via-api.yml
new file mode 100644
index 00000000000..df9904edc6b
--- /dev/null
+++ b/changelogs/unreleased/expose-resolved_at-via-api.yml
@@ -0,0 +1,5 @@
+---
+title: Expose notes resolved_at via API
+merge_request: 49821
+author: Lee Tickett
+type: added
diff --git a/config/feature_flags/development/pages_migration_mark_as_not_deployed.yml b/config/feature_flags/development/pages_migration_mark_as_not_deployed.yml
new file mode 100644
index 00000000000..f8b28785a2d
--- /dev/null
+++ b/config/feature_flags/development/pages_migration_mark_as_not_deployed.yml
@@ -0,0 +1,8 @@
+---
+name: pages_migration_mark_as_not_deployed
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/49473
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/295187
+milestone: '13.8'
+type: development
+group: group::release
+default_enabled: false
diff --git a/doc/api/discussions.md b/doc/api/discussions.md
index 51ca2bea3ae..a7a53987fe9 100644
--- a/doc/api/discussions.md
+++ b/doc/api/discussions.md
@@ -687,7 +687,8 @@ GET /projects/:id/merge_requests/:merge_request_iid/discussions
"noteable_iid": null,
"resolved": false,
"resolvable": true,
- "resolved_by": null
+ "resolved_by": null,
+ "resolved_at": null
},
{
"id": 1129,
diff --git a/doc/api/graphql/reference/gitlab_schema.graphql b/doc/api/graphql/reference/gitlab_schema.graphql
index 0ec30272c51..dbe5bb9e3f8 100644
--- a/doc/api/graphql/reference/gitlab_schema.graphql
+++ b/doc/api/graphql/reference/gitlab_schema.graphql
@@ -25249,6 +25249,11 @@ type Vulnerability implements Noteable {
resolvedAt: Time
"""
+ The user that resolved the vulnerability.
+ """
+ resolvedBy: User
+
+ """
Indicates whether the vulnerability is fixed on the default branch or not
"""
resolvedOnDefaultBranch: Boolean!
diff --git a/doc/api/graphql/reference/gitlab_schema.json b/doc/api/graphql/reference/gitlab_schema.json
index 3a83b0d0514..1a7b61c1e69 100644
--- a/doc/api/graphql/reference/gitlab_schema.json
+++ b/doc/api/graphql/reference/gitlab_schema.json
@@ -73469,6 +73469,20 @@
"deprecationReason": null
},
{
+ "name": "resolvedBy",
+ "description": "The user that resolved the vulnerability.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "User",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "resolvedOnDefaultBranch",
"description": "Indicates whether the vulnerability is fixed on the default branch or not",
"args": [
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 4c7576696a1..ce30dec5206 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -3795,6 +3795,7 @@ Represents a vulnerability.
| `project` | Project | The project on which the vulnerability was found |
| `reportType` | VulnerabilityReportType | Type of the security report that found the vulnerability (SAST, DEPENDENCY_SCANNING, CONTAINER_SCANNING, DAST, SECRET_DETECTION, COVERAGE_FUZZING, API_FUZZING) |
| `resolvedAt` | Time | Timestamp of when the vulnerability state was changed to resolved |
+| `resolvedBy` | User | The user that resolved the vulnerability. |
| `resolvedOnDefaultBranch` | Boolean! | Indicates whether the vulnerability is fixed on the default branch or not |
| `scanner` | VulnerabilityScanner | Scanner metadata for the vulnerability. |
| `severity` | VulnerabilitySeverity | Severity of the vulnerability (INFO, UNKNOWN, LOW, MEDIUM, HIGH, CRITICAL) |
diff --git a/doc/api/scim.md b/doc/api/scim.md
index a9622525478..6e67b6fbc2d 100644
--- a/doc/api/scim.md
+++ b/doc/api/scim.md
@@ -15,7 +15,7 @@ The SCIM API implements the [RFC7644 protocol](https://tools.ietf.org/html/rfc76
To use this API, [Group SSO](../user/group/saml_sso/index.md) must be enabled for the group.
This API is only in use where [SCIM for Group SSO](../user/group/saml_sso/scim_setup.md) is enabled. It's a prerequisite to the creation of SCIM identities.
-## Get a list of SAML users
+## Get a list of SCIM provisioned users
This endpoint is used as part of the SCIM syncing mechanism. It only returns
a single user based on a unique ID which should match the `extern_uid` of the user.
@@ -74,7 +74,7 @@ Example response:
}
```
-## Get a single SAML user
+## Get a single SCIM provisioned user
```plaintext
GET /api/scim/v2/groups/:group_path/Users/:id
@@ -115,7 +115,7 @@ Example response:
}
```
-## Create a SAML user
+## Create a SCIM provisioned user
```plaintext
POST /api/scim/v2/groups/:group_path/Users/
@@ -161,7 +161,7 @@ Example response:
Returns a `201` status code if successful.
-## Update a single SAML user
+## Update a single SCIM provisioned user
Fields that can be updated are:
@@ -193,7 +193,7 @@ curl --verbose --request PATCH "https://gitlab.example.com/api/scim/v2/groups/te
Returns an empty response with a `204` status code if successful.
-## Remove a single SAML user
+## Remove a single SCIM provisioned user
Removes the user's SSO identity and group membership.
diff --git a/doc/ci/merge_request_pipelines/img/pipeline-fork_v13_7.png b/doc/ci/merge_request_pipelines/img/pipeline-fork_v13_7.png
new file mode 100644
index 00000000000..03aefcebca2
--- /dev/null
+++ b/doc/ci/merge_request_pipelines/img/pipeline-fork_v13_7.png
Binary files differ
diff --git a/doc/ci/merge_request_pipelines/index.md b/doc/ci/merge_request_pipelines/index.md
index 220032eeab1..999d15eac24 100644
--- a/doc/ci/merge_request_pipelines/index.md
+++ b/doc/ci/merge_request_pipelines/index.md
@@ -177,6 +177,10 @@ coming from a fork:
- It's created and runs in the fork (source) project, not the parent (target) project.
- It uses the fork project's CI/CD configuration and resources.
+If a pipeline runs in a fork, the **fork** icon appears for the pipeline in the merge request.
+
+![Pipeline ran in fork](img/pipeline-fork_v13_7.png)
+
Sometimes parent project members want the pipeline to run in the parent
project. This could be to ensure that the post-merge pipeline passes in the parent project.
For example, a fork project could try to use a corrupted runner that doesn't execute
diff --git a/doc/development/experiment_guide/index.md b/doc/development/experiment_guide/index.md
index 35cd55b199c..6f8571a50c5 100644
--- a/doc/development/experiment_guide/index.md
+++ b/doc/development/experiment_guide/index.md
@@ -337,6 +337,27 @@ to the URL:
https://gitlab.com/<EXPERIMENT_ENTRY_URL>?force_experiment=<EXPERIMENT_KEY>
```
+### A cookie-based approach to force an experiment
+
+It's possible to force the current user to be in the experiment group for `<EXPERIMENT_KEY>`
+during the browser session by using your browser's developer tools:
+
+```javascript
+document.cookie = "force_experiment=<EXPERIMENT_KEY>; path=/";
+```
+
+To force more than one experiment at a time, separate the experiment keys with a comma:
+
+```javascript
+document.cookie = "force_experiment=<EXPERIMENT_KEY>,<ANOTHER_EXPERIMENT_KEY>; path=/";
+```
+
+Clear the experiments by unsetting the `force_experiment` cookie:
+
+```javascript
+document.cookie = "force_experiment=; path=/";
+```
+
### Testing and test helpers
#### RSpec
diff --git a/doc/topics/autodevops/upgrading_auto_deploy_dependencies.md b/doc/topics/autodevops/upgrading_auto_deploy_dependencies.md
index c45390e935d..663060bf59d 100644
--- a/doc/topics/autodevops/upgrading_auto_deploy_dependencies.md
+++ b/doc/topics/autodevops/upgrading_auto_deploy_dependencies.md
@@ -161,7 +161,7 @@ For example, if the template is bundled in GitLab v13.3, change your `.gitlab-ci
```yaml
include:
- template: Auto-DevOps.gitlab-ci.yml
- - remote: https://gitlab.com/gitlab-org/gitlab/-/blob/v13.3.0-ee/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
+ - remote: https://gitlab.com/gitlab-org/gitlab/-/raw/v13.3.0-ee/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
```
### Ignore warnings and continue deploying
@@ -181,7 +181,7 @@ the latest Auto Deploy template into your `.gitlab-ci.yml`:
```yaml
include:
- template: Auto-DevOps.gitlab-ci.yml
- - remote: https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml
+ - remote: https://gitlab.com/gitlab-org/gitlab/-/raw/master/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml
```
WARNING:
diff --git a/doc/user/admin_area/settings/continuous_integration.md b/doc/user/admin_area/settings/continuous_integration.md
index ef2eb046c21..139fd22a563 100644
--- a/doc/user/admin_area/settings/continuous_integration.md
+++ b/doc/user/admin_area/settings/continuous_integration.md
@@ -174,7 +174,7 @@ but commented out to help encourage others to add to it in the future. -->
WARNING:
This feature is being re-evaluated in favor of a different
-[compliance solution](https://gitlab.com/gitlab-org/gitlab/-/issues/34830).
+[compliance solution](https://gitlab.com/groups/gitlab-org/-/epics/3156).
We recommend that users who haven't yet implemented this feature wait for
the new solution.
diff --git a/doc/user/group/saml_sso/scim_setup.md b/doc/user/group/saml_sso/scim_setup.md
index 40c036e1fc0..cd3e99ae541 100644
--- a/doc/user/group/saml_sso/scim_setup.md
+++ b/doc/user/group/saml_sso/scim_setup.md
@@ -232,7 +232,7 @@ It is important that this SCIM `id` and SCIM `externalId` are configured to the
Group owners can see the list of users and the `externalId` stored for each user in the group SAML SSO Settings page.
-A possible alternative is to use the [SCIM API](../../../api/scim.md#get-a-list-of-saml-users) to manually retrieve the `externalId` we have stored for users, also called the `external_uid` or `NameId`.
+A possible alternative is to use the [SCIM API](../../../api/scim.md#get-a-list-of-scim-provisioned-users) to manually retrieve the `externalId` we have stored for users, also called the `external_uid` or `NameId`.
To see how the `external_uid` compares to the value returned as the SAML NameId, you can have the user use a [SAML Tracer](index.md#saml-debugging-tools).
@@ -251,7 +251,7 @@ you can address the problem in the following ways:
- You can have users unlink and relink themselves, based on the ["SAML authentication failed: User has already been taken"](index.md#message-saml-authentication-failed-user-has-already-been-taken) section.
- You can unlink all users simultaneously, by removing all users from the SAML app while provisioning is turned on.
-- It may be possible to use the [SCIM API](../../../api/scim.md#update-a-single-saml-user) to manually correct the `externalId` stored for users to match the SAML `NameId`.
+- It may be possible to use the [SCIM API](../../../api/scim.md#update-a-single-scim-provisioned-user) to manually correct the `externalId` stored for users to match the SAML `NameId`.
To look up a user, you'll need to know the desired value that matches the `NameId` as well as the current `externalId`.
It is important not to update these to incorrect values, since this will cause users to be unable to sign in. It is also important not to assign a value to the wrong user, as this would cause users to get signed into the wrong account.
@@ -269,7 +269,7 @@ Changing the SAML or SCIM configuration or provider can cause the following prob
| Problem | Solution |
|------------------------------------------------------------------------------|--------------------|
| SAML and SCIM identity mismatch. | First [verify that the user's SAML NameId matches the SCIM externalId](#how-do-i-verify-users-saml-nameid-matches-the-scim-externalid) and then [update or fix the mismatched SCIM externalId and SAML NameId](#update-or-fix-mismatched-scim-externalid-and-saml-nameid). |
-| SCIM identity mismatch between GitLab and the Identify Provider SCIM app. | You can confirm whether you're hitting the error because of your SCIM identity mismatch between your SCIM app and GitLab.com by using [SCIM API](../../../api/scim.md#update-a-single-saml-user) which shows up in the `id` key and compares it with the user `externalId` in the SCIM app. You can use the same [SCIM API](../../../api/scim.md#update-a-single-saml-user) to update the SCIM `id` for the user on GitLab.com. |
+| SCIM identity mismatch between GitLab and the Identity Provider SCIM app. | You can confirm whether the error is caused by a SCIM identity mismatch between your SCIM app and GitLab.com by using the [SCIM API](../../../api/scim.md#update-a-single-scim-provisioned-user): compare the value returned in the `id` key with the user's `externalId` in the SCIM app. You can use the same [SCIM API](../../../api/scim.md#update-a-single-scim-provisioned-user) to update the SCIM `id` for the user on GitLab.com. |
### Azure
diff --git a/doc/user/infrastructure/terraform_state.md b/doc/user/infrastructure/terraform_state.md
index cbd2a63524d..c5496c225d3 100644
--- a/doc/user/infrastructure/terraform_state.md
+++ b/doc/user/infrastructure/terraform_state.md
@@ -104,7 +104,7 @@ and the CI YAML file:
```
1. In the root directory of your project repository, configure a
- `.gitlab-ci.yaml` file. This example uses a pre-built image which includes a
+ `.gitlab-ci.yml` file. This example uses a pre-built image which includes a
`gitlab-terraform` helper. For supported Terraform versions, see the [GitLab
Terraform Images project](https://gitlab.com/gitlab-org/terraform-images).
@@ -112,7 +112,7 @@ and the CI YAML file:
image: registry.gitlab.com/gitlab-org/terraform-images/stable:latest
```
-1. In the `.gitlab-ci.yaml` file, define some environment variables to ease
+1. In the `.gitlab-ci.yml` file, define some environment variables to ease
development. In this example, `TF_ROOT` is the directory where the Terraform
commands must be executed, `TF_ADDRESS` is the URL to the state on the GitLab
instance where this pipeline runs, and the final path segment in `TF_ADDRESS`
diff --git a/doc/user/project/service_desk.md b/doc/user/project/service_desk.md
index 52686d3c243..32eaf79c16d 100644
--- a/doc/user/project/service_desk.md
+++ b/doc/user/project/service_desk.md
@@ -94,7 +94,9 @@ navigation's **Issues** menu.
### Using customized email templates
- > [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/2460) in [GitLab Premium](https://about.gitlab.com/pricing/) 12.7.
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/2460) in [GitLab Premium](https://about.gitlab.com/pricing/) 12.7.
+> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/214839) to [GitLab Starter](https://about.gitlab.com/pricing/) in 13.0.
+> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/215364) to [GitLab Core](https://about.gitlab.com/pricing/) in 13.2.
An email is sent to the author when:
@@ -255,7 +257,6 @@ issues created through customer support requests, and filter or interact with th
![Service Desk Issue tracker](img/service_desk_issue_tracker.png)
-Messages from the end user are shown as coming from the special Support Bot user. Apart from this,
Messages from the end user are shown as coming from the special
[Support Bot user](../../subscriptions/self_managed/index.md#billable-users).
You can read and write comments as you normally do in GitLab:
diff --git a/lib/api/entities/note.rb b/lib/api/entities/note.rb
index 9a60c04220d..a597aa7bb4a 100644
--- a/lib/api/entities/note.rb
+++ b/lib/api/entities/note.rb
@@ -23,6 +23,7 @@ module API
expose :resolvable?, as: :resolvable
expose :resolved?, as: :resolved, if: ->(note, options) { note.resolvable? }
expose :resolved_by, using: Entities::UserBasic, if: ->(note, options) { note.resolvable? }
+ expose :resolved_at, if: ->(note, options) { note.resolvable? }
expose :confidential?, as: :confidential
diff --git a/lib/gitlab/experimentation.rb b/lib/gitlab/experimentation.rb
index 9dbe29924db..4abe697263d 100644
--- a/lib/gitlab/experimentation.rb
+++ b/lib/gitlab/experimentation.rb
@@ -93,6 +93,9 @@ module Gitlab
},
ci_syntax_templates: {
tracking_category: 'Growth::Activation::Experiment::CiSyntaxTemplates'
+ },
+ pipelines_empty_state: {
+ tracking_category: 'Growth::Activation::Experiment::PipelinesEmptyState'
}
}.freeze
diff --git a/lib/gitlab/experimentation/controller_concern.rb b/lib/gitlab/experimentation/controller_concern.rb
index c85d3f4eee6..ecabfa21f7d 100644
--- a/lib/gitlab/experimentation/controller_concern.rb
+++ b/lib/gitlab/experimentation/controller_concern.rb
@@ -130,7 +130,10 @@ module Gitlab
end
def forced_enabled?(experiment_key)
- params.has_key?(:force_experiment) && params[:force_experiment] == experiment_key.to_s
+ return true if params.has_key?(:force_experiment) && params[:force_experiment] == experiment_key.to_s
+ return false if cookies[:force_experiment].blank?
+
+ cookies[:force_experiment].to_s.split(',').any? { |experiment| experiment.strip == experiment_key.to_s }
end
def tracking_label(subject)
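A short sketch of how the new cookie handling behaves inside a controller that includes this concern; the experiment keys are illustrative:

```ruby
# Sketch only: forced_enabled? now also honours a comma-separated cookie.
cookies[:force_experiment] = 'pipelines_empty_state, ci_syntax_templates'

forced_enabled?(:pipelines_empty_state) #=> true
forced_enabled?(:ci_syntax_templates)   #=> true (whitespace around commas is stripped)
forced_enabled?(:other_experiment)      #=> false
```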
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 4bcde93332d..1295f1ce085 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -20458,9 +20458,15 @@ msgstr ""
msgid "Pipelines|Editor"
msgstr ""
+msgid "Pipelines|Get started with CI/CD"
+msgstr ""
+
msgid "Pipelines|Get started with Pipelines"
msgstr ""
+msgid "Pipelines|GitLab CI/CD can automatically build, test, and deploy your code. Let GitLab take care of time consuming tasks, so you can spend more time creating."
+msgstr ""
+
msgid "Pipelines|Group %{namespace_name} has %{percentage}%% or less Shared Runner Pipeline minutes remaining. Once it runs out, no new jobs or pipelines in its projects will run."
msgstr ""
diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb
index 436daed0af6..66825a81ac0 100644
--- a/spec/controllers/import/bulk_imports_controller_spec.rb
+++ b/spec/controllers/import/bulk_imports_controller_spec.rb
@@ -63,9 +63,16 @@ RSpec.describe Import::BulkImportsController do
)
end
+ let(:client_params) do
+ {
+ top_level_only: true,
+ min_access_level: Gitlab::Access::MAINTAINER
+ }
+ end
+
before do
allow(controller).to receive(:client).and_return(client)
- allow(client).to receive(:get).with('groups', top_level_only: true).and_return(client_response)
+ allow(client).to receive(:get).with('groups', client_params).and_return(client_response)
end
it 'returns serialized group data' do
@@ -73,6 +80,17 @@ RSpec.describe Import::BulkImportsController do
expect(json_response).to eq({ importable_data: client_response.parsed_response }.as_json)
end
+
+ context 'when filtering' do
+ it 'returns filtered result' do
+ filter = 'test'
+ search_params = client_params.merge(search: filter)
+
+ expect(client).to receive(:get).with('groups', search_params).and_return(client_response)
+
+ get :status, format: :json, params: { filter: filter }
+ end
+ end
end
context 'when host url is local or not http' do
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index e1405660ccb..be4a1504fc9 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -272,6 +272,72 @@ RSpec.describe Projects::PipelinesController do
end
end
+ describe 'GET #index' do
+ subject(:request) { get :index, params: { namespace_id: project.namespace, project_id: project } }
+
+ context 'experiment not active' do
+ it 'does not push tracking_data to gon' do
+ request
+
+ expect(Gon.tracking_data).to be_nil
+ end
+
+ it 'does not record experiment_user' do
+ expect { request }.not_to change(ExperimentUser, :count)
+ end
+ end
+
+ context 'when experiment active' do
+ before do
+ stub_experiment(pipelines_empty_state: true)
+ stub_experiment_for_subject(pipelines_empty_state: true)
+ end
+
+ it 'pushes tracking_data to Gon' do
+ request
+
+ expect(Gon.experiments["pipelinesEmptyState"]).to eq(true)
+ expect(Gon.tracking_data).to match(
+ {
+ category: 'Growth::Activation::Experiment::PipelinesEmptyState',
+ action: 'view',
+ label: anything,
+ property: 'experimental_group',
+ value: anything
+ }
+ )
+ end
+
+ context 'no pipelines created and no CI set up' do
+ before do
+ stub_application_setting(auto_devops_enabled: false)
+ end
+
+ it 'records experiment_user' do
+ expect { request }.to change(ExperimentUser, :count).by(1)
+ end
+ end
+
+ context 'CI set up' do
+ it 'does not record experiment_user' do
+ expect { request }.not_to change(ExperimentUser, :count)
+ end
+ end
+
+ context 'pipelines created' do
+ let!(:pipeline) { create(:ci_pipeline, project: project) }
+
+ before do
+ stub_application_setting(auto_devops_enabled: false)
+ end
+
+ it 'does not record experiment_user' do
+ expect { request }.not_to change(ExperimentUser, :count)
+ end
+ end
+ end
+ end
+
describe 'GET show.json' do
let(:pipeline) { create(:ci_pipeline, project: project) }
diff --git a/spec/controllers/users_controller_spec.rb b/spec/controllers/users_controller_spec.rb
index 8809d859f7f..b5f4b765a23 100644
--- a/spec/controllers/users_controller_spec.rb
+++ b/spec/controllers/users_controller_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe UsersController do
context 'with rendered views' do
render_views
- describe 'when logged in' do
+ context 'when logged in' do
before do
sign_in(user)
end
@@ -25,7 +25,7 @@ RSpec.describe UsersController do
end
end
- describe 'when logged out' do
+ context 'when logged out' do
it 'renders the show template' do
get :show, params: { username: user.username }
@@ -119,7 +119,7 @@ RSpec.describe UsersController do
context 'with rendered views' do
render_views
- describe 'when logged in' do
+ context 'when logged in' do
before do
sign_in(user)
end
@@ -132,7 +132,7 @@ RSpec.describe UsersController do
end
end
- describe 'when logged out' do
+ context 'when logged out' do
it 'renders the show template' do
get :activity, params: { username: user.username }
@@ -222,51 +222,47 @@ RSpec.describe UsersController do
end
end
- describe "#ssh_keys" do
- describe "non existent user" do
- it "does not generally work" do
+ describe 'GET #ssh_keys' do
+ context 'non existent user' do
+ it 'does not generally work' do
get :ssh_keys, params: { username: 'not-existent' }
expect(response).not_to be_successful
end
end
- describe "user with no keys" do
- it "does generally work" do
+ context 'user with no keys' do
+ it 'does generally work' do
get :ssh_keys, params: { username: user.username }
expect(response).to be_successful
end
- it "renders all keys separated with a new line" do
+ it 'renders all keys separated with a new line' do
get :ssh_keys, params: { username: user.username }
expect(response.body).to eq("")
end
- it "responds with text/plain content type" do
+ it 'responds with text/plain content type' do
get :ssh_keys, params: { username: user.username }
expect(response.content_type).to eq("text/plain")
end
end
- describe "user with keys" do
+ context 'user with keys' do
let!(:key) { create(:key, user: user) }
let!(:another_key) { create(:another_key, user: user) }
let!(:deploy_key) { create(:deploy_key, user: user) }
- describe "while signed in" do
- before do
- sign_in(user)
- end
-
- it "does generally work" do
+ shared_examples_for 'renders all public keys' do
+ it 'does generally work' do
get :ssh_keys, params: { username: user.username }
expect(response).to be_successful
end
- it "renders all non deploy keys separated with a new line" do
+ it 'renders all non deploy keys separated with a new line' do
get :ssh_keys, params: { username: user.username }
expect(response.body).not_to eq('')
@@ -278,90 +274,79 @@ RSpec.describe UsersController do
expect(response.body).not_to include(deploy_key.key)
end
- it "does not render the comment of the key" do
+ it 'does not render the comment of the key' do
get :ssh_keys, params: { username: user.username }
+
expect(response.body).not_to match(/dummy@gitlab.com/)
end
- it "responds with text/plain content type" do
+ it 'responds with text/plain content type' do
get :ssh_keys, params: { username: user.username }
expect(response.content_type).to eq("text/plain")
end
end
- describe 'when logged out' do
+ context 'while signed in' do
before do
- sign_out(user)
- end
-
- it "still does generally work" do
- get :ssh_keys, params: { username: user.username }
-
- expect(response).to be_successful
+ sign_in(user)
end
- it "renders all non deploy keys separated with a new line" do
- get :ssh_keys, params: { username: user.username }
-
- expect(response.body).not_to eq('')
- expect(response.body).to eq(user.all_ssh_keys.join("\n"))
-
- expect(response.body).to include(key.key.sub(' dummy@gitlab.com', ''))
- expect(response.body).to include(another_key.key.sub(' dummy@gitlab.com', ''))
-
- expect(response.body).not_to include(deploy_key.key)
- end
+ it_behaves_like 'renders all public keys'
+ end
- it "does not render the comment of the key" do
- get :ssh_keys, params: { username: user.username }
- expect(response.body).not_to match(/dummy@gitlab.com/)
+ context 'when logged out' do
+ before do
+ sign_out(user)
end
- it "responds with text/plain content type" do
- get :ssh_keys, params: { username: user.username }
-
- expect(response.content_type).to eq("text/plain")
- end
+ it_behaves_like 'renders all public keys'
end
end
end
- describe "#gpg_keys" do
- describe "non existent user" do
- it "does not generally work" do
+ describe 'GET #gpg_keys' do
+ context 'non existent user' do
+ it 'does not generally work' do
get :gpg_keys, params: { username: 'not-existent' }
expect(response).not_to be_successful
end
end
- describe "user with no keys" do
- it "responds the empty body with text/plain content type" do
+ context 'user with no keys' do
+ it 'does generally work' do
get :gpg_keys, params: { username: user.username }
expect(response).to be_successful
+ end
+
+ it 'renders all keys separated with a new line' do
+ get :gpg_keys, params: { username: user.username }
+
+ expect(response.body).to eq("")
+ end
+
+ it 'responds with text/plain content type' do
+ get :gpg_keys, params: { username: user.username }
+
expect(response.content_type).to eq("text/plain")
expect(response.body).to eq("")
end
end
- describe "user with keys" do
+ context 'user with keys' do
let!(:gpg_key) { create(:gpg_key, user: user) }
let!(:another_gpg_key) { create(:another_gpg_key, user: user) }
- describe "while signed in" do
- before do
- sign_in(user)
- end
-
- it "does generally work" do
+ shared_examples_for 'renders all verified GPG keys' do
+ it 'does generally work' do
get :gpg_keys, params: { username: user.username }
expect(response).to be_successful
end
- it "renders all verified keys separated with a new line with text/plain content type" do
+ it 'renders all verified keys separated with a new line with text/plain content type' do
get :gpg_keys, params: { username: user.username }
expect(response.content_type).to eq("text/plain")
@@ -374,37 +359,29 @@ RSpec.describe UsersController do
end
end
- describe 'when logged out' do
+ context 'while signed in' do
before do
- sign_out(user)
+ sign_in(user)
end
- it "still does generally work" do
- get :gpg_keys, params: { username: user.username }
+ it_behaves_like 'renders all verified GPG keys'
+ end
- expect(response).to be_successful
+ context 'when logged out' do
+ before do
+ sign_out(user)
end
- it "renders all verified keys separated with a new line with text/plain content type" do
- get :gpg_keys, params: { username: user.username }
-
- expect(response.content_type).to eq("text/plain")
-
- expect(response.body).not_to eq('')
- expect(response.body).to eq(user.gpg_keys.map(&:key).join("\n"))
-
- expect(response.body).to include(gpg_key.key)
- expect(response.body).to include(another_gpg_key.key)
- end
+ it_behaves_like 'renders all verified GPG keys'
end
- describe 'when revoked' do
+ context 'when revoked' do
before do
sign_in(user)
another_gpg_key.revoke
end
- it "doesn't render revoked keys" do
+ it 'doesn\'t render revoked keys' do
get :gpg_keys, params: { username: user.username }
expect(response.body).not_to eq('')
@@ -413,7 +390,7 @@ RSpec.describe UsersController do
expect(response.body).not_to include(another_gpg_key.key)
end
- it "doesn't render revoked keys for non-authorized users" do
+ it 'doesn\'t render revoked keys for non-authorized users' do
sign_out(user)
get :gpg_keys, params: { username: user.username }
diff --git a/spec/features/groups/import_export/connect_instance_spec.rb b/spec/features/groups/import_export/connect_instance_spec.rb
index c0f967fd0b9..2e1bf27ba8b 100644
--- a/spec/features/groups/import_export/connect_instance_spec.rb
+++ b/spec/features/groups/import_export/connect_instance_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
pat = 'demo-pat'
stub_path = 'stub-group'
- stub_request(:get, "%{url}/api/v4/groups?page=1&per_page=30&top_level_only=true" % { url: source_url }).to_return(
+ stub_request(:get, "%{url}/api/v4/groups?page=1&per_page=30&top_level_only=true&min_access_level=40" % { url: source_url }).to_return(
body: [{
id: 2595438,
web_url: 'https://gitlab.com/groups/auto-breakfast',
diff --git a/spec/fixtures/api/schemas/entities/discussion.json b/spec/fixtures/api/schemas/entities/discussion.json
index 2afabcc9195..1a5b8150ed2 100644
--- a/spec/fixtures/api/schemas/entities/discussion.json
+++ b/spec/fixtures/api/schemas/entities/discussion.json
@@ -48,6 +48,7 @@
"resolved": { "type": "boolean" },
"resolvable": { "type": "boolean" },
"resolved_by": { "type": ["string", "null"] },
+ "resolved_at": { "type": ["date", "null"] },
"note": { "type": "string" },
"note_html": { "type": "string" },
"current_user": { "type": "object" },
diff --git a/spec/fixtures/api/schemas/public_api/v4/notes.json b/spec/fixtures/api/schemas/public_api/v4/notes.json
index 683dcb19836..c4510207882 100644
--- a/spec/fixtures/api/schemas/public_api/v4/notes.json
+++ b/spec/fixtures/api/schemas/public_api/v4/notes.json
@@ -31,6 +31,7 @@
"resolved": { "type": "boolean" },
"resolvable": { "type": "boolean" },
"resolved_by": { "type": ["string", "null"] },
+ "resolved_at": { "type": ["date", "null"] },
"confidential": { "type": ["boolean", "null"] }
},
"required": [
diff --git a/spec/frontend/pipelines/empty_state_spec.js b/spec/frontend/pipelines/empty_state_spec.js
index 28a73c8863c..7e42a3b5ae9 100644
--- a/spec/frontend/pipelines/empty_state_spec.js
+++ b/spec/frontend/pipelines/empty_state_spec.js
@@ -1,5 +1,7 @@
import { shallowMount } from '@vue/test-utils';
+import { withGonExperiment } from 'helpers/experimentation_helper';
import EmptyState from '~/pipelines/components/pipelines_list/empty_state.vue';
+import Tracking from '~/tracking';
describe('Pipelines Empty State', () => {
let wrapper;
@@ -38,15 +40,104 @@ describe('Pipelines Empty State', () => {
expect(findGetStartedButton().attributes('href')).toBe('foo');
});
- it('should render empty state information', () => {
- expect(findInfoText()).toContain(
- 'Continuous Integration can help catch bugs by running your tests automatically',
- 'while Continuous Deployment can help you deliver code to your product environment',
- );
+ describe('when in control group', () => {
+ it('should render empty state information', () => {
+ expect(findInfoText()).toContain(
+ 'Continuous Integration can help catch bugs by running your tests automatically',
+ 'while Continuous Deployment can help you deliver code to your product environment',
+ );
+ });
+
+ it('should render a button', () => {
+ expect(findGetStartedButton().text()).toBe('Get started with Pipelines');
+ });
+ });
+
+ describe('when in experiment group', () => {
+ withGonExperiment('pipelinesEmptyState');
+
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('should render empty state information', () => {
+ expect(findInfoText()).toContain(
+ 'GitLab CI/CD can automatically build, test, and deploy your code. Let GitLab take care of time',
+ 'consuming tasks, so you can spend more time creating',
+ );
+ });
+
+ it('should render button text', () => {
+ expect(findGetStartedButton().text()).toBe('Get started with CI/CD');
+ });
});
- it('should render a button', () => {
- expect(findGetStartedButton().text()).toBe('Get started with Pipelines');
+ describe('tracking', () => {
+ let origGon;
+
+ describe('when data is set', () => {
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event').mockImplementation(() => {});
+ origGon = window.gon;
+
+ window.gon = {
+ tracking_data: {
+ category: 'Growth::Activation::Experiment::PipelinesEmptyState',
+ value: 1,
+ property: 'experimental_group',
+ label: 'label',
+ },
+ };
+ createWrapper();
+ });
+
+ afterEach(() => {
+ window.gon = origGon;
+ });
+
+ it('tracks when mounted', () => {
+ expect(Tracking.event).toHaveBeenCalledWith(
+ 'Growth::Activation::Experiment::PipelinesEmptyState',
+ 'viewed',
+ {
+ value: 1,
+ label: 'label',
+ property: 'experimental_group',
+ },
+ );
+ });
+
+ it('tracks when button is clicked', () => {
+ findGetStartedButton().vm.$emit('click');
+
+ expect(Tracking.event).toHaveBeenCalledWith(
+ 'Growth::Activation::Experiment::PipelinesEmptyState',
+ 'documentation_clicked',
+ {
+ value: 1,
+ label: 'label',
+ property: 'experimental_group',
+ },
+ );
+ });
+ });
+
+ describe('when no data is defined', () => {
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event').mockImplementation(() => {});
+
+ createWrapper();
+ });
+
+ it('does not track on view', () => {
+ expect(Tracking.event).not.toHaveBeenCalled();
+ });
+
+ it('does not track when button is clicked', () => {
+ findGetStartedButton().vm.$emit('click');
+ expect(Tracking.event).not.toHaveBeenCalled();
+ });
+ });
});
});
});
diff --git a/spec/helpers/ci/pipelines_helper_spec.rb b/spec/helpers/ci/pipelines_helper_spec.rb
index a96d6e7711f..94b5e707d73 100644
--- a/spec/helpers/ci/pipelines_helper_spec.rb
+++ b/spec/helpers/ci/pipelines_helper_spec.rb
@@ -52,4 +52,23 @@ RSpec.describe Ci::PipelinesHelper do
end
end
end
+
+ describe 'has_gitlab_ci?' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:has_gitlab_ci?) { helper.has_gitlab_ci?(project) }
+
+ let(:project) { double(:project, has_ci?: has_ci?, builds_enabled?: builds_enabled?) }
+
+ where(:builds_enabled?, :has_ci?, :result) do
+ true | true | true
+ true | false | false
+ false | true | false
+ false | false | false
+ end
+
+ with_them do
+ it { expect(has_gitlab_ci?).to eq(result) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
index 03cb89ee033..c47f71c207d 100644
--- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb
+++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
@@ -156,6 +156,16 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
is_expected.to eq(true)
end
end
+
+ context 'Cookie parameter to force enable experiment' do
+ it 'returns true unconditionally' do
+ cookies[:force_experiment] = 'test_experiment,another_experiment'
+ get :index
+
+ expect(check_experiment(:test_experiment)).to eq(true)
+ expect(check_experiment(:another_experiment)).to eq(true)
+ end
+ end
end
describe '#track_experiment_event', :snowplow do
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 0a6af753283..1ece40c80f2 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -6002,6 +6002,43 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#set_first_pages_deployment!' do
+ let(:project) { create(:project) }
+ let(:deployment) { create(:pages_deployment, project: project) }
+
+ it "creates new metadata record if none exists yet and sets deployment" do
+ project.pages_metadatum.destroy!
+ project.reload
+
+ project.set_first_pages_deployment!(deployment)
+
+ expect(project.pages_metadatum.reload.pages_deployment).to eq(deployment)
+ end
+
+ it "updates the existing metadara record with deployment" do
+ expect do
+ project.set_first_pages_deployment!(deployment)
+ end.to change { project.pages_metadatum.reload.pages_deployment }.from(nil).to(deployment)
+ end
+
+ it 'only updates metadata for this project' do
+ other_project = create(:project)
+
+ expect do
+ project.set_first_pages_deployment!(deployment)
+ end.not_to change { other_project.pages_metadatum.reload.pages_deployment }.from(nil)
+ end
+
+ it 'does nothing if metadata already references some deployment' do
+ existing_deployment = create(:pages_deployment, project: project)
+ project.set_first_pages_deployment!(existing_deployment)
+
+ expect do
+ project.set_first_pages_deployment!(deployment)
+ end.not_to change { project.pages_metadatum.reload.pages_deployment }.from(existing_deployment)
+ end
+ end
+
describe '#has_pool_repsitory?' do
it 'returns false when it does not have a pool repository' do
subject = create(:project, :repository)
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index de52f8df31b..971ab176cfa 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -94,6 +94,7 @@ RSpec.describe Ci::CreatePipelineService do
describe 'recording a conversion event' do
it 'schedules a record conversion event worker' do
expect(Experiments::RecordConversionEventWorker).to receive(:perform_async).with(:ci_syntax_templates, user.id)
+ expect(Experiments::RecordConversionEventWorker).to receive(:perform_async).with(:pipelines_empty_state, user.id)
pipeline
end
diff --git a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
new file mode 100644
index 00000000000..0c6afec30db
--- /dev/null
+++ b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Pages::MigrateLegacyStorageToDeploymentService do
+ let(:project) { create(:project, :repository) }
+ let(:service) { described_class.new(project) }
+
+ it 'marks pages as not deployed if public directory is absent' do
+ project.mark_pages_as_deployed
+
+ expect do
+ service.execute
+ end.to change { project.pages_metadatum.reload.deployed }.from(true).to(false)
+ end
+
+ it 'does not mark pages as not deployed if public directory is absent but pages_deployment exists' do
+ deployment = create(:pages_deployment, project: project)
+ project.update_pages_deployment!(deployment)
+ project.mark_pages_as_deployed
+
+ expect do
+ service.execute
+ end.not_to change { project.pages_metadatum.reload.deployed }.from(true)
+ end
+
+ it 'does not mark pages as not deployed if public directory is absent but feature is disabled' do
+ stub_feature_flags(pages_migration_mark_as_not_deployed: false)
+
+ project.mark_pages_as_deployed
+
+ expect do
+ service.execute
+ end.not_to change { project.pages_metadatum.reload.deployed }.from(true)
+ end
+
+ it 'removes pages archive when the deployment cannot be saved' do
+ archive = fixture_file_upload("spec/fixtures/pages.zip")
+ expect_next_instance_of(::Pages::ZipDirectoryService) do |zip_service|
+ expect(zip_service).to receive(:execute).and_return([archive.path, 3])
+ end
+
+ expect_next_instance_of(PagesDeployment) do |deployment|
+ expect(deployment).to receive(:save!).and_raise("Something")
+ end
+
+ expect do
+ service.execute
+ end.to raise_error("Something")
+
+ expect(File.exist?(archive.path)).to eq(false)
+ end
+
+ context 'when pages site is deployed to legacy storage' do
+ before do
+ FileUtils.mkdir_p File.join(project.pages_path, "public")
+ File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
+ f.write("Hello!")
+ end
+ end
+
+ it 'creates pages deployment' do
+ expect do
+ described_class.new(project).execute
+ end.to change { project.reload.pages_deployments.count }.by(1)
+
+ deployment = project.pages_metadatum.pages_deployment
+
+ Zip::File.open(deployment.file.path) do |zip_file|
+ expect(zip_file.glob("public").first.ftype).to eq(:directory)
+ expect(zip_file.glob("public/index.html").first.get_input_stream.read).to eq("Hello!")
+ end
+
+ expect(deployment.file_count).to eq(2)
+ expect(deployment.file_sha256).to eq(Digest::SHA256.file(deployment.file.path).hexdigest)
+ end
+
+ it 'removes tmp pages archive' do
+ described_class.new(project).execute
+
+ expect(File.exist?(File.join(project.pages_path, '@migrated.zip'))).to eq(false)
+ end
+
+ it 'does not change pages deployment if it is set' do
+ old_deployment = create(:pages_deployment, project: project)
+ project.update_pages_deployment!(old_deployment)
+
+ expect do
+ described_class.new(project).execute
+ end.not_to change { project.pages_metadatum.reload.pages_deployment_id }.from(old_deployment.id)
+ end
+
+ it 'raises exception if exclusive lease is taken' do
+ described_class.new(project).try_obtain_lease do
+ expect do
+ described_class.new(project).execute
+ end.to raise_error(described_class::ExclusiveLeaseTaken)
+ end
+ end
+ end
+end
diff --git a/spec/services/pages/zip_directory_service_spec.rb b/spec/services/pages/zip_directory_service_spec.rb
index 1568103d102..efaa6a48647 100644
--- a/spec/services/pages/zip_directory_service_spec.rb
+++ b/spec/services/pages/zip_directory_service_spec.rb
@@ -3,207 +3,217 @@
require 'spec_helper'
RSpec.describe Pages::ZipDirectoryService do
- around do |example|
- Dir.mktmpdir do |dir|
- @work_dir = dir
- example.run
- end
- end
-
- let(:result) do
- described_class.new(@work_dir).execute
+ it 'raises error if project pages dir does not exist' do
+ expect do
+ described_class.new("/tmp/not/existing/dir").execute
+ end.to raise_error(described_class::InvalidArchiveError)
end
- let(:archive) { result.first }
- let(:entries_count) { result.second }
+ context 'when work dir exists' do
+ around do |example|
+ Dir.mktmpdir do |dir|
+ @work_dir = dir
+ example.run
+ end
+ end
- it 'raises error if there is no public directory' do
- expect { archive }.to raise_error(described_class::InvalidArchiveError)
- end
+ let(:result) do
+ described_class.new(@work_dir).execute
+ end
- it 'raises error if public directory is a symlink' do
- create_dir('target')
- create_file('./target/index.html', 'hello')
- create_link("public", "./target")
+ let(:archive) { result.first }
+ let(:entries_count) { result.second }
- expect { archive }.to raise_error(described_class::InvalidArchiveError)
- end
+    it 'raises error if there is no public directory and does not leave an archive behind' do
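+      # The fresh work dir contains no public/ directory, and no partial @migrated.zip should be left behind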
+ expect { archive }.to raise_error(described_class::InvalidArchiveError)
- context 'when there is a public directory' do
- before do
- create_dir('public')
+ expect(File.exist?(File.join(@work_dir, '@migrated.zip'))).to eq(false)
end
- it 'creates the file next the public directory' do
- expect(archive).to eq(File.join(@work_dir, "@migrated.zip"))
+ it 'raises error if public directory is a symlink' do
+ create_dir('target')
+ create_file('./target/index.html', 'hello')
+ create_link("public", "./target")
+
+ expect { archive }.to raise_error(described_class::InvalidArchiveError)
end
- it 'includes public directory' do
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/")
- expect(entry.ftype).to eq(:directory)
+ context 'when there is a public directory' do
+ before do
+ create_dir('public')
end
- end
- it 'returns number of entries' do
- create_file("public/index.html", "hello")
- create_link("public/link.html", "./index.html")
- expect(entries_count).to eq(3) # + 'public' directory
- end
+      it 'creates the file next to the public directory' do
+ expect(archive).to eq(File.join(@work_dir, "@migrated.zip"))
+ end
- it 'removes the old file if it exists' do
- # simulate the old run
- described_class.new(@work_dir).execute
+ it 'includes public directory' do
+ with_zip_file do |zip_file|
+ entry = zip_file.get_entry("public/")
+ expect(entry.ftype).to eq(:directory)
+ end
+ end
- with_zip_file do |zip_file|
- expect(zip_file.entries.count).to eq(1)
+ it 'returns number of entries' do
+ create_file("public/index.html", "hello")
+ create_link("public/link.html", "./index.html")
+ expect(entries_count).to eq(3) # + 'public' directory
end
- end
- it 'ignores other top level files and directories' do
- create_file("top_level.html", "hello")
- create_dir("public2")
+ it 'removes the old file if it exists' do
+ # simulate the old run
+ described_class.new(@work_dir).execute
- with_zip_file do |zip_file|
- expect { zip_file.get_entry("top_level.html") }.to raise_error(Errno::ENOENT)
- expect { zip_file.get_entry("public2/") }.to raise_error(Errno::ENOENT)
+ with_zip_file do |zip_file|
+ expect(zip_file.entries.count).to eq(1)
+ end
end
- end
- it 'includes index.html file' do
- create_file("public/index.html", "Hello!")
+ it 'ignores other top level files and directories' do
+ create_file("top_level.html", "hello")
+ create_dir("public2")
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/index.html")
- expect(zip_file.read(entry)).to eq("Hello!")
+ with_zip_file do |zip_file|
+ expect { zip_file.get_entry("top_level.html") }.to raise_error(Errno::ENOENT)
+ expect { zip_file.get_entry("public2/") }.to raise_error(Errno::ENOENT)
+ end
end
- end
- it 'includes hidden file' do
- create_file("public/.hidden.html", "Hello!")
+ it 'includes index.html file' do
+ create_file("public/index.html", "Hello!")
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/.hidden.html")
- expect(zip_file.read(entry)).to eq("Hello!")
+ with_zip_file do |zip_file|
+ entry = zip_file.get_entry("public/index.html")
+ expect(zip_file.read(entry)).to eq("Hello!")
+ end
end
- end
- it 'includes nested directories and files' do
- create_dir("public/nested")
- create_dir("public/nested/nested2")
- create_file("public/nested/nested2/nested.html", "Hello nested")
+ it 'includes hidden file' do
+ create_file("public/.hidden.html", "Hello!")
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/nested")
- expect(entry.ftype).to eq(:directory)
+ with_zip_file do |zip_file|
+ entry = zip_file.get_entry("public/.hidden.html")
+ expect(zip_file.read(entry)).to eq("Hello!")
+ end
+ end
- entry = zip_file.get_entry("public/nested/nested2")
- expect(entry.ftype).to eq(:directory)
+ it 'includes nested directories and files' do
+ create_dir("public/nested")
+ create_dir("public/nested/nested2")
+ create_file("public/nested/nested2/nested.html", "Hello nested")
- entry = zip_file.get_entry("public/nested/nested2/nested.html")
- expect(zip_file.read(entry)).to eq("Hello nested")
+ with_zip_file do |zip_file|
+ entry = zip_file.get_entry("public/nested")
+ expect(entry.ftype).to eq(:directory)
+
+ entry = zip_file.get_entry("public/nested/nested2")
+ expect(entry.ftype).to eq(:directory)
+
+ entry = zip_file.get_entry("public/nested/nested2/nested.html")
+ expect(zip_file.read(entry)).to eq("Hello nested")
+ end
end
- end
- it 'adds a valid symlink' do
- create_file("public/target.html", "hello")
- create_link("public/link.html", "./target.html")
+ it 'adds a valid symlink' do
+ create_file("public/target.html", "hello")
+ create_link("public/link.html", "./target.html")
- with_zip_file do |zip_file|
- entry = zip_file.get_entry("public/link.html")
- expect(entry.ftype).to eq(:symlink)
- expect(zip_file.read(entry)).to eq("./target.html")
+ with_zip_file do |zip_file|
+ entry = zip_file.get_entry("public/link.html")
+ expect(entry.ftype).to eq(:symlink)
+ expect(zip_file.read(entry)).to eq("./target.html")
+ end
end
- end
- it 'ignores the symlink pointing outside of public directory' do
- create_file("target.html", "hello")
- create_link("public/link.html", "../target.html")
+      it 'ignores the symlink pointing outside of the public directory' do
+ create_file("target.html", "hello")
+ create_link("public/link.html", "../target.html")
- with_zip_file do |zip_file|
- expect { zip_file.get_entry("public/link.html") }.to raise_error(Errno::ENOENT)
+ with_zip_file do |zip_file|
+ expect { zip_file.get_entry("public/link.html") }.to raise_error(Errno::ENOENT)
+ end
end
- end
- it 'ignores the symlink if target is absent' do
- create_link("public/link.html", "./target.html")
+      it 'ignores the symlink if the target is absent' do
+ create_link("public/link.html", "./target.html")
- with_zip_file do |zip_file|
- expect { zip_file.get_entry("public/link.html") }.to raise_error(Errno::ENOENT)
+ with_zip_file do |zip_file|
+ expect { zip_file.get_entry("public/link.html") }.to raise_error(Errno::ENOENT)
+ end
end
- end
- it 'ignores symlink if is absolute and points to outside of directory' do
- target = File.join(@work_dir, "target")
- FileUtils.touch(target)
+      it 'ignores symlink if it is absolute and points outside of the directory' do
+ target = File.join(@work_dir, "target")
+ FileUtils.touch(target)
- create_link("public/link.html", target)
+ create_link("public/link.html", target)
- with_zip_file do |zip_file|
- expect { zip_file.get_entry("public/link.html") }.to raise_error(Errno::ENOENT)
+ with_zip_file do |zip_file|
+ expect { zip_file.get_entry("public/link.html") }.to raise_error(Errno::ENOENT)
+ end
end
- end
- it "includes raw symlink if it's target is a valid directory" do
- create_dir("public/target")
- create_file("public/target/index.html", "hello")
- create_link("public/link", "./target")
+ it "includes raw symlink if it's target is a valid directory" do
+ create_dir("public/target")
+ create_file("public/target/index.html", "hello")
+ create_link("public/link", "./target")
- with_zip_file do |zip_file|
- expect(zip_file.entries.count).to eq(4) # /public and 3 created above
+ with_zip_file do |zip_file|
+ expect(zip_file.entries.count).to eq(4) # /public and 3 created above
- entry = zip_file.get_entry("public/link")
- expect(entry.ftype).to eq(:symlink)
- expect(zip_file.read(entry)).to eq("./target")
+ entry = zip_file.get_entry("public/link")
+ expect(entry.ftype).to eq(:symlink)
+ expect(zip_file.read(entry)).to eq("./target")
+ end
end
end
- end
- context "validating fixtures pages archives" do
- using RSpec::Parameterized::TableSyntax
+ context "validating fixtures pages archives" do
+ using RSpec::Parameterized::TableSyntax
- where(:fixture_path) do
- ["spec/fixtures/pages.zip", "spec/fixtures/pages_non_writeable.zip"]
- end
+ where(:fixture_path) do
+ ["spec/fixtures/pages.zip", "spec/fixtures/pages_non_writeable.zip"]
+ end
- with_them do
- let(:full_fixture_path) { Rails.root.join(fixture_path) }
+ with_them do
+ let(:full_fixture_path) { Rails.root.join(fixture_path) }
- it 'a created archives contains exactly the same entries' do
- SafeZip::Extract.new(full_fixture_path).extract(directories: ['public'], to: @work_dir)
+        it 'a created archive contains exactly the same entries' do
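+          # Extract the fixture archive into the work dir, re-zip it with the service, and compare entry-by-entry with the original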
+ SafeZip::Extract.new(full_fixture_path).extract(directories: ['public'], to: @work_dir)
- with_zip_file do |created_archive|
- Zip::File.open(full_fixture_path) do |original_archive|
- original_archive.entries do |original_entry|
- created_entry = created_archive.get_entry(original_entry.name)
+ with_zip_file do |created_archive|
+ Zip::File.open(full_fixture_path) do |original_archive|
+              original_archive.entries.each do |original_entry|
+ created_entry = created_archive.get_entry(original_entry.name)
- expect(created_entry.name).to eq(original_entry.name)
- expect(created_entry.ftype).to eq(original_entry.ftype)
- expect(created_archive.read(created_entry)).to eq(original_archive.read(original_entry))
+ expect(created_entry.name).to eq(original_entry.name)
+ expect(created_entry.ftype).to eq(original_entry.ftype)
+ expect(created_archive.read(created_entry)).to eq(original_archive.read(original_entry))
+ end
end
end
end
end
end
- end
- def create_file(name, content)
- File.open(File.join(@work_dir, name), "w") do |f|
- f.write(content)
+ def create_file(name, content)
+ File.open(File.join(@work_dir, name), "w") do |f|
+ f.write(content)
+ end
end
- end
- def create_dir(dir)
- Dir.mkdir(File.join(@work_dir, dir))
- end
+ def create_dir(dir)
+ Dir.mkdir(File.join(@work_dir, dir))
+ end
- def create_link(new_name, target)
- File.symlink(target, File.join(@work_dir, new_name))
- end
+ def create_link(new_name, target)
+ File.symlink(target, File.join(@work_dir, new_name))
+ end
- def with_zip_file
- Zip::File.open(archive) do |zip_file|
- yield zip_file
+ def with_zip_file
+ Zip::File.open(archive) do |zip_file|
+ yield zip_file
+ end
end
end
end
diff --git a/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb b/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb
index 5748e873fd4..460e8d57a2b 100644
--- a/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/resolvable_discussions_shared_examples.rb
@@ -9,6 +9,7 @@ RSpec.shared_examples 'resolvable discussions API' do |parent_type, noteable_typ
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['notes'].size).to eq(1)
expect(json_response['notes'][0]['resolved']).to eq(true)
+ expect(Time.parse(json_response['notes'][0]['resolved_at'])).to be_like_time(note.reload.resolved_at)
end
it "unresolves discussion if resolved is false" do
@@ -18,6 +19,7 @@ RSpec.shared_examples 'resolvable discussions API' do |parent_type, noteable_typ
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['notes'].size).to eq(1)
expect(json_response['notes'][0]['resolved']).to eq(false)
+ expect(json_response['notes'][0]['resolved_at']).to be_nil
end
it "returns a 400 bad request error if resolved parameter is not passed" do
diff --git a/spec/workers/experiments/record_conversion_event_worker_spec.rb b/spec/workers/experiments/record_conversion_event_worker_spec.rb
index 2cab392074f..05e4ebc13ba 100644
--- a/spec/workers/experiments/record_conversion_event_worker_spec.rb
+++ b/spec/workers/experiments/record_conversion_event_worker_spec.rb
@@ -12,10 +12,14 @@ RSpec.describe Experiments::RecordConversionEventWorker, '#perform' do
context 'when the experiment is active' do
let(:experiment_active) { true }
- it 'records the event' do
- expect(Experiment).to receive(:record_conversion_event).with(:experiment_key, 1234)
+ include_examples 'an idempotent worker' do
+ subject { perform }
- perform
+ it 'records the event' do
+ expect(Experiment).to receive(:record_conversion_event).with(:experiment_key, 1234)
+
+ perform
+ end
end
end