author    | GitLab Bot <gitlab-bot@gitlab.com> | 2020-05-14 18:08:06 +0000
committer | GitLab Bot <gitlab-bot@gitlab.com> | 2020-05-14 18:08:06 +0000
commit    | 30b17460a2569734cf04dae1b2841d3654b2c0ec
tree      | 86eb199f8a21eadeff824b26a8736986266a9a31
parent    | 87f286558de1f5790b0b1742f10548387b5d147a
download  | gitlab-ce-30b17460a2569734cf04dae1b2841d3654b2c0ec.tar.gz
Add latest changes from gitlab-org/gitlab@master
62 files changed, 781 insertions(+), 193 deletions(-)
diff --git a/README.md b/README.md index 1372e47d52f..51a54c3bbff 100644 --- a/README.md +++ b/README.md @@ -82,7 +82,7 @@ GitLab is a Ruby on Rails application that runs on the following software: - Ruby (MRI) 2.6.5 - Git 2.8.4+ - Redis 2.8+ -- PostgreSQL 9.6+ +- PostgreSQL 11+ For more information please see the [architecture](https://docs.gitlab.com/ee/development/architecture.html) and [requirements](https://docs.gitlab.com/ee/install/requirements.html) documentation. diff --git a/app/assets/javascripts/releases/components/release_block_footer.vue b/app/assets/javascripts/releases/components/release_block_footer.vue index a95fbc0b373..26154272d39 100644 --- a/app/assets/javascripts/releases/components/release_block_footer.vue +++ b/app/assets/javascripts/releases/components/release_block_footer.vue @@ -57,6 +57,11 @@ export default { ? sprintf(__("%{username}'s avatar"), { username: this.author.username }) : null; }, + createdTime() { + const now = new Date(); + const isFuture = now < new Date(this.releasedAt); + return isFuture ? __('Will be created') : __('Created'); + }, }, }; </script> @@ -86,7 +91,7 @@ export default { v-if="releasedAt || author" class="float-left d-flex align-items-center js-author-date-info" > - <span class="text-secondary">{{ __('Created') }} </span> + <span class="text-secondary">{{ createdTime }} </span> <template v-if="releasedAt"> <span v-gl-tooltip.bottom diff --git a/app/assets/javascripts/releases/components/release_block_metadata.vue b/app/assets/javascripts/releases/components/release_block_metadata.vue index 052e4088a5f..40133941011 100644 --- a/app/assets/javascripts/releases/components/release_block_metadata.vue +++ b/app/assets/javascripts/releases/components/release_block_metadata.vue @@ -38,9 +38,12 @@ export default { return Boolean(this.author); }, releasedTimeAgo() { - return sprintf(__('released %{time}'), { - time: this.timeFormatted(this.release.releasedAt), - }); + const now = new Date(); + const isFuture = now < new Date(this.release.releasedAt); + const time = this.timeFormatted(this.release.releasedAt); + return isFuture + ? sprintf(__('will be released %{time}'), { time }) + : sprintf(__('released %{time}'), { time }); }, shouldRenderMilestones() { return Boolean(this.release.milestones?.length); @@ -74,7 +77,11 @@ export default { <div class="append-right-4"> • - <span v-gl-tooltip.bottom :title="tooltipTitle(release.releasedAt)"> + <span + v-gl-tooltip.bottom + class="js-release-date-info" + :title="tooltipTitle(release.releasedAt)" + > {{ releasedTimeAgo }} </span> </div> diff --git a/app/models/ci/group.rb b/app/models/ci/group.rb index 15dc1ca8954..4b2081f2977 100644 --- a/app/models/ci/group.rb +++ b/app/models/ci/group.rb @@ -46,7 +46,7 @@ module Ci end def self.fabricate(project, stage) - stage.statuses.ordered.latest + stage.latest_statuses .sort_by(&:sortable_name).group_by(&:group_name) .map do |group_name, grouped_statuses| self.new(project, stage, name: group_name, jobs: grouped_statuses) diff --git a/app/models/ci/legacy_stage.rb b/app/models/ci/legacy_stage.rb index f156219ea81..250306e2be4 100644 --- a/app/models/ci/legacy_stage.rb +++ b/app/models/ci/legacy_stage.rb @@ -41,6 +41,10 @@ module Ci .fabricate! 
end + def latest_statuses + statuses.ordered.latest + end + def statuses @statuses ||= pipeline.statuses.where(stage: name) end diff --git a/app/models/ci/processable.rb b/app/models/ci/processable.rb index 00931d7e758..482d061c7cf 100644 --- a/app/models/ci/processable.rb +++ b/app/models/ci/processable.rb @@ -83,7 +83,7 @@ module Ci # Overriding scheduling_type enum's method for nil `scheduling_type`s def scheduling_type_dag? - super || find_legacy_scheduling_type == :dag + scheduling_type.nil? ? find_legacy_scheduling_type == :dag : super end # scheduling_type column of previous builds/bridges have not been populated, diff --git a/app/models/ci/stage.rb b/app/models/ci/stage.rb index d3325a47053..a316b4718e0 100644 --- a/app/models/ci/stage.rb +++ b/app/models/ci/stage.rb @@ -13,6 +13,7 @@ module Ci belongs_to :pipeline has_many :statuses, class_name: 'CommitStatus', foreign_key: :stage_id + has_many :latest_statuses, -> { ordered.latest }, class_name: 'CommitStatus', foreign_key: :stage_id has_many :processables, class_name: 'Ci::Processable', foreign_key: :stage_id has_many :builds, foreign_key: :stage_id has_many :bridges, foreign_key: :stage_id diff --git a/app/presenters/README.md b/app/presenters/README.md index dc4173a880e..62aec4fc8a2 100644 --- a/app/presenters/README.md +++ b/app/presenters/README.md @@ -8,7 +8,7 @@ methods from models to presenters. ### When your view is full of logic -When your view is full of logic (`if`, `else`, `select` on arrays etc.), it's +When your view is full of logic (`if`, `else`, `select` on arrays, etc.), it's time to create a presenter! ### When your model has a lot of view-related logic/data methods @@ -27,11 +27,11 @@ Presenters should be used for: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/7073/diffs. - Data and logic methods that can be pulled from models. - Simple text output methods: it's ok if the method returns a string, but not a - whole DOM element for which we'd need HAML, a view context, helpers etc. + whole DOM element for which we'd need HAML, a view context, helpers, etc. ## Why use presenters instead of model concerns? -We should strive to follow the single-responsibility principle, and view-related +We should strive to follow the single-responsibility principle and view-related logic/data methods are definitely not the responsibility of models! Another reason is as follows: @@ -52,22 +52,22 @@ we gain the following benefits: - rules are more explicit and centralized in the presenter => improves security - testing is easier and faster as presenters are Plain Old Ruby Object (PORO) - views are more readable and maintainable -- decreases number of CE -> EE merge conflicts since code is in separate files +- decreases the number of CE -> EE merge conflicts since code is in separate files - moves the conflicts from views (not always obvious) to presenters (a lot easier to resolve) ## What not to do with presenters? - Don't use helpers in presenters. Presenters are not aware of the view context. -- Don't generate complex DOM elements, forms etc. with presenters. Presenters - can return simple data as texts, and URLs using URL helpers from - `Gitlab::Routing` but nothing much more fancy. +- Don't generate complex DOM elements, forms, etc. with presenters. Presenters + can return simple data like texts, and URLs using URL helpers from + `Gitlab::Routing` but nothing much fancier. 
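As a rough illustration of these rules, here is a minimal presenter sketch that only returns simple text and performs a permission check. It assumes the `Gitlab::View::Presenter::Simple` base class and `.presents` accessor covered under Implementation below; the `BlogPost` model, its attributes, and the policy action are hypothetical, not part of the real codebase.

```ruby
# Illustrative sketch only: BlogPost, published_at, and :update_blog_post are hypothetical.
class BlogPostPresenter < Gitlab::View::Presenter::Simple
  # Defines a `blog_post` accessor for the presented object.
  presents :blog_post

  # Simple text output is fine for a presenter.
  def publication_status
    blog_post.published_at ? 'Published' : 'Draft'
  end

  # Permission checks via Gitlab::Allowable are fine too.
  def can_update?
    can?(current_user, :update_blog_post, blog_post)
  end
end
```

A view would then call these methods on the presented object (for example, via `blog_post.present(current_user: current_user)`, assuming the model includes the `Presentable` concern) instead of embedding the conditional logic in HAML.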
## Implementation ### Presenter definition Every presenter should inherit from `Gitlab::View::Presenter::Simple`, which -provides a `.presents` method which allows you to define an accessor for the +provides a `.presents` the method which allows you to define an accessor for the presented object. It also includes common helpers like `Gitlab::Routing` and `Gitlab::Allowable`. diff --git a/app/serializers/ci/dag_job_entity.rb b/app/serializers/ci/dag_job_entity.rb new file mode 100644 index 00000000000..b4947319ed1 --- /dev/null +++ b/app/serializers/ci/dag_job_entity.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +module Ci + class DagJobEntity < Grape::Entity + expose :name + + expose :needs, if: -> (job, _) { job.scheduling_type_dag? } do |job| + job.needs.pluck(:name) # rubocop: disable CodeReuse/ActiveRecord + end + end +end diff --git a/app/serializers/ci/dag_job_group_entity.rb b/app/serializers/ci/dag_job_group_entity.rb new file mode 100644 index 00000000000..ac1ed89281c --- /dev/null +++ b/app/serializers/ci/dag_job_group_entity.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +module Ci + class DagJobGroupEntity < Grape::Entity + expose :name + expose :size + expose :jobs, with: Ci::DagJobEntity + end +end diff --git a/app/serializers/ci/dag_pipeline_entity.rb b/app/serializers/ci/dag_pipeline_entity.rb new file mode 100644 index 00000000000..b615dd2b194 --- /dev/null +++ b/app/serializers/ci/dag_pipeline_entity.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module Ci + class DagPipelineEntity < Grape::Entity + expose :ordered_stages_with_preloads, as: :stages, using: Ci::DagStageEntity + + private + + def ordered_stages_with_preloads + object.ordered_stages.preload(preloaded_relations) # rubocop: disable CodeReuse/ActiveRecord + end + + def preloaded_relations + [ + :project, + { latest_statuses: :needs } + ] + end + end +end diff --git a/app/serializers/ci/dag_pipeline_serializer.rb b/app/serializers/ci/dag_pipeline_serializer.rb new file mode 100644 index 00000000000..0c9e9a9db69 --- /dev/null +++ b/app/serializers/ci/dag_pipeline_serializer.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +module Ci + class DagPipelineSerializer < BaseSerializer + entity Ci::DagPipelineEntity + end +end diff --git a/app/serializers/ci/dag_stage_entity.rb b/app/serializers/ci/dag_stage_entity.rb new file mode 100644 index 00000000000..c7969da6c3c --- /dev/null +++ b/app/serializers/ci/dag_stage_entity.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +module Ci + class DagStageEntity < Grape::Entity + expose :name + + expose :groups, with: Ci::DagJobGroupEntity + end +end diff --git a/app/serializers/merge_request_serializer.rb b/app/serializers/merge_request_serializer.rb index 9fd50c8c51d..508a2510dbd 100644 --- a/app/serializers/merge_request_serializer.rb +++ b/app/serializers/merge_request_serializer.rb @@ -27,3 +27,5 @@ class MergeRequestSerializer < BaseSerializer super(merge_request, opts, entity) end end + +MergeRequestSerializer.prepend_if_ee('EE::MergeRequestSerializer') diff --git a/changelogs/unreleased/199046-text-for-future-release-date-grammatically-incorrect.yml b/changelogs/unreleased/199046-text-for-future-release-date-grammatically-incorrect.yml new file mode 100644 index 00000000000..7cc1f6daa28 --- /dev/null +++ b/changelogs/unreleased/199046-text-for-future-release-date-grammatically-incorrect.yml @@ -0,0 +1,5 @@ +--- +title: Resolve Text for future Release date grammatically incorrect +merge_request: 28075 +author: +type: fixed diff --git 
a/doc/administration/database_load_balancing.md b/doc/administration/database_load_balancing.md index 1bc848e537a..0f566fcc114 100644 --- a/doc/administration/database_load_balancing.md +++ b/doc/administration/database_load_balancing.md @@ -26,9 +26,9 @@ sent to the primary (unless necessary), the primary (`db3`) hardly has any load. ## Requirements -For load balancing to work you will need at least PostgreSQL 9.2 or newer, +For load balancing to work you will need at least PostgreSQL 11 or newer, [**MySQL is not supported**](../install/requirements.md#database). You also need to make sure that you have -at least 1 secondary in [hot standby](https://www.postgresql.org/docs/9.6/hot-standby.html) mode. +at least 1 secondary in [hot standby](https://www.postgresql.org/docs/11/hot-standby.html) mode. Load balancing also requires that the configured hosts **always** point to the primary, even after a database failover. Furthermore, the additional hosts to diff --git a/doc/administration/external_database.md b/doc/administration/external_database.md index 13c9ef872f8..47509828c20 100644 --- a/doc/administration/external_database.md +++ b/doc/administration/external_database.md @@ -13,6 +13,11 @@ If you use a cloud-managed service, or provide your own PostgreSQL instance: [database requirements document](../install/requirements.md#database). 1. Set up a `gitlab` username with a password of your choice. The `gitlab` user needs privileges to create the `gitlabhq_production` database. +1. If you are using a cloud-managed service, you may need to grant additional + roles to your `gitlab` user: + - Amazon RDS requires the [`rds_superuser`](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Appendix.PostgreSQL.CommonDBATasks.html#Appendix.PostgreSQL.CommonDBATasks.Roles) role. + - Azure Database for PostgreSQL requires the [`azure_pg_admin`](https://docs.microsoft.com/en-us/azure/postgresql/howto-create-users#how-to-create-additional-admin-users-in-azure-database-for-postgresql) role. + 1. Configure the GitLab application servers with the appropriate connection details for your external PostgreSQL service in your `/etc/gitlab/gitlab.rb` file: diff --git a/doc/administration/geo/replication/database.md b/doc/administration/geo/replication/database.md index 4eac22e194d..62bd0e6ac19 100644 --- a/doc/administration/geo/replication/database.md +++ b/doc/administration/geo/replication/database.md @@ -33,9 +33,9 @@ recover. See below for more details. The following guide assumes that: -- You are using Omnibus and therefore you are using PostgreSQL 9.6 or later - which includes the [`pg_basebackup` tool](https://www.postgresql.org/docs/9.6/app-pgbasebackup.html) and improved - [Foreign Data Wrapper](https://www.postgresql.org/docs/9.6/postgres-fdw.html) support. +- You are using Omnibus and therefore you are using PostgreSQL 11 or later + which includes the [`pg_basebackup` tool](https://www.postgresql.org/docs/11/app-pgbasebackup.html) and improved + [Foreign Data Wrapper](https://www.postgresql.org/docs/11/postgres-fdw.html) support. - You have a **primary** node already set up (the GitLab server you are replicating from), running Omnibus' PostgreSQL (or equivalent version), and you have a new **secondary** server set up with the same versions of the OS, @@ -160,7 +160,7 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o `postgresql['md5_auth_cidr_addresses']` and `postgresql['listen_address']`. 
The `listen_address` option opens PostgreSQL up to network connections with the interface - corresponding to the given address. See [the PostgreSQL documentation](https://www.postgresql.org/docs/9.6/runtime-config-connection.html) + corresponding to the given address. See [the PostgreSQL documentation](https://www.postgresql.org/docs/11/runtime-config-connection.html) for more details. Depending on your network configuration, the suggested addresses may not @@ -213,7 +213,7 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o ``` You may also want to edit the `wal_keep_segments` and `max_wal_senders` to match your - database replication requirements. Consult the [PostgreSQL - Replication documentation](https://www.postgresql.org/docs/9.6/runtime-config-replication.html) + database replication requirements. Consult the [PostgreSQL - Replication documentation](https://www.postgresql.org/docs/11/runtime-config-replication.html) for more information. 1. Save the file and reconfigure GitLab for the database listen changes and @@ -442,7 +442,7 @@ data before running `pg_basebackup`. (e.g., you know the network path is secure, or you are using a site-to-site VPN). This is **not** safe over the public Internet! - You can read more details about each `sslmode` in the - [PostgreSQL documentation](https://www.postgresql.org/docs/9.6/libpq-ssl.html#LIBPQ-SSL-PROTECTION); + [PostgreSQL documentation](https://www.postgresql.org/docs/11/libpq-ssl.html#LIBPQ-SSL-PROTECTION); the instructions above are carefully written to ensure protection against both passive eavesdroppers and active "man-in-the-middle" attackers. - Change the `--slot-name` to the name of the replication slot @@ -464,7 +464,7 @@ high-availability configuration with a cluster of nodes supporting a Geo information, see [High Availability with Omnibus GitLab](../../high_availability/database.md#high-availability-with-omnibus-gitlab-premium-only). For a Geo **secondary** node to work properly with PgBouncer in front of the database, -it will need a separate read-only user to make [PostgreSQL FDW queries](https://www.postgresql.org/docs/9.6/postgres-fdw.html) +it will need a separate read-only user to make [PostgreSQL FDW queries](https://www.postgresql.org/docs/11/postgres-fdw.html) work: 1. On the **primary** Geo database, enter the PostgreSQL on the console as an diff --git a/doc/administration/geo/replication/external_database.md b/doc/administration/geo/replication/external_database.md index b571d3a173f..b2293684bf6 100644 --- a/doc/administration/geo/replication/external_database.md +++ b/doc/administration/geo/replication/external_database.md @@ -157,7 +157,7 @@ when `roles ['geo_secondary_role']` is set. For high availability, refer to [Geo High Availability](../../reference_architectures/index.md). If you want to run this database external to Omnibus, please follow the instructions below. -The tracking database requires an [FDW](https://www.postgresql.org/docs/9.6/postgres-fdw.html) +The tracking database requires an [FDW](https://www.postgresql.org/docs/11/postgres-fdw.html) connection with the **secondary** replica database for improved performance. If you have an external database ready to be used as the tracking database, @@ -211,7 +211,7 @@ the tracking database on port 5432. gitlab-rake geo:db:migrate ``` -1. Configure the [PostgreSQL FDW](https://www.postgresql.org/docs/9.6/postgres-fdw.html) +1. 
Configure the [PostgreSQL FDW](https://www.postgresql.org/docs/11/postgres-fdw.html) connection and credentials: Save the script below in a file, ex. `/tmp/geo_fdw.sh` and modify the connection diff --git a/doc/administration/geo/replication/index.md b/doc/administration/geo/replication/index.md index 728e96cb605..63c81071cf3 100644 --- a/doc/administration/geo/replication/index.md +++ b/doc/administration/geo/replication/index.md @@ -110,7 +110,7 @@ The following are required to run Geo: The following operating systems are known to ship with a current version of OpenSSH: - [CentOS](https://www.centos.org) 7.4+ - [Ubuntu](https://ubuntu.com) 16.04+ -- PostgreSQL 9.6+ with [FDW](https://www.postgresql.org/docs/9.6/postgres-fdw.html) support and [Streaming Replication](https://wiki.postgresql.org/wiki/Streaming_Replication) +- PostgreSQL 11+ with [FDW](https://www.postgresql.org/docs/11/postgres-fdw.html) support and [Streaming Replication](https://wiki.postgresql.org/wiki/Streaming_Replication) - Git 2.9+ - All nodes must run the same GitLab version. diff --git a/doc/administration/geo/replication/security_review.md b/doc/administration/geo/replication/security_review.md index e2cfe48f2ea..0ac8157220a 100644 --- a/doc/administration/geo/replication/security_review.md +++ b/doc/administration/geo/replication/security_review.md @@ -177,7 +177,7 @@ from [owasp.org](https://owasp.org/). ### What databases and application servers support the application? -- PostgreSQL >= 9.6, Redis, Sidekiq, Puma. +- PostgreSQL >= 11, Redis, Sidekiq, Puma. ### How will database connection strings, encryption keys, and other sensitive components be stored, accessed, and protected from unauthorized detection? diff --git a/doc/administration/gitaly/praefect.md b/doc/administration/gitaly/praefect.md index 77655b723e9..124f495187b 100644 --- a/doc/administration/gitaly/praefect.md +++ b/doc/administration/gitaly/praefect.md @@ -56,7 +56,7 @@ for improvements including The minimum recommended configuration for a Gitaly Cluster requires: - 1 highly available load balancer -- 1 highly available PostgreSQL server (PostgreSQL 9.6 or newer) +- 1 highly available PostgreSQL server (PostgreSQL 11 or newer) - 3 Praefect nodes - 3 Gitaly nodes (1 primary, 2 secondary) @@ -82,7 +82,7 @@ package (highly recommended), follow the steps below: Before beginning, you should already have a working GitLab instance. [Learn how to install GitLab](https://about.gitlab.com/install/). -Provision a PostgreSQL server (PostgreSQL 9.6 or newer). Configuration through +Provision a PostgreSQL server (PostgreSQL 11 or newer). Configuration through the Omnibus GitLab distribution is not yet supported. Follow this [issue](https://gitlab.com/gitlab-org/gitaly/issues/2476) for updates. @@ -138,7 +138,7 @@ of GitLab and should not be replicated. 
To complete this section you will need: - 1 Praefect node -- 1 PostgreSQL server (PostgreSQL 9.6 or newer) +- 1 PostgreSQL server (PostgreSQL 11 or newer) - An SQL user with permissions to create databases During this section, we will configure the PostgreSQL server, from the Praefect diff --git a/doc/administration/high_availability/database.md b/doc/administration/high_availability/database.md index a8728c8ab3a..6f1873af993 100644 --- a/doc/administration/high_availability/database.md +++ b/doc/administration/high_availability/database.md @@ -969,7 +969,7 @@ repmgr['trust_auth_cidr_addresses'] = %w(192.168.1.44/32 db2.example.com) ##### MD5 Authentication If you are running on an untrusted network, repmgr can use md5 authentication -with a [`.pgpass` file](https://www.postgresql.org/docs/9.6/libpq-pgpass.html) +with a [`.pgpass` file](https://www.postgresql.org/docs/11/libpq-pgpass.html) to authenticate. You can specify by IP address, FQDN, or by subnet, using the same format as in diff --git a/doc/administration/high_availability/pgbouncer.md b/doc/administration/high_availability/pgbouncer.md index 3b56008feb7..4c672f49e26 100644 --- a/doc/administration/high_availability/pgbouncer.md +++ b/doc/administration/high_availability/pgbouncer.md @@ -215,7 +215,7 @@ To start a session, run ```shell # gitlab-ctl pgb-console Password for user pgbouncer: -psql (9.6.8, server 1.7.2/bouncer) +psql (11.7, server 1.7.2/bouncer) Type "help" for help. pgbouncer=# diff --git a/doc/administration/instance_limits.md b/doc/administration/instance_limits.md index 42d64713508..2f6598da2da 100644 --- a/doc/administration/instance_limits.md +++ b/doc/administration/instance_limits.md @@ -256,6 +256,10 @@ NOTE: **Note:** Set the limit to `0` to disable it. - [Length restrictions for file and directory names](../user/project/wiki/index.md#length-restrictions-for-file-and-directory-names). +## Snippets limits + +See the [documentation on Snippets settings](snippets/index.md). + ## Push Event Limits ### Webhooks and Project Services diff --git a/doc/administration/troubleshooting/ssl.md b/doc/administration/troubleshooting/ssl.md index f230f047ded..e6c081e1eea 100644 --- a/doc/administration/troubleshooting/ssl.md +++ b/doc/administration/troubleshooting/ssl.md @@ -46,6 +46,44 @@ After configuring a GitLab instance with an internal CA certificate, you might n If you have the problems listed above, add your certificate to `/etc/gitlab/trusted-certs` and run `sudo gitlab-ctl reconfigure`. +## X.509 key values mismatch error + +After configuring your instance with a certificate bundle, NGINX may throw the +following error: + +`SSL: error:0B080074:x509 certificate routines:X509_check_private_key:key values mismatch` + +This error means that the server certificate and key you have provided do not +match. You can confirm this by running the following command and comparing the +output: + +```shell +openssl rsa -noout -modulus -in path/to/your/.key | openssl md5 +openssl x509 -noout -modulus -in path/to/your/.crt | openssl md5 +``` + +The following is an example of an md5 output between a matching key and certificate. 
Note the +matching md5 hashes: + +```shell +$ openssl rsa -noout -modulus -in private.key | openssl md5 +4f49b61b25225abeb7542b29ae20e98c +$ openssl x509 -noout -modulus -in public.crt | openssl md5 +4f49b61b25225abeb7542b29ae20e98c +``` + +This is an opposing output with a non-matching key and certificate which shows different md5 hashes: + +```shell +$ openssl rsa -noout -modulus -in private.key | openssl md5 +d418865077299af27707b1d1fa83cd99 +$ openssl x509 -noout -modulus -in public.crt | openssl md5 +4f49b61b25225abeb7542b29ae20e98c +``` + +If the two outputs differ like the above example, there is a mismatch between the certificate +and key. You should contact the provider of the SSL certificate for further support. + ## Using GitLab Runner with a GitLab instance configured with internal CA certificate or self-signed certificate Besides getting the errors mentioned in diff --git a/doc/ci/docker/using_docker_images.md b/doc/ci/docker/using_docker_images.md index 2759d6de085..51139da2d16 100644 --- a/doc/ci/docker/using_docker_images.md +++ b/doc/ci/docker/using_docker_images.md @@ -209,7 +209,7 @@ default: image: ruby:2.6 services: - - postgres:9.3 + - postgres:11.7 before_script: - bundle install @@ -235,14 +235,14 @@ default: test:2.6: image: ruby:2.6 services: - - postgres:9.3 + - postgres:11.7 script: - bundle exec rake spec test:2.7: image: ruby:2.7 services: - - postgres:9.4 + - postgres:12.2 script: - bundle exec rake spec ``` @@ -257,7 +257,7 @@ default: entrypoint: ["/bin/bash"] services: - - name: my-postgres:9.4 + - name: my-postgres:11.7 alias: db-postgres entrypoint: ["/usr/local/bin/db-postgres"] command: ["start"] @@ -289,7 +289,7 @@ variables: POSTGRES_INITDB_ARGS: "--encoding=UTF8 --data-checksums" services: -- name: postgres:9.4 +- name: postgres:11.7 alias: db entrypoint: ["docker-entrypoint.sh"] command: ["postgres"] diff --git a/doc/development/documentation/styleguide.md b/doc/development/documentation/styleguide.md index ab57ff686b5..44f3a83bbcb 100644 --- a/doc/development/documentation/styleguide.md +++ b/doc/development/documentation/styleguide.md @@ -794,9 +794,11 @@ For more information, see the [confidential issue](../../user/project/issues/con ### Link to specific lines of code -When linking to specifics lines within a file, link to a commit instead of to the branch. +When linking to specific lines within a file, link to a commit instead of to the branch. Lines of code change through time, therefore, linking to a line by using the commit link -ensures the user lands on the line you're referring to. +ensures the user lands on the line you're referring to. The **Permalink** button, which is +available when viewing a file within a project, makes it easy to generate a link to the +most recent commit of the given file. - **Do:** `[link to line 3](https://gitlab.com/gitlab-org/gitlab/-/blob/11f17c56d8b7f0b752562d78a4298a3a95b5ce66/.gitlab/issue_templates/Feature%20proposal.md#L3)` - **Don't:** `[link to line 3](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/issue_templates/Feature%20proposal.md#L3).` diff --git a/doc/development/filtering_by_label.md b/doc/development/filtering_by_label.md index 6f6d7afc040..19dece0d5c9 100644 --- a/doc/development/filtering_by_label.md +++ b/doc/development/filtering_by_label.md @@ -80,7 +80,7 @@ it did not improve query performance. 
## Attempt B: Denormalize using an array column Having [removed MySQL support in GitLab 12.1](https://about.gitlab.com/blog/2019/06/27/removing-mysql-support/), -using [PostgreSQL's arrays](https://www.postgresql.org/docs/9.6/arrays.html) became more +using [PostgreSQL's arrays](https://www.postgresql.org/docs/11/arrays.html) became more tractable as we didn't have to support two databases. We discussed denormalizing the `label_links` table for querying in [issue #49651](https://gitlab.com/gitlab-org/gitlab-foss/issues/49651), @@ -91,7 +91,7 @@ and `epics`: `issues.label_ids` would be an array column of label IDs, and `issues.label_titles` would be an array of label titles. These array columns can be complemented with [GIN -indexes](https://www.postgresql.org/docs/9.6/gin-intro.html) to improve +indexes](https://www.postgresql.org/docs/11/gin-intro.html) to improve matching. ### Attempt B1: store label IDs for each object diff --git a/doc/development/geo.md b/doc/development/geo.md index b922fdfa119..bf56340f8ec 100644 --- a/doc/development/geo.md +++ b/doc/development/geo.md @@ -216,15 +216,11 @@ bundle exec rake geo:db:migrate Foreign Data Wrapper ([FDW](#fdw)) is used by the [Geo Log Cursor](#geo-log-cursor) and improves the performance of many synchronization operations. -FDW is a PostgreSQL extension ([`postgres_fdw`](https://www.postgresql.org/docs/current/postgres-fdw.html)) that is enabled within +FDW is a PostgreSQL extension ([`postgres_fdw`](https://www.postgresql.org/docs/11/postgres-fdw.html)) that is enabled within the Geo Tracking Database (on a **secondary** node), which allows it to connect to the readonly database replica and perform queries and filter data from both instances. -While FDW is available in older versions of PostgreSQL, we needed to -raise the minimum required version to 9.6 as this includes many -performance improvements to the FDW implementation. - This persistent connection is configured as an FDW server named `gitlab_secondary`. This configuration exists within the database's user context only. To access the `gitlab_secondary`, GitLab needs to use the diff --git a/doc/development/go_guide/index.md b/doc/development/go_guide/index.md index 8eb192a62cf..6eadcdb5711 100644 --- a/doc/development/go_guide/index.md +++ b/doc/development/go_guide/index.md @@ -249,6 +249,59 @@ Programs handling a lot of IO or complex operations should always include [benchmarks](https://golang.org/pkg/testing/#hdr-Benchmarks), to ensure performance consistency over time. +## Using errors + +### Adding context + +Adding context before you return the error can be helpful, instead of +just returning the error. This allows developers to understand what the +program was trying to do when it entered the error state making it much +easier to debug. + +For example: + +```go +// Wrap the error +return nil, fmt.Errorf("get cache %s: %w", f.Name, err) + +// Just add context +return nil, fmt.Errorf("saving cache %s: %v", f.Name, err) +``` + +A few things to keep in mind when adding context: + +- Decide if you want to expose the underlying error + to the caller. If so, use `%w`, if not, you can use `%v`. +- Don't use words like `failed`, `error`, `didn't`. As it's an error, + the user already knows that something failed and this might lead to + having strings like `failed xx failed xx failed xx`. Explain _what_ + failed instead. +- Error strings should not be capitalized or end with punctuation or a + newline. You can use `golint` to check for this. 
+ +### Naming + +- When using sentinel errors they should always be named like `ErrXxx`. +- When creating a new error type they should always be named like + `XxxError`. + +### Checking Error types + +- To check error equality don't use `==`. Use + [`errors.Is`](https://pkg.go.dev/errors?tab=doc#Is) instead (for Go + versions >= 1.13). +- To check if the error is of a certain type don't use type assertion, + use [`errors.As`](https://pkg.go.dev/errors?tab=doc#As) instead (for + Go versions >= 1.13). + +### References for working with errors + +- [Go 1.13 errors](https://blog.golang.org/go1.13-errors). +- [Programing with + errors](https://peter.bourgon.org/blog/2019/09/11/programming-with-errors.html). +- [Don’t just check errors, handle them + gracefully](https://dave.cheney.net/2016/04/27/dont-just-check-errors-handle-them-gracefully). + ## CLIs Every Go program is launched from the command line. diff --git a/doc/development/namespaces_storage_statistics.md b/doc/development/namespaces_storage_statistics.md index d6b2a7460cd..3065d4f84a2 100644 --- a/doc/development/namespaces_storage_statistics.md +++ b/doc/development/namespaces_storage_statistics.md @@ -38,7 +38,7 @@ alternative method. ### Attempt A: PostgreSQL materialized view -Model can be updated through a refresh strategy based on a project routes SQL and a [materialized view](https://www.postgresql.org/docs/9.6/rules-materializedviews.html): +Model can be updated through a refresh strategy based on a project routes SQL and a [materialized view](https://www.postgresql.org/docs/11/rules-materializedviews.html): ```sql SELECT split_part("rs".path, '/', 1) as root_path, diff --git a/doc/development/verifying_database_capabilities.md b/doc/development/verifying_database_capabilities.md index a5f5661ac9b..f6c78e51299 100644 --- a/doc/development/verifying_database_capabilities.md +++ b/doc/development/verifying_database_capabilities.md @@ -12,7 +12,7 @@ To facilitate this we have the following methods that you can use: This allows you to write code such as: ```ruby -if Gitlab::Database.version.to_f >= 9.6 +if Gitlab::Database.version.to_f >= 11.7 run_really_fast_query else run_fast_query diff --git a/doc/install/requirements.md b/doc/install/requirements.md index 7a0b2056a7b..74c03c8ee4e 100644 --- a/doc/install/requirements.md +++ b/doc/install/requirements.md @@ -174,7 +174,7 @@ If you are using [GitLab Geo](../development/geo.md): - The [tracking database](../development/geo.md#using-the-tracking-database) requires the - [postgres_fdw](https://www.postgresql.org/docs/9.6/postgres-fdw.html) + [postgres_fdw](https://www.postgresql.org/docs/11/postgres-fdw.html) extension. ```sql diff --git a/doc/topics/autodevops/customize.md b/doc/topics/autodevops/customize.md index ac9b2ded720..0b312837260 100644 --- a/doc/topics/autodevops/customize.md +++ b/doc/topics/autodevops/customize.md @@ -353,7 +353,7 @@ The following table lists variables related to the database. | `POSTGRES_USER` | The PostgreSQL user. Defaults to `user`. Set it to use a custom username. | | `POSTGRES_PASSWORD` | The PostgreSQL password. Defaults to `testing-password`. Set it to use a custom password. | | `POSTGRES_DB` | The PostgreSQL database name. Defaults to the value of [`$CI_ENVIRONMENT_SLUG`](../../ci/variables/README.md#predefined-environment-variables). Set it to use a custom database name. | -| `POSTGRES_VERSION` | Tag for the [`postgres` Docker image](https://hub.docker.com/_/postgres) to use. Defaults to `9.6.2`. 
| +| `POSTGRES_VERSION` | Tag for the [`postgres` Docker image](https://hub.docker.com/_/postgres) to use. Defaults to `11.7`. | ### Disable jobs diff --git a/doc/topics/autodevops/stages.md b/doc/topics/autodevops/stages.md index 57fb20b67fd..3dcde4ab065 100644 --- a/doc/topics/autodevops/stages.md +++ b/doc/topics/autodevops/stages.md @@ -346,7 +346,7 @@ version of the PostgreSQL chart that supports Kubernetes 1.16 and higher: 1. Set the: - `AUTO_DEVOPS_POSTGRES_CHANNEL` variable to `2`. - - `POSTGRES_VERSION` variable to `9.6.16` or higher. + - `POSTGRES_VERSION` variable to `11.7` or higher. DANGER: **Danger:** Opting into `AUTO_DEVOPS_POSTGRES_CHANNEL` version `2` deletes the version `1` PostgreSQL database. Follow the diff --git a/doc/topics/autodevops/upgrading_postgresql.md b/doc/topics/autodevops/upgrading_postgresql.md index 2f50a897481..bee76fdf62f 100644 --- a/doc/topics/autodevops/upgrading_postgresql.md +++ b/doc/topics/autodevops/upgrading_postgresql.md @@ -173,7 +173,7 @@ TIP: **Tip:** You can also PostgreSQL. 1. Set `AUTO_DEVOPS_POSTGRES_DELETE_V1` to a non-empty value. This flag is a safeguard to prevent accidental deletion of databases. -1. Set `POSTGRES_VERSION` to `9.6.16`. This is the minimum PostgreSQL +1. Set `POSTGRES_VERSION` to `11.7`. This is the minimum PostgreSQL version supported. 1. Set `PRODUCTION_REPLICAS` to `0`. For other environments, use `REPLICAS` with an [environment scope](../../ci/environments.md#scoping-environments-with-specs). diff --git a/doc/user/application_security/dependency_scanning/index.md b/doc/user/application_security/dependency_scanning/index.md index ce7b962a943..ebd89d9a017 100644 --- a/doc/user/application_security/dependency_scanning/index.md +++ b/doc/user/application_security/dependency_scanning/index.md @@ -596,6 +596,19 @@ ensure that it can reach your private repository. Here is an example configurati setuptools.ssl_support.cert_paths = ['internal.crt'] ``` +## Limitations + +### Referencing local dependencies using a path in JavaScript projects + +Although dependency scanning doesn't support it, you can reference dependencies by using a +[local path](https://docs.npmjs.com/files/package.json#local-paths) in the `package.json` for a +JavaScript project. The dependency scan generates the following error when you use +`file: <path/to/dependency-name>` to reference a package: + +```text +ERROR: Could not find dependencies: <dependency-name>. You may need to run npm install +``` + ## Troubleshooting ### Error response from daemon: error processing tar file: docker-tar: relocation error diff --git a/doc/user/application_security/index.md b/doc/user/application_security/index.md index 572619f7630..4544d672484 100644 --- a/doc/user/application_security/index.md +++ b/doc/user/application_security/index.md @@ -201,9 +201,19 @@ security team when a merge request would introduce one of the following security - A security vulnerability - A software license compliance violation -This threshold is defined as `high`, `critical`, or `unknown` severity. When any vulnerabilities are -present within a merge request, an approval is required from the `Vulnerability-Check` approver -group. +The security vulnerability threshold is defined as `high`, `critical`, or `unknown` severity. The +`Vulnerability-Check` approver group must approve merge requests that contain vulnerabilities. 
+ +When GitLab can assess vulnerability severity, the rating can be one of the following: + +- `unknown` +- `low` +- `medium` +- `high` +- `critical` + +The rating `unknown` indicates that the underlying scanner doesn't contain or provide a severity +rating. ### Enabling Security Approvals within a project diff --git a/doc/user/clusters/crossplane.md b/doc/user/clusters/crossplane.md index 9a1dde52956..a9a5f768ec8 100644 --- a/doc/user/clusters/crossplane.md +++ b/doc/user/clusters/crossplane.md @@ -167,7 +167,7 @@ metadata: specTemplate: writeConnectionSecretsToNamespace: gitlab-managed-apps forProvider: - databaseVersion: POSTGRES_9_6 + databaseVersion: POSTGRES_11_7 region: $REGION settings: tier: db-custom-1-3840 @@ -189,7 +189,7 @@ metadata: specTemplate: writeConnectionSecretsToNamespace: gitlab-managed-apps forProvider: - databaseVersion: POSTGRES_9_6 + databaseVersion: POSTGRES_11_7 region: $REGION settings: tier: db-custom-1-3840 diff --git a/doc/user/snippets.md b/doc/user/snippets.md index 79d1751d13f..00014dc32ee 100644 --- a/doc/user/snippets.md +++ b/doc/user/snippets.md @@ -11,6 +11,44 @@ There are two types of snippets: - Personal snippets. - Project snippets. +## Personal snippets + +Personal snippets are not related to any project and can be created completely +independently. There are 3 visibility levels that can be set, public, internal +and private. See [Public access](../public_access/public_access.md) for more information. + +## Project snippets + +Project snippets are always related to a specific project. +See [Project features](project/index.md#project-features) for more information. + +## Create a snippet + +To create a personal snippet, click the plus icon (**{plus-square-o}**) +on the top navigation and select **New snippet** from the dropdown menu: + +![New personal snippet from non-project pages](img/new_personal_snippet_v12_10.png) + +If you're on a project's page but you want to create a new personal snippet, +click the plus icon (**{plus-square-o}**) and select **New snippet** from the +lower part of the dropdown (**GitLab** on GitLab.com; **Your Instance** on +self-managed instances): + +![New personal snippet from project pages](img/new_personal_snippet_from_project_v12_10.png) + +To create a project snippet, navigate to your project's page and click the +plus icon (**{plus-square-o}**), then select **New snippet** from the upper +part of the dropdown (**This project**). + +![New personal snippet from project pages](img/new_project_snippet_from_project_v12_10.png) + +From there, add the **Title**, **Description**, and a **File** name with the +appropriate extension (for example, `example.rb`, `index.html`). + +CAUTION: **Warning:** +Make sure to add the file name to get code highlighting and to avoid this +[copy-pasting bug](https://gitlab.com/gitlab-org/gitlab/-/issues/22870). + ## Versioned Snippets > [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/239) in GitLab 13.0. @@ -61,7 +99,7 @@ master branch. ### Limitations - Binary files are not supported. -- Creating or deleting branches is not supported. Only a default *master* +- Creating or deleting branches is not supported. Only a default *master*. branch is used. - Git tags are not supported in snippet repositories. - Snippets' repositories are limited to one file. Attempting to push more @@ -70,45 +108,7 @@ than one file will result in an error. it's planned to be added in future iterations. See the [revisions tab issue](https://gitlab.com/gitlab-org/gitlab/-/issues/39271) for updates. 
- The [maximum size for a snippet](../administration/snippets/index.md#snippets-content-size-limit) -is 50MB, by default. - -## Personal snippets - -Personal snippets are not related to any project and can be created completely -independently. There are 3 visibility levels that can be set, public, internal -and private. See [Public access](../public_access/public_access.md) for more information. - -## Project snippets - -Project snippets are always related to a specific project. -See [Project features](project/index.md#project-features) for more information. - -## Create a snippet - -To create a personal snippet, click the plus icon (**{plus-square-o}**) -on the top navigation and select **New snippet** from the dropdown menu: - -![New personal snippet from non-project pages](img/new_personal_snippet_v12_10.png) - -If you're on a project's page but you want to create a new personal snippet, -click the plus icon (**{plus-square-o}**) and select **New snippet** from the -lower part of the dropdown (**GitLab** on GitLab.com; **Your Instance** on -self-managed instances): - -![New personal snippet from project pages](img/new_personal_snippet_from_project_v12_10.png) - -To create a project snippet, navigate to your project's page and click the -plus icon (**{plus-square-o}**), then select **New snippet** from the upper -part of the dropdown (**This project**). - -![New personal snippet from project pages](img/new_project_snippet_from_project_v12_10.png) - -From there, add the **Title**, **Description**, and a **File** name with the -appropriate extension (for example, `example.rb`, `index.html`). - -CAUTION: **Warning:** -Make sure to add the file name to get code highlighting and to avoid this -[copy-pasting bug](https://gitlab.com/gitlab-org/gitlab/-/issues/22870). +is 50 MB, by default. ## Discover snippets diff --git a/lib/gitlab/experimentation.rb b/lib/gitlab/experimentation.rb index 5b57d7929a6..0097961eed4 100644 --- a/lib/gitlab/experimentation.rb +++ b/lib/gitlab/experimentation.rb @@ -8,16 +8,22 @@ # - tracking_category (optional, used to set the category when tracking an experiment event) # # The experiment is controlled by a Feature Flag (https://docs.gitlab.com/ee/development/feature_flags/controls.html), -# which is named "#{key}_experiment_percentage" and *must* be set with a percentage and not be used for other purposes. -# To enable the experiment for 10% of the users (determined by the `experimentation_subject_index` value from a cookie): +# which is named "#{experiment_key}_experiment_percentage" and *must* be set with a percentage and not be used for other purposes. 
# -# chatops: `/chatops run feature set key_experiment_percentage 10` -# console: `Feature.get(:key_experiment_percentage).enable_percentage_of_time(10)` +# To enable the experiment for 10% of the users: +# +# chatops: `/chatops run feature set experiment_key_experiment_percentage 10` +# console: `Feature.get(:experiment_key_experiment_percentage).enable_percentage_of_time(10)` # # To disable the experiment: # -# chatops: `/chatops run feature delete key_experiment_percentage` -# console: `Feature.get(:key_experiment_percentage).remove` +# chatops: `/chatops run feature delete experiment_key_experiment_percentage` +# console: `Feature.get(:experiment_key_experiment_percentage).remove` +# +# To check the current rollout percentage: +# +# chatops: `/chatops run feature get experiment_key_experiment_percentage` +# console: `Feature.get(:experiment_key_experiment_percentage).percentage_of_time_value` # module Gitlab module Experimentation @@ -25,6 +31,9 @@ module Gitlab signup_flow: { tracking_category: 'Growth::Acquisition::Experiment::SignUpFlow' }, + onboarding_issues: { + tracking_category: 'Growth::Conversion::Experiment::OnboardingIssues' + }, suggest_pipeline: { tracking_category: 'Growth::Expansion::Experiment::SuggestPipeline' }, diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 443b1ad0602..e9f2a38c061 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -24285,6 +24285,9 @@ msgstr "" msgid "Wiki|Wiki Pages" msgstr "" +msgid "Will be created" +msgstr "" + msgid "Will deploy to" msgstr "" @@ -26287,6 +26290,9 @@ msgstr "" msgid "wiki page" msgstr "" +msgid "will be released %{time}" +msgstr "" + msgid "with %{additions} additions, %{deletions} deletions." msgstr "" diff --git a/package.json b/package.json index 2d9b820b57f..a0149a6c7df 100644 --- a/package.json +++ b/package.json @@ -164,7 +164,6 @@ "eslint-import-resolver-jest": "^2.1.1", "eslint-import-resolver-webpack": "^0.12.1", "eslint-plugin-jasmine": "^4.1.0", - "eslint-plugin-jest": "^22.3.0", "eslint-plugin-no-jquery": "^2.3.0", "gettext-extractor": "^3.4.3", "gettext-extractor-vue": "^4.0.2", diff --git a/spec/frontend/.eslintrc.yml b/spec/frontend/.eslintrc.yml index c8aacca5ef2..8e6faa90c58 100644 --- a/spec/frontend/.eslintrc.yml +++ b/spec/frontend/.eslintrc.yml @@ -1,10 +1,6 @@ --- -env: - jest/globals: true -plugins: - - jest extends: - - 'plugin:jest/recommended' + - 'plugin:@gitlab/jest' settings: # We have to teach eslint-plugin-import what node modules we use # otherwise there is an error when it tries to resolve them @@ -20,3 +16,12 @@ globals: loadFixtures: false preloadFixtures: false setFixtures: false +rules: + jest/expect-expect: + - off + - assertFunctionNames: + - 'expect*' + - 'assert*' + - 'testAction' + jest/no-test-callback: + - off diff --git a/spec/frontend/diff_comments_store_spec.js b/spec/frontend/diff_comments_store_spec.js index 82588b1ee7b..6f25c9dd3bc 100644 --- a/spec/frontend/diff_comments_store_spec.js +++ b/spec/frontend/diff_comments_store_spec.js @@ -41,7 +41,6 @@ describe('New discussion', () => { describe('Get note', () => { beforeEach(() => { - expect(Object.keys(CommentsStore.state).length).toBe(0); createDiscussion(); }); @@ -55,7 +54,6 @@ describe('Get note', () => { describe('Delete discussion', () => { beforeEach(() => { - expect(Object.keys(CommentsStore.state).length).toBe(0); createDiscussion(); }); @@ -81,7 +79,6 @@ describe('Delete discussion', () => { describe('Update note', () => { beforeEach(() => { - expect(Object.keys(CommentsStore.state).length).toBe(0); 
createDiscussion(); }); @@ -96,7 +93,6 @@ describe('Update note', () => { describe('Discussion resolved', () => { beforeEach(() => { - expect(Object.keys(CommentsStore.state).length).toBe(0); createDiscussion(); }); diff --git a/spec/frontend/helpers/class_spec_helper.js b/spec/frontend/helpers/class_spec_helper.js index 7a60d33b471..b26f087f0c5 100644 --- a/spec/frontend/helpers/class_spec_helper.js +++ b/spec/frontend/helpers/class_spec_helper.js @@ -1,3 +1,4 @@ +// eslint-disable-next-line jest/no-export export default class ClassSpecHelper { static itShouldBeAStaticMethod(base, method) { return it('should be a static method', () => { diff --git a/spec/frontend/lib/utils/axios_utils_spec.js b/spec/frontend/lib/utils/axios_utils_spec.js index d5c39567f06..1585a38ae86 100644 --- a/spec/frontend/lib/utils/axios_utils_spec.js +++ b/spec/frontend/lib/utils/axios_utils_spec.js @@ -11,6 +11,7 @@ describe('axios_utils', () => { mock = new AxiosMockAdapter(axios); mock.onAny('/ok').reply(200); mock.onAny('/err').reply(500); + // eslint-disable-next-line jest/no-standalone-expect expect(axios.countActiveRequests()).toBe(0); }); diff --git a/spec/frontend/releases/components/release_block_footer_spec.js b/spec/frontend/releases/components/release_block_footer_spec.js index c63637c4cae..b91cfb82b65 100644 --- a/spec/frontend/releases/components/release_block_footer_spec.js +++ b/spec/frontend/releases/components/release_block_footer_spec.js @@ -3,13 +3,17 @@ import { GlLink } from '@gitlab/ui'; import { trimText } from 'helpers/text_helper'; import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue'; import Icon from '~/vue_shared/components/icon.vue'; -import { release } from '../mock_data'; +import { release as originalRelease } from '../mock_data'; import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; +import { cloneDeep } from 'lodash'; + +const mockFutureDate = new Date(9999, 0, 0).toISOString(); +let mockIsFutureRelease = false; jest.mock('~/vue_shared/mixins/timeago', () => ({ methods: { timeFormatted() { - return '7 fortnights ago'; + return mockIsFutureRelease ? 
'in 1 month' : '7 fortnights ago'; }, tooltipTitle() { return 'February 30, 2401'; @@ -19,12 +23,12 @@ jest.mock('~/vue_shared/mixins/timeago', () => ({ describe('Release block footer', () => { let wrapper; - let releaseClone; + let release; const factory = (props = {}) => { wrapper = mount(ReleaseBlockFooter, { propsData: { - ...convertObjectPropsToCamelCase(releaseClone, { deep: true }), + ...convertObjectPropsToCamelCase(release, { deep: true }), ...props, }, }); @@ -33,11 +37,13 @@ describe('Release block footer', () => { }; beforeEach(() => { - releaseClone = JSON.parse(JSON.stringify(release)); + release = cloneDeep(originalRelease); }); afterEach(() => { wrapper.destroy(); + wrapper = null; + mockIsFutureRelease = false; }); const commitInfoSection = () => wrapper.find('.js-commit-info'); @@ -60,8 +66,8 @@ describe('Release block footer', () => { const commitLink = commitInfoSectionLink(); expect(commitLink.exists()).toBe(true); - expect(commitLink.text()).toBe(releaseClone.commit.short_id); - expect(commitLink.attributes('href')).toBe(releaseClone.commit_path); + expect(commitLink.text()).toBe(release.commit.short_id); + expect(commitLink.attributes('href')).toBe(release.commit_path); }); it('renders the tag icon', () => { @@ -75,28 +81,60 @@ describe('Release block footer', () => { const commitLink = tagInfoSection().find(GlLink); expect(commitLink.exists()).toBe(true); - expect(commitLink.text()).toBe(releaseClone.tag_name); - expect(commitLink.attributes('href')).toBe(releaseClone.tag_path); + expect(commitLink.text()).toBe(release.tag_name); + expect(commitLink.attributes('href')).toBe(release.tag_path); }); it('renders the author and creation time info', () => { expect(trimText(authorDateInfoSection().text())).toBe( - `Created 7 fortnights ago by ${releaseClone.author.username}`, + `Created 7 fortnights ago by ${release.author.username}`, ); }); + describe('when the release date is in the past', () => { + it('prefixes the creation info with "Created"', () => { + expect(trimText(authorDateInfoSection().text())).toEqual(expect.stringMatching(/^Created/)); + }); + }); + + describe('renders the author and creation time info with future release date', () => { + beforeEach(() => { + mockIsFutureRelease = true; + factory({ releasedAt: mockFutureDate }); + }); + + it('renders the release date without the author name', () => { + expect(trimText(authorDateInfoSection().text())).toBe( + `Will be created in 1 month by ${release.author.username}`, + ); + }); + }); + + describe('when the release date is in the future', () => { + beforeEach(() => { + mockIsFutureRelease = true; + factory({ releasedAt: mockFutureDate }); + }); + + it('prefixes the creation info with "Will be created"', () => { + expect(trimText(authorDateInfoSection().text())).toEqual( + expect.stringMatching(/^Will be created/), + ); + }); + }); + it("renders the author's avatar image", () => { const avatarImg = authorDateInfoSection().find('img'); expect(avatarImg.exists()).toBe(true); - expect(avatarImg.attributes('src')).toBe(releaseClone.author.avatar_url); + expect(avatarImg.attributes('src')).toBe(release.author.avatar_url); }); it("renders a link to the author's profile", () => { const authorLink = authorDateInfoSection().find(GlLink); expect(authorLink.exists()).toBe(true); - expect(authorLink.attributes('href')).toBe(releaseClone.author.web_url); + expect(authorLink.attributes('href')).toBe(release.author.web_url); }); }); @@ -113,7 +151,7 @@ describe('Release block footer', () => { it('renders the commit SHA as 
plain text (instead of a link)', () => {
       expect(commitInfoSectionLink().exists()).toBe(false);
-      expect(commitInfoSection().text()).toBe(releaseClone.commit.short_id);
+      expect(commitInfoSection().text()).toBe(release.commit.short_id);
     });
   });

@@ -130,7 +168,7 @@ describe('Release block footer', () => {

     it('renders the tag name as plain text (instead of a link)', () => {
       expect(tagInfoSectionLink().exists()).toBe(false);
-      expect(tagInfoSection().text()).toBe(releaseClone.tag_name);
+      expect(tagInfoSection().text()).toBe(release.tag_name);
     });
   });

@@ -138,7 +176,18 @@ describe('Release block footer', () => {
     beforeEach(() => factory({ author: undefined }));

     it('renders the release date without the author name', () => {
-      expect(trimText(authorDateInfoSection().text())).toBe('Created 7 fortnights ago');
+      expect(trimText(authorDateInfoSection().text())).toBe(`Created 7 fortnights ago`);
+    });
+  });
+
+  describe('future release without any author info', () => {
+    beforeEach(() => {
+      mockIsFutureRelease = true;
+      factory({ author: undefined, releasedAt: mockFutureDate });
+    });
+
+    it('renders the release date without the author name', () => {
+      expect(trimText(authorDateInfoSection().text())).toBe(`Will be created in 1 month`);
     });
   });

@@ -147,7 +196,7 @@ describe('Release block footer', () => {

     it('renders the author name without the release date', () => {
       expect(trimText(authorDateInfoSection().text())).toBe(
-        `Created by ${releaseClone.author.username}`,
+        `Created by ${release.author.username}`,
       );
     });
   });
diff --git a/spec/frontend/releases/components/release_block_metadata_spec.js b/spec/frontend/releases/components/release_block_metadata_spec.js
new file mode 100644
index 00000000000..cbe478bfa1f
--- /dev/null
+++ b/spec/frontend/releases/components/release_block_metadata_spec.js
@@ -0,0 +1,67 @@
+import { mount } from '@vue/test-utils';
+import { trimText } from 'helpers/text_helper';
+import ReleaseBlockMetadata from '~/releases/components/release_block_metadata.vue';
+import { release as originalRelease } from '../mock_data';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import { cloneDeep } from 'lodash';
+
+const mockFutureDate = new Date(9999, 0, 0).toISOString();
+let mockIsFutureRelease = false;
+
+jest.mock('~/vue_shared/mixins/timeago', () => ({
+  methods: {
+    timeFormatted() {
+      return mockIsFutureRelease ? 'in 1 month' : '7 fortnights ago';
+    },
+    tooltipTitle() {
+      return 'February 30, 2401';
+    },
+  },
+}));
+
+describe('Release block metadata', () => {
+  let wrapper;
+  let release;
+
+  const factory = (releaseUpdates = {}) => {
+    wrapper = mount(ReleaseBlockMetadata, {
+      propsData: {
+        release: {
+          ...convertObjectPropsToCamelCase(release, { deep: true }),
+          ...releaseUpdates,
+        },
+      },
+    });
+  };
+
+  beforeEach(() => {
+    release = cloneDeep(originalRelease);
+  });
+
+  afterEach(() => {
+    wrapper.destroy();
+    wrapper = null;
+    mockIsFutureRelease = false;
+  });
+
+  const findReleaseDateInfo = () => wrapper.find('.js-release-date-info');
+
+  describe('with all props provided', () => {
+    beforeEach(() => factory());
+
+    it('renders the release time info', () => {
+      expect(trimText(findReleaseDateInfo().text())).toBe(`released 7 fortnights ago`);
+    });
+  });
+
+  describe('with a future release date', () => {
+    beforeEach(() => {
+      mockIsFutureRelease = true;
+      factory({ releasedAt: mockFutureDate });
+    });
+
+    it('renders the release date without the author name', () => {
+      expect(trimText(findReleaseDateInfo().text())).toBe(`will be released in 1 month`);
+    });
+  });
+});
diff --git a/spec/frontend/releases/stores/modules/detail/mutations_spec.js b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
index 9d4f78be327..155390c2cee 100644
--- a/spec/frontend/releases/stores/modules/detail/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
@@ -20,7 +20,6 @@ describe('Release detail mutations', () => {
     release = convertObjectPropsToCamelCase(originalRelease);
   });

-  // eslint-disable-next-line jest/valid-describe
   describe(types.REQUEST_RELEASE, () => {
     it('set state.isFetchingRelease to true', () => {
       mutations[types.REQUEST_RELEASE](state);
@@ -29,7 +28,6 @@
     });
   });

-  // eslint-disable-next-line jest/valid-describe
   describe(types.RECEIVE_RELEASE_SUCCESS, () => {
     it('handles a successful response from the server', () => {
       mutations[types.RECEIVE_RELEASE_SUCCESS](state, release);
@@ -44,7 +42,6 @@
     });
   });

-  // eslint-disable-next-line jest/valid-describe
   describe(types.RECEIVE_RELEASE_ERROR, () => {
     it('handles an unsuccessful response from the server', () => {
       const error = { message: 'An error occurred!' };
@@ -58,7 +55,6 @@ describe('Release detail mutations', () => {
     });
   });

-  // eslint-disable-next-line jest/valid-describe
   describe(types.UPDATE_RELEASE_TITLE, () => {
     it("updates the release's title", () => {
       state.release = release;
@@ -69,7 +65,6 @@
     });
   });

-  // eslint-disable-next-line jest/valid-describe
   describe(types.UPDATE_RELEASE_NOTES, () => {
     it("updates the release's notes", () => {
       state.release = release;
@@ -80,7 +75,6 @@
     });
   });

-  // eslint-disable-next-line jest/valid-describe
   describe(types.REQUEST_UPDATE_RELEASE, () => {
     it('set state.isUpdatingRelease to true', () => {
       mutations[types.REQUEST_UPDATE_RELEASE](state);
@@ -89,7 +83,6 @@
     });
   });

-  // eslint-disable-next-line jest/valid-describe
   describe(types.RECEIVE_UPDATE_RELEASE_SUCCESS, () => {
     it('handles a successful response from the server', () => {
       mutations[types.RECEIVE_UPDATE_RELEASE_SUCCESS](state, release);
@@ -100,7 +93,6 @@
     });
   });

-  // eslint-disable-next-line jest/valid-describe
   describe(types.RECEIVE_UPDATE_RELEASE_ERROR, () => {
     it('handles an unsuccessful response from the server', () => {
       const error = { message: 'An error occurred!' };
@@ -112,7 +104,6 @@
     });
   });

-  // eslint-disable-next-line jest/valid-describe
   describe(types.ADD_EMPTY_ASSET_LINK, () => {
     it('adds a new, empty link object to the release', () => {
       state.release = release;
@@ -132,7 +123,6 @@
     });
   });

-  // eslint-disable-next-line jest/valid-describe
   describe(types.UPDATE_ASSET_LINK_URL, () => {
     it('updates an asset link with a new URL', () => {
       state.release = release;
@@ -148,7 +138,6 @@
     });
   });

-  // eslint-disable-next-line jest/valid-describe
   describe(types.UPDATE_ASSET_LINK_NAME, () => {
     it('updates an asset link with a new name', () => {
       state.release = release;
@@ -164,7 +153,6 @@
     });
   });

-  // eslint-disable-next-line jest/valid-describe
   describe(types.REMOVE_ASSET_LINK, () => {
     it('removes an asset link from the release', () => {
       state.release = release;
diff --git a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js
index 3a75ab2d127..98962918b49 100644
--- a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js
+++ b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js
@@ -56,13 +56,8 @@ describe('date time picker lib', () => {

   describe('stringToISODate', () => {
     ['', 'null', undefined, 'abc'].forEach(input => {
-      it(`throws error for invalid input like ${input}`, done => {
-        try {
-          dateTimePickerLib.stringToISODate(input);
-        } catch (e) {
-          expect(e).toBeDefined();
-          done();
-        }
+      it(`throws error for invalid input like ${input}`, () => {
+        expect(() => dateTimePickerLib.stringToISODate(input)).toThrow();
       });
     });
     [
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 907c2579ce3..294eaad1f8e 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -229,6 +229,7 @@ stages:
 - processables
 - builds
 - bridges
+- latest_statuses
 statuses:
 - project
 - pipeline
diff --git a/spec/lib/gitlab/performance_bar_spec.rb b/spec/lib/gitlab/performance_bar_spec.rb
index f720b3293e0..7b79cc82816 100644
--- a/spec/lib/gitlab/performance_bar_spec.rb
+++ b/spec/lib/gitlab/performance_bar_spec.rb
@@ -3,41 +3,6 @@
 require 'spec_helper'

 describe Gitlab::PerformanceBar do
-  shared_examples 'allowed user IDs are cached' do
-    before do
-      # Warm the caches
-      described_class.enabled_for_user?(user)
-    end
-
-    it 'caches the allowed user IDs in cache', :use_clean_rails_memory_store_caching do
-      expect do
-        expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original
-        expect(described_class.l2_cache_backend).not_to receive(:fetch)
-        expect(described_class.enabled_for_user?(user)).to be_truthy
-      end.not_to exceed_query_limit(0)
-    end
-
-    it 'caches the allowed user IDs in L1 cache for 1 minute', :use_clean_rails_memory_store_caching do
-      Timecop.travel 2.minutes do
-        expect do
-          expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original
-          expect(described_class.l2_cache_backend).to receive(:fetch).and_call_original
-          expect(described_class.enabled_for_user?(user)).to be_truthy
-        end.not_to exceed_query_limit(0)
-      end
-    end
-
-    it 'caches the allowed user IDs in L2 cache for 5 minutes', :use_clean_rails_memory_store_caching do
-      Timecop.travel 6.minutes do
-        expect do
-          expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original
-          expect(described_class.l2_cache_backend).to receive(:fetch).and_call_original
-          expect(described_class.enabled_for_user?(user)).to be_truthy
-        end.not_to exceed_query_limit(2)
-      end
-    end
-  end
-
   it { expect(described_class.l1_cache_backend).to eq(Gitlab::ProcessMemoryCache.cache_backend) }
   it { expect(described_class.l2_cache_backend).to eq(Rails.cache) }

@@ -82,7 +47,16 @@ describe Gitlab::PerformanceBar do
       expect(described_class.enabled_for_user?(user)).to be_falsy
     end

-    it_behaves_like 'allowed user IDs are cached'
+    context 'caching of allowed user IDs' do
+      subject { described_class.enabled_for_user?(user) }
+
+      before do
+        # Warm the caches
+        described_class.enabled_for_user?(user)
+      end
+
+      it_behaves_like 'allowed user IDs are cached'
+    end
   end

   context 'when user is a member of the allowed group' do
@@ -94,7 +68,16 @@ describe Gitlab::PerformanceBar do
       expect(described_class.enabled_for_user?(user)).to be_truthy
     end

-    it_behaves_like 'allowed user IDs are cached'
+    context 'caching of allowed user IDs' do
+      subject { described_class.enabled_for_user?(user) }
+
+      before do
+        # Warm the caches
+        described_class.enabled_for_user?(user)
+      end
+
+      it_behaves_like 'allowed user IDs are cached'
+    end
   end
 end
diff --git a/spec/models/ci/freeze_period_status_spec.rb b/spec/models/ci/freeze_period_status_spec.rb
index ba364e40298..b700ec8c45f 100644
--- a/spec/models/ci/freeze_period_status_spec.rb
+++ b/spec/models/ci/freeze_period_status_spec.rb
@@ -30,7 +30,7 @@ describe Ci::FreezePeriodStatus do

     it_behaves_like 'outside freeze period', Time.utc(2020, 4, 10, 22, 59)

-    it_behaves_like 'within freeze period', Time.utc(2020, 4, 10, 23, 01)
+    it_behaves_like 'within freeze period', Time.utc(2020, 4, 10, 23, 1)

     it_behaves_like 'within freeze period', Time.utc(2020, 4, 13, 6, 59)

diff --git a/spec/serializers/ci/dag_job_entity_spec.rb b/spec/serializers/ci/dag_job_entity_spec.rb
new file mode 100644
index 00000000000..19b849c3879
--- /dev/null
+++ b/spec/serializers/ci/dag_job_entity_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::DagJobEntity do
+  let_it_be(:request) { double(:request) }
+
+  let(:job) { create(:ci_build, name: 'dag_job') }
+  let(:entity) { described_class.new(job, request: request) }
+
+  describe '#as_json' do
+    subject { entity.as_json }
+
+    it 'contains the name' do
+      expect(subject[:name]).to eq 'dag_job'
+    end
+
+    context 'when job is stage scheduled' do
+      it 'does not expose needs' do
+        expect(subject).not_to include(:needs)
+      end
+    end
+
+    context 'when job is dag scheduled' do
+      context 'when job has needs' do
+        let(:job) { create(:ci_build, scheduling_type: 'dag') }
+        let!(:need) { create(:ci_build_need, build: job, name: 'compile') }
+
+        it 'exposes the array of needs' do
+          expect(subject[:needs]).to eq ['compile']
+        end
+      end
+
+      context 'when job has empty needs' do
+        let(:job) { create(:ci_build, scheduling_type: 'dag') }
+
+        it 'exposes an empty array of needs' do
+          expect(subject[:needs]).to eq []
+        end
+      end
+    end
+  end
+end
diff --git a/spec/serializers/ci/dag_job_group_entity_spec.rb b/spec/serializers/ci/dag_job_group_entity_spec.rb
new file mode 100644
index 00000000000..a25723894fd
--- /dev/null
+++ b/spec/serializers/ci/dag_job_group_entity_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::DagJobGroupEntity do
+  let_it_be(:request) { double(:request) }
+  let_it_be(:pipeline) { create(:ci_pipeline) }
+  let_it_be(:stage) { create(:ci_stage, pipeline: pipeline) }
+
+  let(:group) { Ci::Group.new(pipeline.project, stage, name: 'test', jobs: jobs) }
+  let(:entity) { described_class.new(group, request: request) }
+
+  describe '#as_json' do
+    subject { entity.as_json }
+
+    context 'when group contains 1 job' do
+      let(:job) { create(:ci_build, stage: stage, pipeline: pipeline, name: 'test') }
+      let(:jobs) { [job] }
+
+      it 'exposes a name' do
+        expect(subject.fetch(:name)).to eq 'test'
+      end
+
+      it 'exposes the size' do
+        expect(subject.fetch(:size)).to eq 1
+      end
+
+      it 'exposes the jobs' do
+        exposed_jobs = subject.fetch(:jobs)
+
+        expect(exposed_jobs.size).to eq 1
+        expect(exposed_jobs.first.fetch(:name)).to eq 'test'
+      end
+    end
+
+    context 'when group contains multiple parallel jobs' do
+      let(:job_1) { create(:ci_build, stage: stage, pipeline: pipeline, name: 'test 1/2') }
+      let(:job_2) { create(:ci_build, stage: stage, pipeline: pipeline, name: 'test 2/2') }
+      let(:jobs) { [job_1, job_2] }
+
+      it 'exposes a name' do
+        expect(subject.fetch(:name)).to eq 'test'
+      end
+
+      it 'exposes the size' do
+        expect(subject.fetch(:size)).to eq 2
+      end
+
+      it 'exposes the jobs' do
+        exposed_jobs = subject.fetch(:jobs)
+
+        expect(exposed_jobs.size).to eq 2
+        expect(exposed_jobs.first.fetch(:name)).to eq 'test 1/2'
+        expect(exposed_jobs.last.fetch(:name)).to eq 'test 2/2'
+      end
+    end
+  end
+end
diff --git a/spec/serializers/ci/dag_pipeline_entity_spec.rb b/spec/serializers/ci/dag_pipeline_entity_spec.rb
new file mode 100644
index 00000000000..4645451e146
--- /dev/null
+++ b/spec/serializers/ci/dag_pipeline_entity_spec.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::DagPipelineEntity do
+  let_it_be(:request) { double(:request) }
+
+  let(:pipeline) { create(:ci_pipeline) }
+  let(:entity) { described_class.new(pipeline, request: request) }
+
+  describe '#as_json' do
+    subject { entity.as_json }
+
+    context 'when pipeline is empty' do
+      it 'contains stages' do
+        expect(subject).to include(:stages)
+
+        expect(subject[:stages]).to be_empty
+      end
+    end
+
+    context 'when pipeline has jobs' do
+      let!(:build_job) { create(:ci_build, stage: 'build', pipeline: pipeline) }
+      let!(:test_job) { create(:ci_build, stage: 'test', pipeline: pipeline) }
+      let!(:deploy_job) { create(:ci_build, stage: 'deploy', pipeline: pipeline) }
+
+      it 'contains 3 stages' do
+        stages = subject[:stages]
+
+        expect(stages.size).to eq 3
+        expect(stages.map { |s| s[:name] }).to contain_exactly('build', 'test', 'deploy')
+      end
+    end
+
+    context 'when pipeline has parallel jobs and DAG needs' do
+      let!(:stage_build) { create(:ci_stage_entity, name: 'build', position: 1, pipeline: pipeline) }
+      let!(:stage_test) { create(:ci_stage_entity, name: 'test', position: 2, pipeline: pipeline) }
+      let!(:stage_deploy) { create(:ci_stage_entity, name: 'deploy', position: 3, pipeline: pipeline) }
+
+      let!(:job_build_1) { create(:ci_build, name: 'build 1', stage: 'build', pipeline: pipeline) }
+      let!(:job_build_2) { create(:ci_build, name: 'build 2', stage: 'build', pipeline: pipeline) }
+
+      let!(:job_rspec_1) { create(:ci_build, name: 'rspec 1/2', stage: 'test', pipeline: pipeline) }
+      let!(:job_rspec_2) { create(:ci_build, name: 'rspec 2/2', stage: 'test', pipeline: pipeline) }
+
+      let!(:job_jest) do
+        create(:ci_build, name: 'jest', stage: 'test', scheduling_type: 'dag', pipeline: pipeline).tap do |job|
+          create(:ci_build_need, name: 'build 1', build: job)
+        end
+      end
+
+      let!(:job_deploy_ruby) do
+        create(:ci_build, name: 'deploy_ruby', stage: 'deploy', scheduling_type: 'dag', pipeline: pipeline).tap do |job|
+          create(:ci_build_need, name: 'rspec 1/2', build: job)
+          create(:ci_build_need, name: 'rspec 2/2', build: job)
+        end
+      end
+
+      let!(:job_deploy_js) do
+        create(:ci_build, name: 'deploy_js', stage: 'deploy', scheduling_type: 'dag', pipeline: pipeline).tap do |job|
+          create(:ci_build_need, name: 'jest', build: job)
+        end
+      end
+
+      it 'performs the smallest number of queries' do
+        log = ActiveRecord::QueryRecorder.new { subject }
+
+        # stages, project, builds, build_needs
+        expect(log.count).to eq 4
+      end
+
+      it 'contains all the data' do
+        expected_result = {
+          stages: [
+            {
+              name: 'build',
+              groups: [
+                { name: 'build 1', size: 1, jobs: [{ name: 'build 1' }] },
+                { name: 'build 2', size: 1, jobs: [{ name: 'build 2' }] }
+              ]
+            },
+            {
+              name: 'test',
+              groups: [
+                { name: 'jest', size: 1, jobs: [{ name: 'jest', needs: ['build 1'] }] },
+                { name: 'rspec', size: 2, jobs: [{ name: 'rspec 1/2' }, { name: 'rspec 2/2' }] }
+              ]
+            },
+            {
+              name: 'deploy',
+              groups: [
+                { name: 'deploy_js', size: 1, jobs: [{ name: 'deploy_js', needs: ['jest'] }] },
+                { name: 'deploy_ruby', size: 1, jobs: [{ name: 'deploy_ruby', needs: ['rspec 1/2', 'rspec 2/2'] }] }
+              ]
+            }
+          ]
+        }
+
+        expect(subject.fetch(:stages)).not_to be_empty
+
+        expect(subject.fetch(:stages)[0].fetch(:name)).to eq 'build'
+        expect(subject.fetch(:stages)[0]).to eq expected_result.fetch(:stages)[0]
+
+        expect(subject.fetch(:stages)[1].fetch(:name)).to eq 'test'
+        expect(subject.fetch(:stages)[1]).to eq expected_result.fetch(:stages)[1]
+
+        expect(subject.fetch(:stages)[2].fetch(:name)).to eq 'deploy'
+        expect(subject.fetch(:stages)[2]).to eq expected_result.fetch(:stages)[2]
+      end
+    end
+  end
+end
diff --git a/spec/serializers/ci/dag_pipeline_serializer_spec.rb b/spec/serializers/ci/dag_pipeline_serializer_spec.rb
new file mode 100644
index 00000000000..abf895c3e77
--- /dev/null
+++ b/spec/serializers/ci/dag_pipeline_serializer_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::DagPipelineSerializer do
+  describe '#represent' do
+    subject { described_class.new.represent(pipeline) }
+
+    let(:pipeline) { create(:ci_pipeline) }
+    let!(:job) { create(:ci_build, pipeline: pipeline) }
+
+    it 'includes stages' do
+      expect(subject[:stages]).to be_present
+      expect(subject[:stages].size).to eq 1
+    end
+  end
+end
diff --git a/spec/serializers/ci/dag_stage_entity_spec.rb b/spec/serializers/ci/dag_stage_entity_spec.rb
new file mode 100644
index 00000000000..5c6aa7faee4
--- /dev/null
+++ b/spec/serializers/ci/dag_stage_entity_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::DagStageEntity do
+  let_it_be(:pipeline) { create(:ci_pipeline) }
+  let_it_be(:request) { double(:request) }
+
+  let(:stage) { build(:ci_stage, pipeline: pipeline, name: 'test') }
+  let(:entity) { described_class.new(stage, request: request) }
+
+  let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
+
+  describe '#as_json' do
+    subject { entity.as_json }
+
+    it 'contains valid name' do
+      expect(subject[:name]).to eq 'test'
+    end
+
+    it 'contains the job groups' do
+      expect(subject).to include :groups
+      expect(subject[:groups]).not_to be_empty
+
+      job_group = subject[:groups].first
+      expect(job_group[:name]).to eq 'test'
+      expect(job_group[:size]).to eq 1
+      expect(job_group[:jobs]).not_to be_empty
+    end
+  end
+end
diff --git a/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb b/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb
new file mode 100644
index 00000000000..04f49e94647
--- /dev/null
+++ b/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+shared_examples 'allowed user IDs are cached' do
+  it 'caches the allowed user IDs in cache', :use_clean_rails_memory_store_caching do
+    expect do
+      expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original
+      expect(described_class.l2_cache_backend).not_to receive(:fetch)
+      expect(subject).to be_truthy
+    end.not_to exceed_query_limit(0)
+  end
+
+  it 'caches the allowed user IDs in L1 cache for 1 minute', :use_clean_rails_memory_store_caching do
+    Timecop.travel 2.minutes do
+      expect do
+        expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original
+        expect(described_class.l2_cache_backend).to receive(:fetch).and_call_original
+        expect(subject).to be_truthy
+      end.not_to exceed_query_limit(0)
+    end
+  end
+
+  it 'caches the allowed user IDs in L2 cache for 5 minutes', :use_clean_rails_memory_store_caching do
+    Timecop.travel 6.minutes do
+      expect do
+        expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original
+        expect(described_class.l2_cache_backend).to receive(:fetch).and_call_original
+        expect(subject).to be_truthy
+      end.not_to exceed_query_limit(2)
+    end
+  end
+end
diff --git a/yarn.lock b/yarn.lock
index 52119a27543..fdeaf56eb44 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -4385,11 +4385,6 @@ eslint-plugin-jasmine@^4.1.0:
   resolved "https://registry.yarnpkg.com/eslint-plugin-jasmine/-/eslint-plugin-jasmine-4.1.0.tgz#4f6d41b1a8622348c97559cbcd29badffa74dbfa"
   integrity sha512-Vfuk2Sm1ULR7MqGjVIOOEdQWyoFBfSwvwUeo9MrajVGJB3C24c9Mmj1Cgf8Qwmf3aS2bezPt1sckpKXWpd74Dw==

-eslint-plugin-jest@^22.3.0:
-  version "22.3.0"
-  resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-22.3.0.tgz#a10f10dedfc92def774ec9bb5bfbd2fb8e1c96d2"
-  integrity sha512-P1mYVRNlOEoO5T9yTqOfucjOYf1ktmJ26NjwjH8sxpCFQa6IhBGr5TpKl3hcAAT29hOsRJVuMWmTsHoUVo9FoA==
-
 eslint-plugin-jest@^23.8.2:
   version "23.8.2"
   resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-23.8.2.tgz#6f28b41c67ef635f803ebd9e168f6b73858eb8d4"