author    | Lukas Eipert <git@leipert.io> | 2018-04-10 10:36:59 +0200
committer | Lukas Eipert <git@leipert.io> | 2018-04-10 10:36:59 +0200
commit    | 8e66411488e87d59dde65c690892d9495292fe86 (patch)
tree      | 482d2bd7b2e7142ee5ab74406d586d84c91ba56a /config
parent    | daa6195f84f53cc484ad0730f2c5a88c6e654b15 (diff)
parent    | 91a3a4b526214f4174f4f12de23dfb5baf033211 (diff)
Merge branch 'master' into winh-single-karma-test
# Conflicts:
# spec/javascripts/test_bundle.js
Diffstat (limited to 'config')
60 files changed, 1069 insertions(+), 528 deletions(-)
diff --git a/config/application.rb b/config/application.rb index 1110199b888..13501d4bdb5 100644 --- a/config/application.rb +++ b/config/application.rb @@ -6,10 +6,12 @@ Bundler.require(:default, Rails.env) module Gitlab class Application < Rails::Application + require_dependency Rails.root.join('lib/gitlab/redis/wrapper') require_dependency Rails.root.join('lib/gitlab/redis/cache') require_dependency Rails.root.join('lib/gitlab/redis/queues') require_dependency Rails.root.join('lib/gitlab/redis/shared_state') require_dependency Rails.root.join('lib/gitlab/request_context') + require_dependency Rails.root.join('lib/gitlab/current_settings') # Settings in config/environments/* take precedence over those specified here. # Application configuration should go into files in config/initializers @@ -24,6 +26,7 @@ module Gitlab # This is a nice reference article on autoloading/eager loading: # http://blog.arkency.com/2014/11/dont-forget-about-eager-load-when-extending-autoload config.eager_load_paths.push(*%W[#{config.root}/lib + #{config.root}/app/models/badges #{config.root}/app/models/hooks #{config.root}/app/models/members #{config.root}/app/models/project_services @@ -61,6 +64,7 @@ module Gitlab # - Any parameter containing `secret` # - Two-factor tokens (:otp_attempt) # - Repo/Project Import URLs (:import_url) + # - Build traces (:trace) # - Build variables (:variables) # - GitLab Pages SSL cert/key info (:certificate, :encrypted_key) # - Webhook URLs (:hook) @@ -75,6 +79,7 @@ module Gitlab key otp_attempt sentry_dsn + trace variables ) @@ -96,22 +101,26 @@ module Gitlab # Enable the asset pipeline config.assets.enabled = true + # Support legacy unicode file named img emojis, `1F939.png` config.assets.paths << Gemojione.images_path - config.assets.paths << "vendor/assets/fonts" - config.assets.precompile << "*.png" + config.assets.paths << "#{config.root}/vendor/assets/fonts" + config.assets.precompile << "print.css" config.assets.precompile << "notify.css" config.assets.precompile << "mailers/*.css" - config.assets.precompile << "katex.css" - config.assets.precompile << "katex.js" config.assets.precompile << "xterm/xterm.css" config.assets.precompile << "performance_bar.css" config.assets.precompile << "lib/ace.js" - config.assets.precompile << "vendor/assets/fonts/*" config.assets.precompile << "test.css" config.assets.precompile << "locale/**/app.js" + # Import gitlab-svgs directly from vendored directory + config.assets.paths << "#{config.root}/node_modules/@gitlab-org/gitlab-svgs/dist" + config.assets.precompile << "icons.svg" + config.assets.precompile << "icons.json" + config.assets.precompile << "illustrations/*.svg" + # Version of your assets, change this if you want to expire all your assets config.assets.version = '1.0' @@ -149,6 +158,7 @@ module Gitlab caching_config_hash[:pool_size] = Sidekiq.options[:concurrency] + 5 caching_config_hash[:pool_timeout] = 1 end + config.cache_store = :redis_store, caching_config_hash config.active_record.raise_in_transactional_callbacks = true @@ -160,7 +170,7 @@ module Gitlab ENV['GIT_TERMINAL_PROMPT'] = '0' # Gitlab Read-only middleware support - config.middleware.insert_after ActionDispatch::Flash, 'Gitlab::Middleware::ReadOnly' + config.middleware.insert_after ActionDispatch::Flash, '::Gitlab::Middleware::ReadOnly' config.generators do |g| g.factory_bot false @@ -188,4 +198,10 @@ module Gitlab Gitlab::Routing.add_helpers(MilestonesRoutingHelper) end end + + # This method is used for smooth upgrading from the current Rails 4.x to Rails 5.0. 
+ # https://gitlab.com/gitlab-org/gitlab-ce/issues/14286 + def self.rails5? + ENV["RAILS5"].in?(%w[1 true]) + end end diff --git a/config/boot.rb b/config/boot.rb index f2830ae3166..84f390f3228 100644 --- a/config/boot.rb +++ b/config/boot.rb @@ -1,6 +1,11 @@ -require 'rubygems' +def rails5? + %w[1 true].include?(ENV["RAILS5"]) +end -# Set up gems listed in the Gemfile. -ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../../Gemfile', __FILE__) +require 'rubygems' unless rails5? + +gemfile = rails5? ? "Gemfile.rails5" : "Gemfile" +ENV['BUNDLE_GEMFILE'] ||= File.expand_path("../#{gemfile}", __dir__) +# Set up gems listed in the Gemfile. require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE']) diff --git a/config/dependency_decisions.yml b/config/dependency_decisions.yml index 60df92a44fc..6616b85129e 100644 --- a/config/dependency_decisions.yml +++ b/config/dependency_decisions.yml @@ -503,3 +503,34 @@ :versions: - 1.0.9 :when: 2017-11-16 13:02:06.765282000 Z +- - :license + - JSONStream + - MIT + - :who: Tim Zallmann + :why: https://github.com/dominictarr/JSONStream/blob/master/LICENSE.MIT + :versions: [] + :when: 2018-01-17 22:46:12.367554000 Z +- - :approve + - uws + - :who: Tim Zallmann + :why: zlib license + Development Lib + https://github.com/uNetworking/uWebSockets/blob/master/LICENSE + :versions: [] + :when: 2018-01-17 23:46:12.367554000 Z +- - :approve + - atob + - :who: Mike Greiling + :why: https://github.com/node-browser-compat/atob/blob/master/LICENSE + :versions: [] + :when: 2018-02-20 19:42:08.409887000 Z +- - :approve + - cyclist + - :who: Mike Greiling + :why: https://github.com/mafintosh/cyclist/blob/master/LICENSE + :versions: [] + :when: 2018-02-20 21:37:43.774978000 Z +- - :approve + - bitsyntax + - :who: Mike Greiling + :why: https://github.com/squaremo/bitsyntax-js/blob/master/LICENSE-MIT + :versions: [] + :when: 2018-02-20 22:20:25.958123000 Z diff --git a/config/environment.rb b/config/environment.rb index df3006d349c..487a4564b47 100644 --- a/config/environment.rb +++ b/config/environment.rb @@ -1,5 +1,11 @@ # Load the rails application -require File.expand_path('../application', __FILE__) + +# Remove this condition when upgraded to rails 5.0. +if %w[1 true].include?(ENV["RAILS5"]) + require_relative 'application' +else + require File.expand_path('../application', __FILE__) +end # Initialize the rails application Rails.application.initialize! diff --git a/config/environments/production.rb b/config/environments/production.rb index c5cbfcf64cf..9941987929c 100644 --- a/config/environments/production.rb +++ b/config/environments/production.rb @@ -9,7 +9,11 @@ Rails.application.configure do config.action_controller.perform_caching = true # Disable Rails's static asset server (Apache or nginx will already do this) - config.serve_static_files = false + if Gitlab.rails5? + config.public_file_server.enabled = false + else + config.serve_static_files = false + end # Compress JavaScripts and CSS. config.assets.js_compressor = :uglifier diff --git a/config/environments/test.rb b/config/environments/test.rb index d09e51e766a..1849c984351 100644 --- a/config/environments/test.rb +++ b/config/environments/test.rb @@ -18,7 +18,13 @@ Rails.application.configure do # Configure static asset server for tests with Cache-Control for performance config.assets.compile = false if ENV['CI'] - config.serve_static_files = true + + if Gitlab.rails5? 
+ config.public_file_server.enabled = true + else + config.serve_static_files = true + end + config.static_cache_control = "public, max-age=3600" # Show full error reports and disable caching diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example index f2f05b3eeb2..8c39a1f2aa9 100644 --- a/config/gitlab.yml.example +++ b/config/gitlab.yml.example @@ -145,12 +145,57 @@ production: &base enabled: true # The location where build artifacts are stored (default: shared/artifacts). # path: shared/artifacts + # object_store: + # enabled: false + # remote_directory: artifacts # The bucket name + # background_upload: false # Temporary option to limit automatic upload (Default: true) + # proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage + # connection: + # provider: AWS # Only AWS supported at the moment + # aws_access_key_id: AWS_ACCESS_KEY_ID + # aws_secret_access_key: AWS_SECRET_ACCESS_KEY + # region: us-east-1 ## Git LFS lfs: enabled: true # The location where LFS objects are stored (default: shared/lfs-objects). # storage_path: shared/lfs-objects + object_store: + enabled: false + remote_directory: lfs-objects # Bucket name + # direct_upload: false # Use Object Storage directly for uploads instead of background uploads if enabled (Default: false) + # background_upload: false # Temporary option to limit automatic upload (Default: true) + # proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage + connection: + provider: AWS + aws_access_key_id: AWS_ACCESS_KEY_ID + aws_secret_access_key: AWS_SECRET_ACCESS_KEY + region: us-east-1 + # Use the following options to configure an AWS compatible host + # host: 'localhost' # default: s3.amazonaws.com + # endpoint: 'http://127.0.0.1:9000' # default: nil + # path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object' + + ## Uploads (attachments, avatars, etc...) + uploads: + # The location where uploads objects are stored (default: public/). + # storage_path: public/ + # base_dir: uploads/-/system + object_store: + enabled: false + # remote_directory: uploads # Bucket name + # direct_upload: false # Use Object Storage directly for uploads instead of background uploads if enabled (Default: false) + # background_upload: false # Temporary option to limit automatic upload (Default: true) + # proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage + connection: + provider: AWS + aws_access_key_id: AWS_ACCESS_KEY_ID + aws_secret_access_key: AWS_SECRET_ACCESS_KEY + region: us-east-1 + # host: 'localhost' # default: s3.amazonaws.com + # endpoint: 'http://127.0.0.1:9000' # default: nil + # path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object' ## GitLab Pages pages: @@ -175,14 +220,20 @@ production: &base host: 'https://mattermost.example.com' ## Gravatar - ## For Libravatar see: http://doc.gitlab.com/ce/customization/libravatar.html + ## If using gravatar.com, there's nothing to change here. For Libravatar + ## you'll need to provide the custom URLs. For more information, + ## see: https://docs.gitlab.com/ee/customization/libravatar.html gravatar: - # gravatar urls: possible placeholders: %{hash} %{size} %{email} %{username} - # plain_url: "http://..." # default: http://www.gravatar.com/avatar/%{hash}?s=%{size}&d=identicon + # Gravatar/Libravatar URLs: possible placeholders: %{hash} %{size} %{email} %{username} + # plain_url: "http://..." 
# default: https://www.gravatar.com/avatar/%{hash}?s=%{size}&d=identicon # ssl_url: "https://..." # default: https://secure.gravatar.com/avatar/%{hash}?s=%{size}&d=identicon + ## Sidekiq + sidekiq: + log_format: default # (json is also supported) + ## Auxiliary jobs - # Periodically executed jobs, to self-heal Gitlab, do external synchronizations, etc. + # Periodically executed jobs, to self-heal GitLab, do external synchronizations, etc. # Please read here for more information: https://github.com/ondrejbartas/sidekiq-cron#adding-cron-job cron_jobs: # Flag stuck CI jobs as failed @@ -206,6 +257,10 @@ production: &base repository_archive_cache_worker: cron: "0 * * * *" + # Verify custom GitLab Pages domains + pages_domain_verification_cron_worker: + cron: "*/15 * * * *" + registry: # enabled: true # host: registry.example.com @@ -362,6 +417,9 @@ production: &base first_name: 'givenName' last_name: 'sn' + # If lowercase_usernames is enabled, GitLab will lower case the username. + lowercase_usernames: false + # GitLab EE only: add more LDAP servers # Choose an ID made of a-z and 0-9 . This ID will be stored in the database # so that GitLab can remember which LDAP server a user belongs to. @@ -466,7 +524,17 @@ production: &base # - { name: 'twitter', # app_id: 'YOUR_APP_ID', # app_secret: 'YOUR_APP_SECRET' } - # + # - { name: 'jwt', + # app_secret: 'YOUR_APP_SECRET', + # args: { + # algorithm: 'HS256', + # uid_claim: 'email', + # required_claims: ["name", "email"], + # info_map: { name: "name", email: "email" }, + # auth_url: 'https://example.com/', + # valid_within: nil, + # } + # } # - { name: 'saml', # label: 'Our SAML Provider', # groups_attribute: 'Groups', @@ -640,6 +708,39 @@ test: enabled: true lfs: enabled: false + # The location where LFS objects are stored (default: shared/lfs-objects). + # storage_path: shared/lfs-objects + object_store: + enabled: false + remote_directory: lfs-objects # The bucket name + connection: + provider: AWS # Only AWS supported at the moment + aws_access_key_id: AWS_ACCESS_KEY_ID + aws_secret_access_key: AWS_SECRET_ACCESS_KEY + region: us-east-1 + artifacts: + path: tmp/tests/artifacts + enabled: true + # The location where build artifacts are stored (default: shared/artifacts). 
+ # path: shared/artifacts + object_store: + enabled: false + remote_directory: artifacts # The bucket name + background_upload: false + connection: + provider: AWS # Only AWS supported at the moment + aws_access_key_id: AWS_ACCESS_KEY_ID + aws_secret_access_key: AWS_SECRET_ACCESS_KEY + region: us-east-1 + uploads: + storage_path: tmp/tests/public + object_store: + enabled: false + connection: + provider: AWS # Only AWS supported at the moment + aws_access_key_id: AWS_ACCESS_KEY_ID + aws_secret_access_key: AWS_SECRET_ACCESS_KEY + region: us-east-1 gitlab: host: localhost port: 80 @@ -650,8 +751,6 @@ test: # user: YOUR_USERNAME pages: path: tmp/tests/pages - artifacts: - path: tmp/tests/artifacts repositories: storages: default: @@ -716,6 +815,17 @@ test: - { name: 'twitter', app_id: 'YOUR_APP_ID', app_secret: 'YOUR_APP_SECRET' } + - { name: 'jwt', + app_secret: 'YOUR_APP_SECRET', + args: { + algorithm: 'HS256', + uid_claim: 'email', + required_claims: ["name", "email"], + info_map: { name: "name", email: "email" }, + auth_url: 'https://example.com/', + valid_within: nil, + } + } - { name: 'auth0', args: { client_id: 'YOUR_AUTH0_CLIENT_ID', diff --git a/config/initializers/0_as_concern.rb b/config/initializers/0_as_concern.rb new file mode 100644 index 00000000000..40232bd6252 --- /dev/null +++ b/config/initializers/0_as_concern.rb @@ -0,0 +1,25 @@ +# This module is based on: https://gist.github.com/bcardarella/5735987 + +module Prependable + def prepend_features(base) + if base.instance_variable_defined?(:@_dependencies) + base.instance_variable_get(:@_dependencies) << self + false + else + return false if base < self + + super + base.singleton_class.send(:prepend, const_get('ClassMethods')) if const_defined?(:ClassMethods) + @_dependencies.each { |dep| base.send(:prepend, dep) } # rubocop:disable Gitlab/ModuleWithInstanceVariables + base.class_eval(&@_included_block) if instance_variable_defined?(:@_included_block) # rubocop:disable Gitlab/ModuleWithInstanceVariables + end + end +end + +module ActiveSupport + module Concern + prepend Prependable + + alias_method :prepended, :included + end +end diff --git a/config/initializers/0_post_deployment_migrations.rb b/config/initializers/0_post_deployment_migrations.rb index 0068a03d214..3d81b869b52 100644 --- a/config/initializers/0_post_deployment_migrations.rb +++ b/config/initializers/0_post_deployment_migrations.rb @@ -2,11 +2,13 @@ # before other initializers as Rails may otherwise memoize a list of migrations # excluding the post deployment migrations. unless ENV['SKIP_POST_DEPLOYMENT_MIGRATIONS'] - path = Rails.root.join('db', 'post_migrate').to_s + Rails.application.config.paths['db'].each do |db_path| + path = Rails.root.join(db_path, 'post_migrate').to_s - Rails.application.config.paths['db/migrate'] << path + Rails.application.config.paths['db/migrate'] << path - # Rails memoizes migrations at certain points where it won't read the above - # path just yet. As such we must also update the following list of paths. - ActiveRecord::Migrator.migrations_paths << path + # Rails memoizes migrations at certain points where it won't read the above + # path just yet. As such we must also update the following list of paths. 
+ ActiveRecord::Migrator.migrations_paths << path + end end diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb index f10f0cdf42c..acf7754abe6 100644 --- a/config/initializers/1_settings.rb +++ b/config/initializers/1_settings.rb @@ -68,6 +68,7 @@ class Settings < Settingslogic end values.delete_if { |value| value.nil? } end + values end @@ -78,6 +79,7 @@ class Settings < Settingslogic if current.is_a? String value = modul.const_get(current.upcase) rescue default end + value end @@ -108,7 +110,7 @@ class Settings < Settingslogic url = "http://#{url}" unless url.start_with?('http') # Get rid of the path so that we don't even have to encode it - url_without_path = url.sub(%r{(https?://[^\/]+)/?.*}, '\1') + url_without_path = url.sub(%r{(https?://[^/]+)/?.*}, '\1') URI.parse(url_without_path).host end @@ -149,6 +151,7 @@ if Settings.ldap['enabled'] || Rails.env.test? server['allow_username_or_email_login'] = false if server['allow_username_or_email_login'].nil? server['active_directory'] = true if server['active_directory'].nil? server['attributes'] = {} if server['attributes'].nil? + server['lowercase_usernames'] = false if server['lowercase_usernames'].nil? server['provider_name'] ||= "ldap#{key}".downcase server['provider_class'] = OmniAuth::Utils.camelize(server['provider_name']) @@ -259,7 +262,7 @@ Settings.gitlab['signup_enabled'] ||= true if Settings.gitlab['signup_enabled']. Settings.gitlab['signin_enabled'] ||= true if Settings.gitlab['signin_enabled'].nil? Settings.gitlab['restricted_visibility_levels'] = Settings.__send__(:verify_constant_array, Gitlab::VisibilityLevel, Settings.gitlab['restricted_visibility_levels'], []) Settings.gitlab['username_changing_enabled'] = true if Settings.gitlab['username_changing_enabled'].nil? -Settings.gitlab['issue_closing_pattern'] = '((?:[Cc]los(?:e[sd]?|ing)|[Ff]ix(?:e[sd]|ing)?|[Rr]esolv(?:e[sd]?|ing)|[Ii]mplement(?:s|ed|ing)?)(:?) +(?:(?:issues? +)?%{issue_ref}(?:(?:, *| +and +)?)|([A-Z][A-Z0-9_]+-\d+))+)' if Settings.gitlab['issue_closing_pattern'].nil? +Settings.gitlab['issue_closing_pattern'] = '((?:[Cc]los(?:e[sd]?|ing)|[Ff]ix(?:e[sd]|ing)?|[Rr]esolv(?:e[sd]?|ing)|[Ii]mplement(?:s|ed|ing)?)(:?) +(?:(?:issues? +)?%{issue_ref}(?:(?: *,? +and +| *, *)?)|([A-Z][A-Z0-9_]+-\d+))+)' if Settings.gitlab['issue_closing_pattern'].nil? Settings.gitlab['default_projects_features'] ||= {} Settings.gitlab['webhook_timeout'] ||= 10 Settings.gitlab['max_attachment_size'] ||= 10 @@ -298,8 +301,18 @@ Settings.incoming_email['enabled'] = false if Settings.incoming_email['enabled'] # Settings['artifacts'] ||= Settingslogic.new({}) Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil? -Settings.artifacts['path'] = Settings.absolute(Settings.artifacts['path'] || File.join(Settings.shared['path'], "artifacts")) -Settings.artifacts['max_size'] ||= 100 # in megabytes +Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values_at('path', 'storage_path').compact.first || File.join(Settings.shared['path'], "artifacts")) +# Settings.artifact['path'] is deprecated, use `storage_path` instead +Settings.artifacts['path'] = Settings.artifacts['storage_path'] +Settings.artifacts['max_size'] ||= 100 # in megabytes +Settings.artifacts['object_store'] ||= Settingslogic.new({}) +Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil? 
+Settings.artifacts['object_store']['remote_directory'] ||= nil +Settings.artifacts['object_store']['direct_upload'] = false if Settings.artifacts['object_store']['direct_upload'].nil? +Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil? +Settings.artifacts['object_store']['proxy_download'] = false if Settings.artifacts['object_store']['proxy_download'].nil? +# Convert upload connection settings to use string keys, to make Fog happy +Settings.artifacts['object_store']['connection']&.deep_stringify_keys! # # Registry @@ -335,6 +348,29 @@ Settings.pages['artifacts_server'] ||= Settings.pages['enabled'] if Settings.pa Settings['lfs'] ||= Settingslogic.new({}) Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil? Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects")) +Settings.lfs['object_store'] ||= Settingslogic.new({}) +Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil? +Settings.lfs['object_store']['remote_directory'] ||= nil +Settings.lfs['object_store']['direct_upload'] = false if Settings.lfs['object_store']['direct_upload'].nil? +Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil? +Settings.lfs['object_store']['proxy_download'] = false if Settings.lfs['object_store']['proxy_download'].nil? +# Convert upload connection settings to use string keys, to make Fog happy +Settings.lfs['object_store']['connection']&.deep_stringify_keys! + +# +# Uploads +# +Settings['uploads'] ||= Settingslogic.new({}) +Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public') +Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system' +Settings.uploads['object_store'] ||= Settingslogic.new({}) +Settings.uploads['object_store']['enabled'] = false if Settings.uploads['object_store']['enabled'].nil? +Settings.uploads['object_store']['remote_directory'] ||= 'uploads' +Settings.uploads['object_store']['direct_upload'] = false if Settings.uploads['object_store']['direct_upload'].nil? +Settings.uploads['object_store']['background_upload'] = true if Settings.uploads['object_store']['background_upload'].nil? +Settings.uploads['object_store']['proxy_download'] = false if Settings.uploads['object_store']['proxy_download'].nil? +# Convert upload connection settings to use string keys, to make Fog happy +Settings.uploads['object_store']['connection']&.deep_stringify_keys! # # Mattermost @@ -348,7 +384,7 @@ Settings.mattermost['host'] = nil unless Settings.mattermost.enabled # Settings['gravatar'] ||= Settingslogic.new({}) Settings.gravatar['enabled'] = true if Settings.gravatar['enabled'].nil? 
-Settings.gravatar['plain_url'] ||= 'http://www.gravatar.com/avatar/%{hash}?s=%{size}&d=identicon' +Settings.gravatar['plain_url'] ||= 'https://www.gravatar.com/avatar/%{hash}?s=%{size}&d=identicon' Settings.gravatar['ssl_url'] ||= 'https://secure.gravatar.com/avatar/%{hash}?s=%{size}&d=identicon' Settings.gravatar['host'] = Settings.host_without_www(Settings.gravatar['plain_url']) @@ -415,6 +451,16 @@ Settings.cron_jobs['stuck_merge_jobs_worker'] ||= Settingslogic.new({}) Settings.cron_jobs['stuck_merge_jobs_worker']['cron'] ||= '0 */2 * * *' Settings.cron_jobs['stuck_merge_jobs_worker']['job_class'] = 'StuckMergeJobsWorker' +Settings.cron_jobs['pages_domain_verification_cron_worker'] ||= Settingslogic.new({}) +Settings.cron_jobs['pages_domain_verification_cron_worker']['cron'] ||= '*/15 * * * *' +Settings.cron_jobs['pages_domain_verification_cron_worker']['job_class'] = 'PagesDomainVerificationCronWorker' + +# +# Sidekiq +# +Settings['sidekiq'] ||= Settingslogic.new({}) +Settings['sidekiq']['log_format'] ||= 'default' + # # GitLab Shell # @@ -451,12 +497,7 @@ unless Settings.repositories.storages['default'] end Settings.repositories.storages.each do |key, storage| - storage = Settingslogic.new(storage) - - # Expand relative paths - storage['path'] = Settings.absolute(storage['path']) - - Settings.repositories.storages[key] = storage + Settings.repositories.storages[key] = Gitlab::GitalyClient::StorageSettings.new(storage) end # @@ -467,10 +508,10 @@ end # repository_downloads_path value. # repositories_storages = Settings.repositories.storages.values -repository_downloads_path = Settings.gitlab['repository_downloads_path'].to_s.gsub(/\/$/, '') +repository_downloads_path = Settings.gitlab['repository_downloads_path'].to_s.gsub(%r{/$}, '') repository_downloads_full_path = File.expand_path(repository_downloads_path, Settings.gitlab['user_home']) -if repository_downloads_path.blank? || repositories_storages.any? { |rs| [repository_downloads_path, repository_downloads_full_path].include?(rs['path'].gsub(/\/$/, '')) } +if repository_downloads_path.blank? || repositories_storages.any? { |rs| [repository_downloads_path, repository_downloads_full_path].include?(rs.legacy_disk_path.gsub(%r{/$}, '')) } Settings.gitlab['repository_downloads_path'] = File.join(Settings.shared['path'], 'cache/archive') end diff --git a/config/initializers/6_validations.rb b/config/initializers/6_validations.rb index f8e67ce04c9..d92cdb97766 100644 --- a/config/initializers/6_validations.rb +++ b/config/initializers/6_validations.rb @@ -5,7 +5,7 @@ end def find_parent_path(name, path) parent = Pathname.new(path).realpath.parent Gitlab.config.repositories.storages.detect do |n, rs| - name != n && Pathname.new(rs['path']).realpath == parent + name != n && Pathname.new(rs.legacy_disk_path).realpath == parent end rescue Errno::EIO, Errno::ENOENT => e warning = "WARNING: couldn't verify #{path} (#{name}). "\ @@ -33,7 +33,7 @@ def validate_storages_config "If you're using the Gitlab Development Kit, you can update your configuration running `gdk reconfigure`.\n" end - if !repository_storage.is_a?(Hash) || repository_storage['path'].nil? + if !repository_storage.is_a?(Gitlab::GitalyClient::StorageSettings) || repository_storage.legacy_disk_path.nil? storage_validation_error("#{name} is not a valid storage, because it has no `path` key. 
Refer to gitlab.yml.example for an updated example") end @@ -50,7 +50,7 @@ end def validate_storages_paths Gitlab.config.repositories.storages.each do |name, repository_storage| - parent_name, _parent_path = find_parent_path(name, repository_storage['path']) + parent_name, _parent_path = find_parent_path(name, repository_storage.legacy_disk_path) if parent_name storage_validation_error("#{name} is a nested path of #{parent_name}. Nested paths are not supported for repository storages") end diff --git a/config/initializers/8_metrics.rb b/config/initializers/8_metrics.rb index 45b39b2a38d..7cdf49159b4 100644 --- a/config/initializers/8_metrics.rb +++ b/config/initializers/8_metrics.rb @@ -94,6 +94,7 @@ def instrument_classes(instrumentation) instrumentation.instrument_instance_methods(RepositoryCheck::SingleRepositoryWorker) + instrumentation.instrument_instance_methods(Rouge::Plugins::CommonMark) instrumentation.instrument_instance_methods(Rouge::Plugins::Redcarpet) instrumentation.instrument_instance_methods(Rouge::Formatters::HTMLGitlab) diff --git a/config/initializers/active_record_array_type_casting.rb b/config/initializers/active_record_array_type_casting.rb index d94d592add6..a149e048ee2 100644 --- a/config/initializers/active_record_array_type_casting.rb +++ b/config/initializers/active_record_array_type_casting.rb @@ -1,20 +1,23 @@ -module ActiveRecord - class PredicateBuilder - class ArrayHandler - module TypeCasting - def call(attribute, value) - # This is necessary because by default ActiveRecord does not respect - # custom type definitions (like our `ShaAttribute`) when providing an - # array in `where`, like in `where(commit_sha: [sha1, sha2, sha3])`. - model = attribute.relation&.engine - type = model.user_provided_columns[attribute.name] if model - value = value.map { |value| type.type_cast_for_database(value) } if type +# Remove this initializer when upgraded to Rails 5.0 +unless Gitlab.rails5? + module ActiveRecord + class PredicateBuilder + class ArrayHandler + module TypeCasting + def call(attribute, value) + # This is necessary because by default ActiveRecord does not respect + # custom type definitions (like our `ShaAttribute`) when providing an + # array in `where`, like in `where(commit_sha: [sha1, sha2, sha3])`. + model = attribute.relation&.engine + type = model.user_provided_columns[attribute.name] if model + value = value.map { |value| type.type_cast_for_database(value) } if type - super(attribute, value) + super(attribute, value) + end end - end - prepend TypeCasting + prepend TypeCasting + end end end end diff --git a/config/initializers/active_record_data_types.rb b/config/initializers/active_record_data_types.rb index fef591c397d..fda13d0c4cb 100644 --- a/config/initializers/active_record_data_types.rb +++ b/config/initializers/active_record_data_types.rb @@ -54,7 +54,7 @@ elsif Gitlab::Database.mysql? def initialize_type_map(mapping) super mapping - mapping.register_type(%r(timestamp)i) do |sql_type| + mapping.register_type(/timestamp/i) do |sql_type| precision = extract_precision(sql_type) ActiveRecord::ConnectionAdapters::AbstractMysqlAdapter::MysqlDateTimeWithTimeZone.new(precision: precision) end @@ -79,3 +79,8 @@ elsif Gitlab::Database.mysql? NATIVE_DATABASE_TYPES[:datetime_with_timezone] = { name: 'timestamp' } end end + +# Ensure `datetime_with_timezone` columns are correctly written to schema.rb +if (ActiveRecord::Base.connection.active? 
rescue false) + ActiveRecord::Base.connection.send :reload_type_map +end diff --git a/config/initializers/active_record_locking.rb b/config/initializers/active_record_locking.rb index 150aaa2a8c2..3e7111fd063 100644 --- a/config/initializers/active_record_locking.rb +++ b/config/initializers/active_record_locking.rb @@ -1,73 +1,77 @@ # rubocop:disable Lint/RescueException -# This patch fixes https://github.com/rails/rails/issues/26024 -# TODO: Remove it when it's no longer necessary - -module ActiveRecord - module Locking - module Optimistic - # We overwrite this method because we don't want to have default value - # for newly created records - def _create_record(attribute_names = self.attribute_names, *) # :nodoc: - super - end +# Remove this entire initializer when we are at rails 5.0. +# This file fixes the bug (see below) which has been fixed in the upstream. +unless Gitlab.rails5? + # This patch fixes https://github.com/rails/rails/issues/26024 + # TODO: Remove it when it's no longer necessary + + module ActiveRecord + module Locking + module Optimistic + # We overwrite this method because we don't want to have default value + # for newly created records + def _create_record(attribute_names = self.attribute_names, *) # :nodoc: + super + end - def _update_record(attribute_names = self.attribute_names) #:nodoc: - return super unless locking_enabled? - return 0 if attribute_names.empty? + def _update_record(attribute_names = self.attribute_names) #:nodoc: + return super unless locking_enabled? + return 0 if attribute_names.empty? - lock_col = self.class.locking_column + lock_col = self.class.locking_column - previous_lock_value = send(lock_col).to_i # rubocop:disable GitlabSecurity/PublicSend + previous_lock_value = send(lock_col).to_i # rubocop:disable GitlabSecurity/PublicSend - # This line is added as a patch - previous_lock_value = nil if previous_lock_value == '0' || previous_lock_value == 0 + # This line is added as a patch + previous_lock_value = nil if previous_lock_value == '0' || previous_lock_value == 0 - increment_lock + increment_lock - attribute_names += [lock_col] - attribute_names.uniq! + attribute_names += [lock_col] + attribute_names.uniq! - begin - relation = self.class.unscoped + begin + relation = self.class.unscoped - affected_rows = relation.where( - self.class.primary_key => id, - lock_col => previous_lock_value - ).update_all( - attributes_for_update(attribute_names).map do |name| - [name, _read_attribute(name)] - end.to_h - ) + affected_rows = relation.where( + self.class.primary_key => id, + lock_col => previous_lock_value + ).update_all( + attributes_for_update(attribute_names).map do |name| + [name, _read_attribute(name)] + end.to_h + ) - unless affected_rows == 1 - raise ActiveRecord::StaleObjectError.new(self, "update") - end + unless affected_rows == 1 + raise ActiveRecord::StaleObjectError.new(self, "update") + end - affected_rows + affected_rows - # If something went wrong, revert the version. - rescue Exception - send(lock_col + '=', previous_lock_value) # rubocop:disable GitlabSecurity/PublicSend - raise + # If something went wrong, revert the version. + rescue Exception + send(lock_col + '=', previous_lock_value) # rubocop:disable GitlabSecurity/PublicSend + raise + end end - end - # This is patched because we need it to query `lock_version IS NULL` - # rather than `lock_version = 0` whenever lock_version is NULL. - def relation_for_destroy - return super unless locking_enabled? 
+ # This is patched because we need it to query `lock_version IS NULL` + # rather than `lock_version = 0` whenever lock_version is NULL. + def relation_for_destroy + return super unless locking_enabled? - column_name = self.class.locking_column - super.where(self.class.arel_table[column_name].eq(self[column_name])) + column_name = self.class.locking_column + super.where(self.class.arel_table[column_name].eq(self[column_name])) + end end - end - # This is patched because we want `lock_version` default to `NULL` - # rather than `0` - class LockingType < SimpleDelegator - def type_cast_from_database(value) - super + # This is patched because we want `lock_version` default to `NULL` + # rather than `0` + class LockingType < SimpleDelegator + def type_cast_from_database(value) + super + end end end end diff --git a/config/initializers/application_controller_renderer.rb b/config/initializers/application_controller_renderer.rb new file mode 100644 index 00000000000..a65f8aecf9e --- /dev/null +++ b/config/initializers/application_controller_renderer.rb @@ -0,0 +1,12 @@ +# Remove this `if` condition when upgraded to rails 5.0. +# The body must be kept. +if Gitlab.rails5? + # Be sure to restart your server when you modify this file. + + # ActiveSupport::Reloader.to_prepare do + # ApplicationController.renderer.defaults.merge!( + # http_host: 'example.org', + # https: false + # ) + # end +end diff --git a/config/initializers/ar5_batching.rb b/config/initializers/ar5_batching.rb index 6ebaf8834d2..874455ce5af 100644 --- a/config/initializers/ar5_batching.rb +++ b/config/initializers/ar5_batching.rb @@ -1,41 +1,39 @@ -# Port ActiveRecord::Relation#in_batches from ActiveRecord 5. -# https://github.com/rails/rails/blob/ac027338e4a165273607dccee49a3d38bc836794/activerecord/lib/active_record/relation/batches.rb#L184 -# TODO: this can be removed once we're using AR5. -raise "Vendored ActiveRecord 5 code! Delete #{__FILE__}!" if ActiveRecord::VERSION::MAJOR >= 5 - -module ActiveRecord - module Batches - # Differences from upstream: enumerator support was removed, and custom - # order/limit clauses are ignored without a warning. - def in_batches(of: 1000, start: nil, finish: nil, load: false) - raise "Must provide a block" unless block_given? - - relation = self.reorder(batch_order).limit(of) - relation = relation.where(arel_table[primary_key].gteq(start)) if start - relation = relation.where(arel_table[primary_key].lteq(finish)) if finish - batch_relation = relation - - loop do - if load - records = batch_relation.records - ids = records.map(&:id) - yielded_relation = self.where(primary_key => ids) - yielded_relation.load_records(records) - else - ids = batch_relation.pluck(primary_key) - yielded_relation = self.where(primary_key => ids) +# Remove this file when upgraded to rails 5.0. +unless Gitlab.rails5? + module ActiveRecord + module Batches + # Differences from upstream: enumerator support was removed, and custom + # order/limit clauses are ignored without a warning. + def in_batches(of: 1000, start: nil, finish: nil, load: false) + raise "Must provide a block" unless block_given? 
+ + relation = self.reorder(batch_order).limit(of) + relation = relation.where(arel_table[primary_key].gteq(start)) if start + relation = relation.where(arel_table[primary_key].lteq(finish)) if finish + batch_relation = relation + + loop do + if load + records = batch_relation.records + ids = records.map(&:id) + yielded_relation = self.where(primary_key => ids) + yielded_relation.load_records(records) + else + ids = batch_relation.pluck(primary_key) + yielded_relation = self.where(primary_key => ids) + end + + break if ids.empty? + + primary_key_offset = ids.last + raise ArgumentError.new("Primary key not included in the custom select clause") unless primary_key_offset + + yield yielded_relation + + break if ids.length < of + + batch_relation = relation.where(arel_table[primary_key].gt(primary_key_offset)) end - - break if ids.empty? - - primary_key_offset = ids.last - raise ArgumentError.new("Primary key not included in the custom select clause") unless primary_key_offset - - yield yielded_relation - - break if ids.length < of - - batch_relation = relation.where(arel_table[primary_key].gt(primary_key_offset)) end end end diff --git a/config/initializers/ar5_pg_10_support.rb b/config/initializers/ar5_pg_10_support.rb new file mode 100644 index 00000000000..40548290ce8 --- /dev/null +++ b/config/initializers/ar5_pg_10_support.rb @@ -0,0 +1,58 @@ +# Remove this file when upgraded to rails 5.0. +if !Gitlab.rails5? && Gitlab::Database.postgresql? + require 'active_record/connection_adapters/postgresql_adapter' + require 'active_record/connection_adapters/postgresql/schema_statements' + + # + # Monkey-patch the refused Rails 4.2 patch at https://github.com/rails/rails/pull/31330 + # + # Updates sequence logic to support PostgreSQL 10. + # + # rubocop:disable all + module ActiveRecord + module ConnectionAdapters + + # We need #postgresql_version to be public as in ActiveRecord 5 for seed_fu + # to work. In ActiveRecord 4, it is protected. + # https://github.com/mbleigh/seed-fu/issues/123 + class PostgreSQLAdapter + public :postgresql_version + end + + module PostgreSQL + module SchemaStatements + # Resets the sequence of a table's primary key to the maximum value. + def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc: + unless pk and sequence + default_pk, default_sequence = pk_and_sequence_for(table) + + pk ||= default_pk + sequence ||= default_sequence + end + + if @logger && pk && !sequence + @logger.warn "#{table} has primary key #{pk} with no default sequence" + end + + if pk && sequence + quoted_sequence = quote_table_name(sequence) + max_pk = select_value("SELECT MAX(#{quote_column_name pk}) FROM #{quote_table_name(table)}") + if max_pk.nil? + if postgresql_version >= 100000 + minvalue = select_value("SELECT seqmin FROM pg_sequence WHERE seqrelid = #{quote(quoted_sequence)}::regclass") + else + minvalue = select_value("SELECT min_value FROM #{quoted_sequence}") + end + end + + select_value <<-end_sql, 'SCHEMA' + SELECT setval(#{quote(quoted_sequence)}, #{max_pk ? max_pk : minvalue}, #{max_pk ? 
true : false}) + end_sql + end + end + end + end + end + end + # rubocop:enable all +end diff --git a/config/initializers/ar_native_database_types.rb b/config/initializers/ar_native_database_types.rb new file mode 100644 index 00000000000..3522b1db536 --- /dev/null +++ b/config/initializers/ar_native_database_types.rb @@ -0,0 +1,11 @@ +require 'active_record/connection_adapters/abstract_mysql_adapter' + +module ActiveRecord + module ConnectionAdapters + class AbstractMysqlAdapter + NATIVE_DATABASE_TYPES.merge!( + bigserial: { name: 'bigint(20) auto_increment PRIMARY KEY' } + ) + end + end +end diff --git a/config/initializers/artifacts_direct_upload_support.rb b/config/initializers/artifacts_direct_upload_support.rb new file mode 100644 index 00000000000..d2bc35ea613 --- /dev/null +++ b/config/initializers/artifacts_direct_upload_support.rb @@ -0,0 +1,7 @@ +artifacts_object_store = Gitlab.config.artifacts.object_store + +if artifacts_object_store.enabled && + artifacts_object_store.direct_upload && + artifacts_object_store.connection&.provider.to_s != 'Google' + raise "Only 'Google' is supported as a object storage provider when 'direct_upload' of artifacts is used" +end diff --git a/config/initializers/asset_sync.rb b/config/initializers/asset_sync.rb index db8500f6231..7f3934853fa 100644 --- a/config/initializers/asset_sync.rb +++ b/config/initializers/asset_sync.rb @@ -14,8 +14,8 @@ AssetSync.configure do |config| config.fog_directory = ENV['FOG_DIRECTORY'] if ENV.has_key?('FOG_DIRECTORY') config.fog_region = ENV['FOG_REGION'] if ENV.has_key?('FOG_REGION') - config.aws_access_key_id = ENV['AWS_ACCESS_KEY_ID'] if ENV.has_key?('AWS_ACCESS_KEY_ID') - config.aws_secret_access_key = ENV['AWS_SECRET_ACCESS_KEY'] if ENV.has_key?('AWS_SECRET_ACCESS_KEY') + config.aws_access_key_id = ENV['ASSETS_AWS_ACCESS_KEY_ID'] if ENV.has_key?('ASSETS_AWS_ACCESS_KEY_ID') + config.aws_secret_access_key = ENV['ASSETS_AWS_SECRET_ACCESS_KEY'] if ENV.has_key?('ASSETS_AWS_SECRET_ACCESS_KEY') config.aws_reduced_redundancy = ENV['AWS_REDUCED_REDUNDANCY'] == true if ENV.has_key?('AWS_REDUCED_REDUNDANCY') config.rackspace_username = ENV['RACKSPACE_USERNAME'] if ENV.has_key?('RACKSPACE_USERNAME') diff --git a/config/initializers/backtrace_silencers.rb b/config/initializers/backtrace_silencers.rb index 59385cdf379..58941aae1b0 100644 --- a/config/initializers/backtrace_silencers.rb +++ b/config/initializers/backtrace_silencers.rb @@ -1,7 +1,2 @@ -# Be sure to restart your server when you modify this file. - -# You can add backtrace silencers for libraries that you're using but don't wish to see in your backtraces. -# Rails.backtrace_cleaner.add_silencer { |line| line =~ /my_noisy_library/ } - -# You can also remove all the silencers if you're trying to debug a problem that might stem from framework code. -# Rails.backtrace_cleaner.remove_silencers! +Rails.backtrace_cleaner.remove_silencers! +Rails.backtrace_cleaner.add_silencer { |line| line !~ Gitlab::APP_DIRS_PATTERN } diff --git a/config/initializers/carrierwave.rb b/config/initializers/carrierwave.rb index cd7df44351a..5cde6cbb0ff 100644 --- a/config/initializers/carrierwave.rb +++ b/config/initializers/carrierwave.rb @@ -28,16 +28,4 @@ if File.exist?(aws_file) # when fog_public is false and provider is AWS or Google, defaults to 600 config.fog_authenticated_url_expiration = 1 << 29 end - - # Mocking Fog requests, based on: https://github.com/carrierwaveuploader/carrierwave/wiki/How-to%3A-Test-Fog-based-uploaders - if Rails.env.test? - Fog.mock! 
- connection = ::Fog::Storage.new( - aws_access_key_id: AWS_CONFIG['access_key_id'], - aws_secret_access_key: AWS_CONFIG['secret_access_key'], - provider: 'AWS', - region: AWS_CONFIG['region'] - ) - connection.directories.create(key: AWS_CONFIG['bucket']) - end end diff --git a/config/initializers/date_time_formats.rb b/config/initializers/date_time_formats.rb index 57568203cab..1939ced512d 100644 --- a/config/initializers/date_time_formats.rb +++ b/config/initializers/date_time_formats.rb @@ -2,8 +2,10 @@ # :medium - Nov 10, 2007 # :long - November 10, 2007 Date::DATE_FORMATS[:medium] = '%b %-d, %Y' +Date::DATE_FORMATS[:csv] = '%Y-%m-%d' # :short - 18 Jan 06:10 # :medium - Jan 18, 2007 6:10am # :long - January 18, 2007 06:10 Time::DATE_FORMATS[:medium] = '%b %-d, %Y %-I:%M%P' +Time::DATE_FORMATS[:csv] = '%Y-%m-%d %H:%M:%S' diff --git a/config/initializers/devise.rb b/config/initializers/devise.rb index 051ef93b205..362b9cc9a88 100644 --- a/config/initializers/devise.rb +++ b/config/initializers/devise.rb @@ -212,55 +212,12 @@ Devise.setup do |config| # manager.default_strategies(scope: :user).unshift :some_external_strategy # end - if Gitlab::LDAP::Config.enabled? - Gitlab::LDAP::Config.providers.each do |provider| - ldap_config = Gitlab::LDAP::Config.new(provider) + if Gitlab::Auth::LDAP::Config.enabled? + Gitlab::Auth::LDAP::Config.providers.each do |provider| + ldap_config = Gitlab::Auth::LDAP::Config.new(provider) config.omniauth(provider, ldap_config.omniauth_options) end end - Gitlab.config.omniauth.providers.each do |provider| - provider_arguments = [] - - %w[app_id app_secret].each do |argument| - provider_arguments << provider[argument] if provider[argument] - end - - case provider['args'] - when Array - # An Array from the configuration will be expanded. - provider_arguments.concat provider['args'] - when Hash - # Add procs for handling SLO - if provider['name'] == 'cas3' - provider['args'][:on_single_sign_out] = lambda do |request| - ticket = request.params[:session_index] - raise "Service Ticket not found." unless Gitlab::OAuth::Session.valid?(:cas3, ticket) - - Gitlab::OAuth::Session.destroy(:cas3, ticket) - true - end - end - if provider['name'] == 'authentiq' - provider['args'][:remote_sign_out_handler] = lambda do |request| - authentiq_session = request.params['sid'] - if Gitlab::OAuth::Session.valid?(:authentiq, authentiq_session) - Gitlab::OAuth::Session.destroy(:authentiq, authentiq_session) - true - else - false - end - end - end - - if provider['name'] == 'shibboleth' - provider['args'][:fail_with_empty_uid] = true - end - - # A Hash from the configuration will be passed as is. 
- provider_arguments << provider['args'].symbolize_keys - end - - config.omniauth provider['name'].to_sym, *provider_arguments - end + Gitlab::OmniauthInitializer.new(config).execute(Gitlab.config.omniauth.providers) end diff --git a/config/initializers/doorkeeper.rb b/config/initializers/doorkeeper.rb index b89f0419b91..2079d3acb72 100644 --- a/config/initializers/doorkeeper.rb +++ b/config/initializers/doorkeeper.rb @@ -103,4 +103,6 @@ Doorkeeper.configure do # Some applications require dynamic query parameters on their request_uri # set to true if you want this to be allowed # wildcard_redirect_uri false + + base_controller 'ApplicationController' end diff --git a/config/initializers/doorkeeper_openid_connect.rb b/config/initializers/doorkeeper_openid_connect.rb index af174def047..98e1f6e830f 100644 --- a/config/initializers/doorkeeper_openid_connect.rb +++ b/config/initializers/doorkeeper_openid_connect.rb @@ -31,6 +31,7 @@ Doorkeeper::OpenidConnect.configure do o.claim(:website) { |user| user.full_website_url if user.website_url? } o.claim(:profile) { |user| Gitlab::Routing.url_helpers.user_url user } o.claim(:picture) { |user| user.avatar_url(only_path: false) } + o.claim(:groups) { |user| user.membership_groups.map(&:full_path) } end end end diff --git a/config/initializers/flipper.rb b/config/initializers/flipper.rb index cc9167d29b9..c60ad535fd5 100644 --- a/config/initializers/flipper.rb +++ b/config/initializers/flipper.rb @@ -8,7 +8,7 @@ Flipper.configure do |config| cached_adapter = Flipper::Adapters::ActiveSupportCacheStore.new( adapter, Rails.cache, - expires_in: 10.seconds) + expires_in: 1.hour) Flipper.new(cached_adapter) end diff --git a/config/initializers/fog_google_https_private_urls.rb b/config/initializers/fog_google_https_private_urls.rb new file mode 100644 index 00000000000..f92e623a5d2 --- /dev/null +++ b/config/initializers/fog_google_https_private_urls.rb @@ -0,0 +1,20 @@ +# +# Monkey patching the https support for private urls +# See https://gitlab.com/gitlab-org/gitlab-ee/issues/4879 +# +module Fog + module Storage + class GoogleXML + class File < Fog::Model + module MonkeyPatch + def url(expires) + requires :key + collection.get_https_url(key, expires) + end + end + + prepend MonkeyPatch + end + end + end +end diff --git a/config/initializers/forbid_sidekiq_in_transactions.rb b/config/initializers/forbid_sidekiq_in_transactions.rb index bedd57ede04..4cf1d455eb4 100644 --- a/config/initializers/forbid_sidekiq_in_transactions.rb +++ b/config/initializers/forbid_sidekiq_in_transactions.rb @@ -1,5 +1,7 @@ module Sidekiq module Worker + EnqueueFromTransactionError = Class.new(StandardError) + mattr_accessor :skip_transaction_check self.skip_transaction_check = false @@ -12,17 +14,30 @@ module Sidekiq end module ClassMethods - module NoSchedulingFromTransactions + module NoEnqueueingFromTransactions %i(perform_async perform_at perform_in).each do |name| define_method(name) do |*args| if !Sidekiq::Worker.skip_transaction_check && AfterCommitQueue.inside_transaction? - raise <<-MSG.strip_heredoc + begin + raise Sidekiq::Worker::EnqueueFromTransactionError, <<~MSG `#{self}.#{name}` cannot be called inside a transaction as this can lead to race conditions when the worker runs before the transaction is committed and tries to access a model that has not been saved yet. Use an `after_commit` hook, or include `AfterCommitQueue` and use a `run_after_commit` block instead. - MSG + MSG + rescue Sidekiq::Worker::EnqueueFromTransactionError => e + if Rails.env.production? 
+ Rails.logger.error(e.message) + + if Gitlab::Sentry.enabled? + Gitlab::Sentry.context + Raven.capture_exception(e) + end + else + raise + end + end end super(*args) @@ -30,7 +45,7 @@ module Sidekiq end end - prepend NoSchedulingFromTransactions + prepend NoEnqueueingFromTransactions end end end diff --git a/config/initializers/gollum.rb b/config/initializers/gollum.rb index f1066f83dd9..6dfaceb8427 100644 --- a/config/initializers/gollum.rb +++ b/config/initializers/gollum.rb @@ -35,6 +35,108 @@ module Gollum [] end end + + # Remove if https://github.com/gollum/gollum-lib/pull/292 has been merged + def update_page(page, name, format, data, commit = {}) + name = name ? ::File.basename(name) : page.name + format ||= page.format + dir = ::File.dirname(page.path) + dir = '' if dir == '.' + filename = (rename = page.name != name) ? Gollum::Page.cname(name) : page.filename_stripped + + multi_commit = !!commit[:committer] + committer = multi_commit ? commit[:committer] : Committer.new(self, commit) + + if !rename && page.format == format + committer.add(page.path, normalize(data)) + else + committer.delete(page.path) + committer.add_to_index(dir, filename, format, data) + end + + committer.after_commit do |index, _sha| + @access.refresh + index.update_working_dir(dir, page.filename_stripped, page.format) + index.update_working_dir(dir, filename, format) + end + + multi_commit ? committer : committer.commit + end + + # Remove if https://github.com/gollum/gollum-lib/pull/292 has been merged + def rename_page(page, rename, commit = {}) + return false if page.nil? + return false if rename.nil? || rename.empty? + + (target_dir, target_name) = ::File.split(rename) + (source_dir, source_name) = ::File.split(page.path) + source_name = page.filename_stripped + + # File.split gives us relative paths with ".", commiter.add_to_index doesn't like that. + target_dir = '' if target_dir == '.' + source_dir = '' if source_dir == '.' + target_dir = target_dir.gsub(/^\//, '') # rubocop:disable Style/RegexpLiteral + + # if the rename is a NOOP, abort + if source_dir == target_dir && source_name == target_name + return false + end + + multi_commit = !!commit[:committer] + committer = multi_commit ? commit[:committer] : Committer.new(self, commit) + + # This piece only works for multi_commit + # If we are in a commit batch and one of the previous operations + # has updated the page, any information we ask to the page can be outdated. + # Therefore, we should ask first to the current committer tree to see if + # there is any updated change. + raw_data = raw_data_in_committer(committer, source_dir, page.filename) || + raw_data_in_committer(committer, source_dir, "#{target_name}.#{Page.format_to_ext(page.format)}") || + page.raw_data + + committer.delete(page.path) + committer.add_to_index(target_dir, target_name, page.format, raw_data) + + committer.after_commit do |index, _sha| + @access.refresh + index.update_working_dir(source_dir, source_name, page.format) + index.update_working_dir(target_dir, target_name, page.format) + end + + multi_commit ? committer : committer.commit + end + + # Remove if https://github.com/gollum/gollum-lib/pull/292 has been merged + def raw_data_in_committer(committer, dir, filename) + data = nil + + [*dir.split(::File::SEPARATOR), filename].each do |key| + data = data ? 
data[key] : committer.tree[key] + break unless data + end + + data + end + end + + module Git + class Git + def tree_entry(commit, path) + pathname = Pathname.new(path) + tmp_entry = nil + + pathname.each_filename do |dir| + tmp_entry = if tmp_entry.nil? + commit.tree[dir] + else + @repo.lookup(tmp_entry[:oid])[dir] + end + + return nil unless tmp_entry + end + tmp_entry + end + end end end diff --git a/config/initializers/grape_route_helpers_fix.rb b/config/initializers/grape_route_helpers_fix.rb index d3cf9e453d0..612cca3dfbd 100644 --- a/config/initializers/grape_route_helpers_fix.rb +++ b/config/initializers/grape_route_helpers_fix.rb @@ -1,5 +1,21 @@ if defined?(GrapeRouteHelpers) module GrapeRouteHelpers + module AllRoutes + # Bringing in PR https://github.com/reprah/grape-route-helpers/pull/21 due to abandonment. + # + # Without the following fix, when two helper methods are the same, but have different arguments + # (for example: api_v1_cats_owners_path(id: 1) vs api_v1_cats_owners_path(id: 1, owner_id: 2)) + # if the helper method with the least number of arguments is defined first (because the route was defined first) + # then it will shadow the longer route. + # + # The fix is to sort descending by amount of arguments + def decorated_routes + @decorated_routes ||= all_routes + .map { |r| DecoratedRoute.new(r) } + .sort_by { |r| -r.dynamic_path_segments.count } + end + end + class DecoratedRoute # GrapeRouteHelpers gem tries to parse the versions # from a string, not supporting Grape `version` array definition. diff --git a/config/initializers/lograge.rb b/config/initializers/lograge.rb index 8560d24526f..49fdd23064c 100644 --- a/config/initializers/lograge.rb +++ b/config/initializers/lograge.rb @@ -1,3 +1,21 @@ +# Monkey patch lograge until https://github.com/roidrage/lograge/pull/241 is released +module Lograge + class RequestLogSubscriber < ActiveSupport::LogSubscriber + def strip_query_string(path) + index = path.index('?') + index ? path[0, index] : path + end + + def extract_location + location = Thread.current[:lograge_location] + return {} unless location + + Thread.current[:lograge_location] = nil + { location: strip_query_string(location) } + end + end +end + # Only use Lograge for Rails unless Sidekiq.server? filename = File.join(Rails.root, 'log', "#{Rails.env}_json.log") @@ -12,9 +30,14 @@ unless Sidekiq.server? 
config.lograge.logger = ActiveSupport::Logger.new(filename) # Add request parameters to log output config.lograge.custom_options = lambda do |event| + params = event.payload[:params] + .except(*%w(controller action format)) + .each_pair + .map { |k, v| { key: k, value: v } } + payload = { time: event.time.utc.iso8601(3), - params: event.payload[:params].except(*%w(controller action format)), + params: params, remote_ip: event.payload[:remote_ip], user_id: event.payload[:user_id], username: event.payload[:username] diff --git a/config/initializers/mime_types.rb b/config/initializers/mime_types.rb index 5e3e4c966cb..e9326653cbe 100644 --- a/config/initializers/mime_types.rb +++ b/config/initializers/mime_types.rb @@ -14,4 +14,4 @@ Mime::Type.register "video/webm", :webm Mime::Type.register "video/ogg", :ogv Mime::Type.unregister :json -Mime::Type.register 'application/json', :json, %w(application/vnd.git-lfs+json application/json) +Mime::Type.register 'application/json', :json, [LfsRequest::CONTENT_TYPE, 'application/json'] diff --git a/config/initializers/new_framework_defaults.rb b/config/initializers/new_framework_defaults.rb new file mode 100644 index 00000000000..2d130bc0bf8 --- /dev/null +++ b/config/initializers/new_framework_defaults.rb @@ -0,0 +1,29 @@ +# Remove this `if` condition when upgraded to rails 5.0. +# The body must be kept. +if Gitlab.rails5? + # Be sure to restart your server when you modify this file. + # + # This file contains migration options to ease your Rails 5.0 upgrade. + # + # Once upgraded flip defaults one by one to migrate to the new default. + # + # Read the Guide for Upgrading Ruby on Rails for more info on each option. + + Rails.application.config.action_controller.raise_on_unfiltered_parameters = true + + # Enable per-form CSRF tokens. Previous versions had false. + Rails.application.config.action_controller.per_form_csrf_tokens = false + + # Enable origin-checking CSRF mitigation. Previous versions had false. + Rails.application.config.action_controller.forgery_protection_origin_check = false + + # Make Ruby 2.4 preserve the timezone of the receiver when calling `to_time`. + # Previous versions had false. + ActiveSupport.to_time_preserves_timezone = false + + # Require `belongs_to` associations by default. Previous versions had false. + Rails.application.config.active_record.belongs_to_required_by_default = false + + # Do not halt callback chains when a callback returns false. Previous versions had true. + ActiveSupport.halt_callback_chains_on_return_false = true +end diff --git a/config/initializers/omniauth.rb b/config/initializers/omniauth.rb index e9e1f1c4e9b..00baea08613 100644 --- a/config/initializers/omniauth.rb +++ b/config/initializers/omniauth.rb @@ -1,6 +1,6 @@ -if Gitlab::LDAP::Config.enabled? +if Gitlab::Auth::LDAP::Config.enabled? module OmniAuth::Strategies - Gitlab::LDAP::Config.available_servers.each do |server| + Gitlab::Auth::LDAP::Config.available_servers.each do |server| # do not redeclare LDAP next if server['provider_name'] == 'ldap' diff --git a/config/initializers/peek.rb b/config/initializers/peek.rb index 1cff355346c..ba04a2bf5fa 100644 --- a/config/initializers/peek.rb +++ b/config/initializers/peek.rb @@ -2,23 +2,27 @@ Rails.application.config.peek.adapter = :redis, { client: ::Redis.new(Gitlab::Re Peek.into Peek::Views::Host Peek.into Peek::Views::PerformanceBar + if Gitlab::Database.mysql? require 'peek-mysql2' PEEK_DB_CLIENT = ::Mysql2::Client PEEK_DB_VIEW = Peek::Views::Mysql2 -else +elsif Gitlab::Database.postgresql? 
require 'peek-pg' PEEK_DB_CLIENT = ::PG::Connection PEEK_DB_VIEW = Peek::Views::PG +else + raise "Unsupported database adapter for peek!" end + Peek.into PEEK_DB_VIEW +Peek.into Peek::Views::Gitaly +Peek.into Peek::Views::Rblineprof Peek.into Peek::Views::Redis Peek.into Peek::Views::Sidekiq -Peek.into Peek::Views::Rblineprof Peek.into Peek::Views::GC -Peek.into Peek::Views::Gitaly -# rubocop:disable Style/ClassAndModuleCamelCase +# rubocop:disable Naming/ClassAndModuleCamelCase class PEEK_DB_CLIENT class << self attr_accessor :query_details diff --git a/config/initializers/query_limiting.rb b/config/initializers/query_limiting.rb new file mode 100644 index 00000000000..66864d1898e --- /dev/null +++ b/config/initializers/query_limiting.rb @@ -0,0 +1,9 @@ +if Gitlab::QueryLimiting.enable? + require_dependency 'gitlab/query_limiting/active_support_subscriber' + require_dependency 'gitlab/query_limiting/transaction' + require_dependency 'gitlab/query_limiting/middleware' + + Gitlab::Application.configure do |config| + config.middleware.use(Gitlab::QueryLimiting::Middleware) + end +end diff --git a/config/initializers/rack_attack_global.rb b/config/initializers/rack_attack_global.rb index 9453df2ec5a..a90516eee7d 100644 --- a/config/initializers/rack_attack_global.rb +++ b/config/initializers/rack_attack_global.rb @@ -26,6 +26,7 @@ class Rack::Attack throttle('throttle_unauthenticated', Gitlab::Throttle.unauthenticated_options) do |req| Gitlab::Throttle.settings.throttle_unauthenticated_enabled && req.unauthenticated? && + !req.api_internal_request? && req.ip end @@ -54,6 +55,10 @@ class Rack::Attack path.start_with?('/api') end + def api_internal_request? + path =~ %r{^/api/v\d+/internal/} + end + def web_request? !api_request? end diff --git a/config/initializers/rugged_use_gitlab_git_attributes.rb b/config/initializers/rugged_use_gitlab_git_attributes.rb deleted file mode 100644 index 1cfb3bcb4bd..00000000000 --- a/config/initializers/rugged_use_gitlab_git_attributes.rb +++ /dev/null @@ -1,28 +0,0 @@ -# We don't want to ever call Rugged::Repository#fetch_attributes, because it has -# a lot of I/O overhead: -# <https://gitlab.com/gitlab-org/gitlab_git/commit/340e111e040ae847b614d35b4d3173ec48329015> -# -# While we don't do this from within the GitLab source itself, the Linguist gem -# has a dependency on Rugged and uses the gitattributes file when calculating -# repository-wide language statistics: -# <https://github.com/github/linguist/blob/v4.7.0/lib/linguist/lazy_blob.rb#L33-L36> -# -# The options passed by Linguist are those assumed by Gitlab::Git::Attributes -# anyway, and there is no great efficiency gain from just fetching the listed -# attributes with our implementation, so we ignore the additional arguments. -# -module Rugged - class Repository - module UseGitlabGitAttributes - def fetch_attributes(name, *) - attributes.attributes(name) - end - - def attributes - @attributes ||= Gitlab::Git::Attributes.new(path) - end - end - - prepend UseGitlabGitAttributes - end -end diff --git a/config/initializers/sidekiq.rb b/config/initializers/sidekiq.rb index 0f164e628f9..f6803eb0b5a 100644 --- a/config/initializers/sidekiq.rb +++ b/config/initializers/sidekiq.rb @@ -5,16 +5,23 @@ queues_config_hash[:namespace] = Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE # Default is to retry 25 times with exponential backoff. That's too much. 
Sidekiq.default_worker_options = { retry: 3 } +enable_json_logs = Gitlab.config.sidekiq.log_format == 'json' + Sidekiq.configure_server do |config| config.redis = queues_config_hash config.server_middleware do |chain| - chain.add Gitlab::SidekiqMiddleware::ArgumentsLogger if ENV['SIDEKIQ_LOG_ARGUMENTS'] - chain.add Gitlab::SidekiqMiddleware::MemoryKiller if ENV['SIDEKIQ_MEMORY_KILLER_MAX_RSS'] + chain.add Gitlab::SidekiqMiddleware::ArgumentsLogger if ENV['SIDEKIQ_LOG_ARGUMENTS'] && !enable_json_logs + chain.add Gitlab::SidekiqMiddleware::Shutdown chain.add Gitlab::SidekiqMiddleware::RequestStoreMiddleware unless ENV['SIDEKIQ_REQUEST_STORE'] == '0' chain.add Gitlab::SidekiqStatus::ServerMiddleware end + if enable_json_logs + Sidekiq.logger.formatter = Gitlab::SidekiqLogging::JSONFormatter.new + config.options[:job_logger] = Gitlab::SidekiqLogging::StructuredLogger + end + config.client_middleware do |chain| chain.add Gitlab::SidekiqStatus::ClientMiddleware end diff --git a/config/initializers/warden.rb b/config/initializers/warden.rb index 3d83fb92d56..ee034d21eae 100644 --- a/config/initializers/warden.rb +++ b/config/initializers/warden.rb @@ -2,4 +2,8 @@ Rails.application.configure do |config| Warden::Manager.after_set_user do |user, auth, opts| Gitlab::Auth::UniqueIpsLimiter.limit_user!(user) end + + Warden::Manager.before_failure do |env, opts| + Gitlab::Auth::BlockedUserTracker.log_if_user_blocked(env) + end end diff --git a/config/karma.config.js b/config/karma.config.js index 609c4780ccc..3be4d5220f4 100644 --- a/config/karma.config.js +++ b/config/karma.config.js @@ -5,7 +5,7 @@ var ROOT_PATH = path.resolve(__dirname, '..'); // remove problematic plugins if (webpackConfig.plugins) { - webpackConfig.plugins = webpackConfig.plugins.filter(function (plugin) { + webpackConfig.plugins = webpackConfig.plugins.filter(function(plugin) { return !( plugin instanceof webpack.optimize.CommonsChunkPlugin || plugin instanceof webpack.optimize.ModuleConcatenationPlugin || @@ -26,6 +26,8 @@ webpackConfig.devtool = 'cheap-inline-source-map'; // Karma configuration module.exports = function(config) { + process.env.TZ = 'Etc/UTC'; + var progressReporter = process.env.CI ? 'mocha' : 'progress'; var karmaConfig = { @@ -40,12 +42,12 @@ module.exports = function(config) { // escalated kernel privileges (e.g. 
docker run --cap-add=CAP_SYS_ADMIN) '--no-sandbox', ], - } + }, }, frameworks: ['jasmine'], files: [ { pattern: 'spec/javascripts/test_bundle.js', watched: false }, - { pattern: 'spec/javascripts/fixtures/**/*@(.json|.html|.html.raw)', included: false }, + { pattern: 'spec/javascripts/fixtures/**/*@(.json|.html|.html.raw|.png)', included: false }, ], preprocessors: { 'spec/javascripts/**/*.js': ['webpack', 'sourcemap'], @@ -61,7 +63,7 @@ module.exports = function(config) { reports: ['html', 'text-summary'], dir: 'coverage-javascript/', subdir: '.', - fixWebpackSourcePaths: true + fixWebpackSourcePaths: true, }; karmaConfig.browserNoActivityTimeout = 60000; // 60 seconds } diff --git a/config/locales/doorkeeper.en.yml b/config/locales/doorkeeper.en.yml index b1c71095d4f..889111282ef 100644 --- a/config/locales/doorkeeper.en.yml +++ b/config/locales/doorkeeper.en.yml @@ -68,7 +68,7 @@ en: read_user: Read-only access to the user's profile information, like username, public email and full name openid: - The ability to authenticate using GitLab, and read-only access to the user's profile information + The ability to authenticate using GitLab, and read-only access to the user's profile information and group memberships sudo: Access to the Sudo feature, to perform API actions as any user in the system (only available for admins) flash: diff --git a/config/locales/en.yml b/config/locales/en.yml index 8932db138d9..795e5d4e6bc 100644 --- a/config/locales/en.yml +++ b/config/locales/en.yml @@ -2,6 +2,18 @@ # See https://github.com/svenfuchs/rails-i18n/tree/master/rails%2Flocale for starting points. en: + hello: "Hello world" + activerecord: + attributes: + issue_link: + source: Source issue + target: Target issue + errors: + messages: + label_already_exists_at_group_level: "already exists at group level for %{group}. Please choose another one." 
+ wrong_size: "is the wrong size (should be %{file_size})" + size_too_small: "is too small (should be at least %{file_size})" + size_too_big: "is too big (should be at most %{file_size})" views: pagination: previous: "Prev" diff --git a/config/prometheus/additional_metrics.yml b/config/prometheus/additional_metrics.yml index 601a86490d4..10ca612b246 100644 --- a/config/prometheus/additional_metrics.yml +++ b/config/prometheus/additional_metrics.yml @@ -26,7 +26,7 @@ weight: 1 queries: - query_range: 'avg(nginx_upstream_response_msecs_avg{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"})' - label: Average + label: Pod average unit: ms - title: "HTTP Error Rate" y_label: "HTTP 500 Errors / Sec" @@ -139,21 +139,39 @@ - group: System metrics (Kubernetes) priority: 5 metrics: - - title: "Memory Usage" - y_label: "Memory Usage (MB)" + - title: "Memory Usage (Total)" + y_label: "Total Memory Used" required_metrics: - container_memory_usage_bytes - weight: 1 + weight: 4 queries: - - query_range: '(sum(avg(container_memory_usage_bytes{container_name!="POD",environment="%{ci_environment_slug}"}) without (job))) / count(avg(container_memory_usage_bytes{container_name!="POD",environment="%{ci_environment_slug}"}) without (job)) /1024/1024' - label: Average + - query_range: 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024' + label: Total + unit: GB + - title: "Core Usage (Total)" + y_label: "Total Cores" + required_metrics: + - container_cpu_usage_seconds_total + weight: 3 + queries: + - query_range: 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job)' + label: Total + unit: "cores" + - title: "Memory Usage (Pod average)" + y_label: "Memory Used per Pod" + required_metrics: + - container_memory_usage_bytes + weight: 2 + queries: + - query_range: 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024' + label: Pod average unit: MB - - title: "CPU Utilization" - y_label: "CPU Utilization (%)" + - title: "Core Usage (Pod average)" + y_label: "Cores per Pod" required_metrics: - container_cpu_usage_seconds_total weight: 1 queries: - - query_range: 'sum(avg(rate(container_cpu_usage_seconds_total{container_name!="POD",environment="%{ci_environment_slug}"}[2m])) without (job)) * 100' - label: Average - unit: "%"
\ No newline at end of file + - query_range: 'avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job) / count(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}[15m])) by (pod_name))' + label: Pod average + unit: "cores"
\ No newline at end of file diff --git a/config/routes.rb b/config/routes.rb index f162043dd5e..52726f94753 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -43,10 +43,8 @@ Rails.application.routes.draw do get 'liveness' => 'health#liveness' get 'readiness' => 'health#readiness' post 'storage_check' => 'health#storage_check' - get 'ide' => 'ide#index' - get 'ide/*vueroute' => 'ide#index', format: false resources :metrics, only: [:index] - mount Peek::Railtie => '/peek' + mount Peek::Railtie => '/peek', as: 'peek_routes' # Boards resources shared between group and projects resources :boards, only: [] do @@ -60,6 +58,12 @@ Rails.application.routes.draw do resources :issues, module: :boards, only: [:index, :update] end + + # UserCallouts + resources :user_callouts, only: [:create] + + get 'ide' => 'ide#index' + get 'ide/*vueroute' => 'ide#index', format: false end # Koding route diff --git a/config/routes/admin.rb b/config/routes/admin.rb index e22fb440abc..3cca1210e39 100644 --- a/config/routes/admin.rb +++ b/config/routes/admin.rb @@ -1,5 +1,5 @@ namespace :admin do - resources :users, constraints: { id: /[a-zA-Z.\/0-9_\-]+/ } do + resources :users, constraints: { id: %r{[a-zA-Z./0-9_\-]+} } do resources :keys, only: [:show, :destroy] resources :identities, except: [:show] resources :impersonation_tokens, only: [:index, :create] do @@ -24,6 +24,8 @@ namespace :admin do resource :impersonation, only: :destroy resources :abuse_reports, only: [:index, :destroy] + resources :gitaly_servers, only: [:index] + resources :spam_logs, only: [:index, :destroy] do member do post :mark_as_ham diff --git a/config/routes/ci.rb b/config/routes/ci.rb index 60c1724bc05..ebd321ed097 100644 --- a/config/routes/ci.rb +++ b/config/routes/ci.rb @@ -1,5 +1,5 @@ namespace :ci do - resource :lint, only: [:show, :create] + resource :lint, only: :show root to: redirect('') end diff --git a/config/routes/git_http.rb b/config/routes/git_http.rb index a53c94326d4..ec5c68f81df 100644 --- a/config/routes/git_http.rb +++ b/config/routes/git_http.rb @@ -16,6 +16,13 @@ scope(path: '*namespace_id/:project_id', get '/*oid', action: :deprecated end + scope(path: 'info/lfs') do + resources :lfs_locks, controller: :lfs_locks_api, path: 'locks' do + post :unlock, on: :member + post :verify, on: :collection + end + end + # GitLab LFS object storage scope(path: 'gitlab-lfs/objects/*oid', controller: :lfs_storage, constraints: { oid: /[a-f0-9]{64}/ }) do get '/', action: :download @@ -33,7 +40,7 @@ scope(path: '*namespace_id/:project_id', # /info/refs?service=git-receive-pack, but nothing else. # git_http_handshake = lambda do |request| - ProjectUrlConstrainer.new.matches?(request) && + ::Constraints::ProjectUrlConstrainer.new.matches?(request) && (request.query_string.blank? 
|| request.query_string.match(/\Aservice=git-(upload|receive)-pack\z/)) end diff --git a/config/routes/group.rb b/config/routes/group.rb index 976837a246d..170508e893d 100644 --- a/config/routes/group.rb +++ b/config/routes/group.rb @@ -1,10 +1,8 @@ -require 'constraints/group_url_constrainer' - resources :groups, only: [:index, :new, :create] do post :preview_markdown end -constraints(GroupUrlConstrainer.new) do +constraints(::Constraints::GroupUrlConstrainer.new) do scope(path: 'groups/*id', controller: :groups, constraints: { id: Gitlab::PathRegex.full_namespace_route_regex, format: /(html|json|atom)/ }) do @@ -14,6 +12,7 @@ constraints(GroupUrlConstrainer.new) do get :merge_requests, as: :merge_requests_group get :projects, as: :projects_group get :activity, as: :activity_group + put :transfer, as: :transfer_group end get '/', action: :show, as: :group_canonical @@ -25,9 +24,10 @@ constraints(GroupUrlConstrainer.new) do constraints: { group_id: Gitlab::PathRegex.full_namespace_route_regex }) do namespace :settings do resource :ci_cd, only: [:show], controller: 'ci_cd' + resources :badges, only: [:index] end - resources :variables, only: [:index, :show, :update, :create, :destroy] + resource :variables, only: [:show, :update] resources :children, only: [:index] @@ -35,7 +35,7 @@ constraints(GroupUrlConstrainer.new) do post :toggle_subscription, on: :member end - resources :milestones, constraints: { id: /[^\/]+/ }, only: [:index, :show, :edit, :update, :new, :create] do + resources :milestones, constraints: { id: %r{[^/]+} }, only: [:index, :show, :edit, :update, :new, :create] do member do get :merge_requests get :participants @@ -52,9 +52,12 @@ constraints(GroupUrlConstrainer.new) do resources :uploads, only: [:create] do collection do - get ":secret/:filename", action: :show, as: :show, constraints: { filename: /[^\/]+/ } + get ":secret/:filename", action: :show, as: :show, constraints: { filename: %r{[^/]+} } end end + + # On CE only index and show actions are needed + resources :boards, only: [:index, :show] end scope(path: '*id', diff --git a/config/routes/project.rb b/config/routes/project.rb index 239b5480321..2a1bcb8cde2 100644 --- a/config/routes/project.rb +++ b/config/routes/project.rb @@ -1,10 +1,8 @@ -require 'constraints/project_url_constrainer' - resources :projects, only: [:index, :new, :create] draw :git_http -constraints(ProjectUrlConstrainer.new) do +constraints(::Constraints::ProjectUrlConstrainer.new) do # If the route has a wildcard segment, the segment has a regex constraint, # the segment is potentially followed by _another_ wildcard segment, and # the `format` option is not set to false, we need to specify that @@ -40,7 +38,7 @@ constraints(ProjectUrlConstrainer.new) do # # Templates # - get '/templates/:template_type/:key' => 'templates#show', as: :template + get '/templates/:template_type/:key' => 'templates#show', as: :template, constraints: { key: %r{[^/]+} } resource :avatar, only: [:show, :destroy] resources :commit, only: [:show], constraints: { id: /\h{7,40}/ } do @@ -50,11 +48,16 @@ constraints(ProjectUrlConstrainer.new) do post :revert post :cherry_pick get :diff_for_path + get :merge_requests end end - resource :pages, only: [:show, :destroy] do - resources :domains, only: [:show, :new, :create, :destroy], controller: 'pages_domains', constraints: { id: /[^\/]+/ } + resource :pages, only: [:show, :update, :destroy] do + resources :domains, except: :index, controller: 'pages_domains', constraints: { id: %r{[^/]+} } do + member do + post :verify + end 
+ end end resources :snippets, concerns: :awardable, constraints: { id: /\d+/ } do @@ -64,7 +67,7 @@ constraints(ProjectUrlConstrainer.new) do end end - resources :services, constraints: { id: /[^\/]+/ }, only: [:index, :edit, :update] do + resources :services, constraints: { id: %r{[^/]+} }, only: [:edit, :update] do member do put :test end @@ -73,7 +76,9 @@ constraints(ProjectUrlConstrainer.new) do resource :mattermost, only: [:new, :create] namespace :prometheus do - get :active_metrics + resources :metrics, constraints: { id: %r{[^\/]+} }, only: [] do + get :active_common, on: :collection + end end resources :deploy_keys, constraints: { id: /\d+/ }, only: [:index, :new, :create, :edit, :update] do @@ -83,6 +88,12 @@ constraints(ProjectUrlConstrainer.new) do end end + resources :deploy_tokens, constraints: { id: /\d+/ }, only: [] do + member do + put :revoke + end + end + resources :forks, only: [:index, :new, :create] resource :import, only: [:new, :create, :show] @@ -96,6 +107,8 @@ constraints(ProjectUrlConstrainer.new) do post :toggle_subscription post :remove_wip post :assign_related_issues + get :discussions, format: :json + post :rebase scope constraints: { format: nil }, action: :show do get :commits, defaults: { tab: 'commits' } @@ -123,7 +136,7 @@ constraints(ProjectUrlConstrainer.new) do post :bulk_update end - resources :discussions, only: [], constraints: { id: /\h{40}/ } do + resources :discussions, only: [:show], constraints: { id: /\h{40}/ } do member do post :resolve delete :resolve, action: :unresolve @@ -154,7 +167,8 @@ constraints(ProjectUrlConstrainer.new) do end end - resources :variables, only: [:index, :show, :update, :create, :destroy] + resource :variables, only: [:show, :update] + resources :triggers, only: [:index, :create, :edit, :update, :destroy] do member do post :take_ownership @@ -241,6 +255,8 @@ constraints(ProjectUrlConstrainer.new) do end scope '-' do + get 'archive/*id', constraints: { format: Gitlab::PathRegex.archive_formats_regex, id: /.+?/ }, to: 'repositories#archive', as: 'archive' + resources :jobs, only: [:index, :show], constraints: { id: /\d+/ } do collection do post :cancel_all @@ -272,6 +288,10 @@ constraints(ProjectUrlConstrainer.new) do post :keep end end + + namespace :ci do + resource :lint, only: [:show, :create] + end end draw :legacy_builds @@ -344,7 +364,7 @@ constraints(ProjectUrlConstrainer.new) do end end - resources :project_members, except: [:show, :new, :edit], constraints: { id: /[a-zA-Z.\/0-9_\-#%+]+/ }, concerns: :access_requestable do + resources :project_members, except: [:show, :new, :edit], constraints: { id: %r{[a-zA-Z./0-9_\-#%+]+} }, concerns: :access_requestable do collection do delete :leave @@ -371,20 +391,21 @@ constraints(ProjectUrlConstrainer.new) do get 'noteable/:target_type/:target_id/notes' => 'notes#index', as: 'noteable_notes' - resources :boards, only: [:index, :show, :create, :update, :destroy] + # On CE only index and show are needed + resources :boards, only: [:index, :show] resources :todos, only: [:create] resources :uploads, only: [:create] do collection do - get ":secret/:filename", action: :show, as: :show, constraints: { filename: /[^\/]+/ } + get ":secret/:filename", action: :show, as: :show, constraints: { filename: %r{[^/]+} } end end resources :runners, only: [:index, :edit, :update, :destroy, :show] do member do - get :resume - get :pause + post :resume + post :pause end collection do @@ -407,9 +428,14 @@ constraints(ProjectUrlConstrainer.new) do end namespace :settings do get :members, 
to: redirect("%{namespace_id}/%{project_id}/project_members") - resource :ci_cd, only: [:show], controller: 'ci_cd' + resource :ci_cd, only: [:show, :update], controller: 'ci_cd' do + post :reset_cache + end resource :integrations, only: [:show] - resource :repository, only: [:show], controller: :repository + resource :repository, only: [:show], controller: :repository do + post :create_deploy_token, path: 'deploy_token/create' + end + resources :badges, only: [:index] end # Since both wiki and repository routing contains wildcard characters diff --git a/config/routes/repository.rb b/config/routes/repository.rb index 9ffdebbcff1..9e506a1a43a 100644 --- a/config/routes/repository.rb +++ b/config/routes/repository.rb @@ -2,10 +2,11 @@ resource :repository, only: [:create] do member do - get ':ref/archive', constraints: { format: Gitlab::PathRegex.archive_formats_regex, ref: /.+/ }, action: 'archive', as: 'archive' - # deprecated since GitLab 9.5 - get 'archive', constraints: { format: Gitlab::PathRegex.archive_formats_regex }, as: 'archive_alternative' + get 'archive', constraints: { format: Gitlab::PathRegex.archive_formats_regex }, as: 'archive_alternative', defaults: { append_sha: true } + + # deprecated since GitLab 10.7 + get ':id/archive', constraints: { format: Gitlab::PathRegex.archive_formats_regex, id: /.+/ }, action: 'archive', as: 'archive_deprecated', defaults: { append_sha: true } end end @@ -49,6 +50,7 @@ scope format: false do end end + get '/branches/:state', to: 'branches#index', as: :branches_filtered, constraints: { state: /active|stale|all/ } resources :branches, only: [:index, :new, :create, :destroy] delete :merged_branches, controller: 'branches', action: :destroy_all_merged resources :tags, only: [:index, :show, :new, :create, :destroy] do diff --git a/config/routes/uploads.rb b/config/routes/uploads.rb index d7bca8310e4..6370645bcb9 100644 --- a/config/routes/uploads.rb +++ b/config/routes/uploads.rb @@ -2,17 +2,17 @@ scope path: :uploads do # Note attachments and User/Group/Project avatars get "-/system/:model/:mounted_as/:id/:filename", to: "uploads#show", - constraints: { model: /note|user|group|project/, mounted_as: /avatar|attachment/, filename: /[^\/]+/ } + constraints: { model: /note|user|group|project/, mounted_as: /avatar|attachment/, filename: %r{[^/]+} } # show uploads for models, snippets (notes) available for now get '-/system/:model/:id/:secret/:filename', to: 'uploads#show', - constraints: { model: /personal_snippet/, id: /\d+/, filename: /[^\/]+/ } + constraints: { model: /personal_snippet/, id: /\d+/, filename: %r{[^/]+} } # show temporary uploads get '-/system/temp/:secret/:filename', to: 'uploads#show', - constraints: { filename: /[^\/]+/ } + constraints: { filename: %r{[^/]+} } # Appearance get "-/system/:model/:mounted_as/:id/:filename", @@ -22,7 +22,7 @@ scope path: :uploads do # Project markdown uploads get ":namespace_id/:project_id/:secret/:filename", to: "projects/uploads#show", - constraints: { namespace_id: /[a-zA-Z.0-9_\-]+/, project_id: /[a-zA-Z.0-9_\-]+/, filename: /[^\/]+/ } + constraints: { namespace_id: /[a-zA-Z.0-9_\-]+/, project_id: /[a-zA-Z.0-9_\-]+/, filename: %r{[^/]+} } # create uploads for models, snippets (notes) available for now post ':model', @@ -34,4 +34,4 @@ end # Redirect old note attachments path to new uploads path. 
get "files/note/:id/:filename", to: redirect("uploads/note/attachment/%{id}/%{filename}"), - constraints: { filename: /[^\/]+/ } + constraints: { filename: %r{[^/]+} } diff --git a/config/routes/user.rb b/config/routes/user.rb index 733a3f6ce9a..57fb37530bb 100644 --- a/config/routes/user.rb +++ b/config/routes/user.rb @@ -1,5 +1,3 @@ -require 'constraints/user_url_constrainer' - devise_for :users, controllers: { omniauth_callbacks: :omniauth_callbacks, registrations: :registrations, passwords: :passwords, @@ -35,7 +33,7 @@ scope(constraints: { username: Gitlab::PathRegex.root_namespace_route_regex }) d get '/u/:username/contributed', to: redirect('users/%{username}/contributed') end -constraints(UserUrlConstrainer.new) do +constraints(::Constraints::UserUrlConstrainer.new) do # Get all keys of user get ':username.keys' => 'profiles/keys#get_keys', constraints: { username: Gitlab::PathRegex.root_namespace_route_regex } diff --git a/config/sidekiq_queues.yml b/config/sidekiq_queues.yml index 31a38f2b508..c811034b29d 100644 --- a/config/sidekiq_queues.yml +++ b/config/sidekiq_queues.yml @@ -67,3 +67,8 @@ - [gcp_cluster, 1] - [project_migrate_hashed_storage, 1] - [storage_migrator, 1] + - [pages_domain_verification, 1] + - [object_storage_upload, 1] + - [object_storage, 1] + - [plugin, 1] + - [pipeline_background, 1] diff --git a/config/spring.rb b/config/spring.rb new file mode 100644 index 00000000000..c9119b40c08 --- /dev/null +++ b/config/spring.rb @@ -0,0 +1,6 @@ +%w( + .ruby-version + .rbenv-vars + tmp/restart.txt + tmp/caching-dev.txt +).each { |path| Spring.watch(path) } diff --git a/config/svg.config.js b/config/svg.config.js deleted file mode 100644 index bb27f0caeef..00000000000 --- a/config/svg.config.js +++ /dev/null @@ -1,48 +0,0 @@ -/* eslint-disable no-commonjs */ -const path = require('path'); -const fs = require('fs'); - -const sourcePath = path.join('node_modules', '@gitlab-org/gitlab-svgs', 'dist'); -const sourcePathIllustrations = path.join('node_modules', '@gitlab-org/gitlab-svgs', 'dist', 'illustrations'); -const destPath = path.normalize(path.join('app', 'assets', 'images')); - -// Actual Task copying the 2 files + all illustrations -copyFileSync(path.join(sourcePath, 'icons.svg'), destPath); -copyFileSync(path.join(sourcePath, 'icons.json'), destPath); -copyFolderRecursiveSync(sourcePathIllustrations, destPath); - -// Helper Functions -function copyFileSync(source, target) { - var targetFile = target; - //if target is a directory a new file with the same name will be created - if (fs.existsSync(target)) { - if (fs.lstatSync(target).isDirectory()) { - targetFile = path.join(target, path.basename(source)); - } - } - console.log(`Copy SVG File : ${targetFile}`); - fs.writeFileSync(targetFile, fs.readFileSync(source)); -} - -function copyFolderRecursiveSync(source, target) { - var files = []; - - //check if folder needs to be created or integrated - var targetFolder = path.join(target, path.basename(source)); - if (!fs.existsSync(targetFolder)) { - fs.mkdirSync(targetFolder); - } - - //copy - if (fs.lstatSync(source).isDirectory()) { - files = fs.readdirSync(source); - files.forEach(function (file) { - var curSource = path.join(source, file); - if (fs.lstatSync(curSource).isDirectory()) { - copyFolderRecursiveSync(curSource, targetFolder); - } else { - copyFileSync(curSource, targetFolder); - } - }); - } -} diff --git a/config/unicorn.rb.example.development b/config/unicorn.rb.example.development index 3cd00d53a15..0df028648d1 100644 --- 
a/config/unicorn.rb.example.development +++ b/config/unicorn.rb.example.development @@ -1,2 +1,15 @@ worker_processes 2 timeout 60 + +before_fork do |server, worker| + if /darwin/ =~ RUBY_PLATFORM + require 'fiddle' + + # Dynamically load Foundation.framework, ~implicitly~ initialising + # the Objective-C runtime before any forking happens in Unicorn + # + # From https://bugs.ruby-lang.org/issues/14009 + Fiddle.dlopen '/System/Library/Frameworks/Foundation.framework/Foundation' + end +end + diff --git a/config/webpack.config.js b/config/webpack.config.js index 1218b0ef208..39e9fbbd530 100644 --- a/config/webpack.config.js +++ b/config/webpack.config.js @@ -1,93 +1,60 @@ -'use strict'; - -var fs = require('fs'); -var path = require('path'); -var webpack = require('webpack'); -var StatsWriterPlugin = require('webpack-stats-plugin').StatsWriterPlugin; -var CopyWebpackPlugin = require('copy-webpack-plugin'); -var CompressionPlugin = require('compression-webpack-plugin'); -var NameAllModulesPlugin = require('name-all-modules-plugin'); -var BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin; -var WatchMissingNodeModulesPlugin = require('react-dev-utils/WatchMissingNodeModulesPlugin'); - -var ROOT_PATH = path.resolve(__dirname, '..'); -var IS_PRODUCTION = process.env.NODE_ENV === 'production'; -var IS_DEV_SERVER = process.argv.join(' ').indexOf('webpack-dev-server') !== -1; -var DEV_SERVER_HOST = process.env.DEV_SERVER_HOST || 'localhost'; -var DEV_SERVER_PORT = parseInt(process.env.DEV_SERVER_PORT, 10) || 3808; -var DEV_SERVER_LIVERELOAD = process.env.DEV_SERVER_LIVERELOAD !== 'false'; -var WEBPACK_REPORT = process.env.WEBPACK_REPORT; -var NO_COMPRESSION = process.env.NO_COMPRESSION; - -var config = { - // because sqljs requires fs. 
- node: { - fs: "empty" - }, +const crypto = require('crypto'); +const fs = require('fs'); +const path = require('path'); +const glob = require('glob'); +const webpack = require('webpack'); +const StatsWriterPlugin = require('webpack-stats-plugin').StatsWriterPlugin; +const CopyWebpackPlugin = require('copy-webpack-plugin'); +const CompressionPlugin = require('compression-webpack-plugin'); +const NameAllModulesPlugin = require('name-all-modules-plugin'); +const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin; +const WatchMissingNodeModulesPlugin = require('react-dev-utils/WatchMissingNodeModulesPlugin'); + +const ROOT_PATH = path.resolve(__dirname, '..'); +const IS_PRODUCTION = process.env.NODE_ENV === 'production'; +const IS_DEV_SERVER = process.argv.join(' ').indexOf('webpack-dev-server') !== -1; +const DEV_SERVER_HOST = process.env.DEV_SERVER_HOST || 'localhost'; +const DEV_SERVER_PORT = parseInt(process.env.DEV_SERVER_PORT, 10) || 3808; +const DEV_SERVER_LIVERELOAD = process.env.DEV_SERVER_LIVERELOAD !== 'false'; +const WEBPACK_REPORT = process.env.WEBPACK_REPORT; +const NO_COMPRESSION = process.env.NO_COMPRESSION; + +let autoEntriesCount = 0; +let watchAutoEntries = []; + +function generateEntries() { + // generate automatic entry points + const autoEntries = {}; + const pageEntries = glob.sync('pages/**/index.js', { + cwd: path.join(ROOT_PATH, 'app/assets/javascripts'), + }); + watchAutoEntries = [path.join(ROOT_PATH, 'app/assets/javascripts/pages/')]; + + function generateAutoEntries(path, prefix = '.') { + const chunkPath = path.replace(/\/index\.js$/, ''); + const chunkName = chunkPath.replace(/\//g, '.'); + autoEntries[chunkName] = `${prefix}/${path}`; + } + + pageEntries.forEach(path => generateAutoEntries(path)); + + autoEntriesCount = Object.keys(autoEntries).length; + + const manualEntries = { + common: './commons/index.js', + main: './main.js', + raven: './raven/index.js', + webpack_runtime: './webpack.js', + ide: './ide/index.js', + }; + + return Object.assign(manualEntries, autoEntries); +} + +const config = { context: path.join(ROOT_PATH, 'app/assets/javascripts'), - entry: { - account: './profile/account/index.js', - balsamiq_viewer: './blob/balsamiq_viewer.js', - blob: './blob_edit/blob_bundle.js', - boards: './boards/boards_bundle.js', - common: './commons/index.js', - common_vue: './vue_shared/vue_resource_interceptor.js', - cycle_analytics: './cycle_analytics/cycle_analytics_bundle.js', - commit_pipelines: './commit/pipelines/pipelines_bundle.js', - deploy_keys: './deploy_keys/index.js', - docs: './docs/docs_bundle.js', - diff_notes: './diff_notes/diff_notes_bundle.js', - environments: './environments/environments_bundle.js', - environments_folder: './environments/folder/environments_folder_bundle.js', - filtered_search: './filtered_search/filtered_search_bundle.js', - graphs: './graphs/graphs_bundle.js', - graphs_charts: './graphs/graphs_charts.js', - graphs_show: './graphs/graphs_show.js', - group: './group.js', - groups: './groups/index.js', - groups_list: './groups_list.js', - help: './help/help.js', - how_to_merge: './how_to_merge.js', - issue_show: './issue_show/index.js', - integrations: './integrations', - job_details: './jobs/job_details_bundle.js', - locale: './locale/index.js', - main: './main.js', - merge_conflicts: './merge_conflicts/merge_conflicts_bundle.js', - monitoring: './monitoring/monitoring_bundle.js', - network: './network/network_bundle.js', - notebook_viewer: './blob/notebook_viewer.js', - notes: 
'./notes/index.js', - pdf_viewer: './blob/pdf_viewer.js', - pipelines: './pipelines/pipelines_bundle.js', - pipelines_charts: './pipelines/pipelines_charts.js', - pipelines_details: './pipelines/pipeline_details_bundle.js', - pipelines_times: './pipelines/pipelines_times.js', - profile: './profile/profile_bundle.js', - project_import_gl: './projects/project_import_gitlab_project.js', - project_new: './projects/project_new.js', - prometheus_metrics: './prometheus_metrics', - protected_branches: './protected_branches', - protected_tags: './protected_tags', - registry_list: './registry/index.js', - ide: './ide/index.js', - sidebar: './sidebar/sidebar_bundle.js', - schedule_form: './pipeline_schedules/pipeline_schedule_form_bundle.js', - schedules_index: './pipeline_schedules/pipeline_schedules_index_bundle.js', - snippet: './snippet/snippet_bundle.js', - sketch_viewer: './blob/sketch_viewer.js', - stl_viewer: './blob/stl_viewer.js', - terminal: './terminal/terminal_bundle.js', - u2f: ['vendor/u2f'], - ui_development_kit: './ui_development_kit.js', - raven: './raven/index.js', - vue_merge_request_widget: './vue_merge_request_widget/index.js', - test: './test.js', - two_factor_auth: './two_factor_auth.js', - users: './users/index.js', - performance_bar: './performance_bar.js', - webpack_runtime: './webpack.js', - }, + + entry: generateEntries, output: { path: path.join(ROOT_PATH, 'public/assets/webpack'), @@ -119,7 +86,12 @@ var config = { { test: /\_worker\.js$/, use: [ - { loader: 'worker-loader' }, + { + loader: 'worker-loader', + options: { + inline: true, + }, + }, { loader: 'babel-loader' }, ], }, @@ -129,7 +101,28 @@ var config = { loader: 'file-loader', options: { name: '[name].[hash].[ext]', - } + }, + }, + { + test: /katex.min.css$/, + include: /node_modules\/katex\/dist/, + use: [ + { loader: 'style-loader' }, + { + loader: 'css-loader', + options: { + name: '[name].[hash].[ext]', + }, + }, + ], + }, + { + test: /\.(eot|ttf|woff|woff2)$/, + include: /node_modules\/katex\/dist\/fonts/, + loader: 'file-loader', + options: { + name: '[name].[hash].[ext]', + }, }, { test: /monaco-editor\/\w+\/vs\/loader\.js$/, @@ -137,10 +130,11 @@ var config = { { loader: 'exports-loader', options: 'l.global' }, { loader: 'imports-loader', options: 'l=>{},this=>l,AMDLoader=>this,module=>undefined' }, ], - } + }, ], noParse: [/monaco-editor\/\w+\/vs\//], + strictExportPresence: true, }, plugins: [ @@ -149,15 +143,15 @@ var config = { new StatsWriterPlugin({ filename: 'manifest.json', transform: function(data, opts) { - var stats = opts.compiler.getStats().toJson({ + const stats = opts.compiler.getStats().toJson({ chunkModules: false, source: false, chunks: false, modules: false, - assets: true + assets: true, }); return JSON.stringify(stats, null, 2); - } + }, }), // prevent pikaday from including moment.js @@ -174,67 +168,42 @@ var config = { new NameAllModulesPlugin(), // assign deterministic chunk ids - new webpack.NamedChunksPlugin((chunk) => { + new webpack.NamedChunksPlugin(chunk => { if (chunk.name) { return chunk.name; } - return chunk.mapModules((m) => { + + const moduleNames = []; + + function collectModuleNames(m) { + // handle ConcatenatedModule which does not have resource nor context set + if (m.modules) { + m.modules.forEach(collectModuleNames); + return; + } + const pagesBase = path.join(ROOT_PATH, 'app/assets/javascripts/pages'); + if (m.resource.indexOf(pagesBase) === 0) { - return path.relative(pagesBase, m.resource) - .replace(/\/index\.[a-z]+$/, '') - .replace(/\//g, '__'); + 
moduleNames.push( + path + .relative(pagesBase, m.resource) + .replace(/\/index\.[a-z]+$/, '') + .replace(/\//g, '__') + ); + } else { + moduleNames.push(path.relative(m.context, m.resource)); } - return path.relative(m.context, m.resource); - }).join('_'); - }), + } - // create cacheable common library bundle for all vue chunks - new webpack.optimize.CommonsChunkPlugin({ - name: 'common_vue', - chunks: [ - 'boards', - 'commit_pipelines', - 'cycle_analytics', - 'deploy_keys', - 'diff_notes', - 'environments', - 'environments_folder', - 'filtered_search', - 'groups', - 'issue_show', - 'job_details', - 'merge_conflicts', - 'monitoring', - 'notebook_viewer', - 'notes', - 'pdf_viewer', - 'pipelines', - 'pipelines_details', - 'registry_list', - 'ide', - 'schedule_form', - 'schedules_index', - 'sidebar', - 'vue_merge_request_widget', - ], - minChunks: function(module, count) { - return module.resource && (/vue_shared/).test(module.resource); - }, - }), + chunk.forEachModule(collectModuleNames); - // create cacheable common library bundle for all d3 chunks - new webpack.optimize.CommonsChunkPlugin({ - name: 'common_d3', - chunks: [ - 'graphs', - 'graphs_show', - 'monitoring', - 'users', - ], - minChunks: function (module, count) { - return module.resource && /d3-/.test(module.resource); - }, + const hash = crypto + .createHash('sha256') + .update(moduleNames.join('_')) + .digest('hex'); + + return `${moduleNames[0]}-${hash.substr(0, 6)}`; }), // create cacheable common library bundles @@ -242,13 +211,13 @@ var config = { names: ['main', 'common', 'webpack_runtime'], }), - // enable scope hoisting - new webpack.optimize.ModuleConcatenationPlugin(), - // copy pre-compiled vendor libraries verbatim new CopyWebpackPlugin([ { - from: path.join(ROOT_PATH, `node_modules/monaco-editor/${IS_PRODUCTION ? 'min' : 'dev'}/vs`), + from: path.join( + ROOT_PATH, + `node_modules/monaco-editor/${IS_PRODUCTION ? 
'min' : 'dev'}/vs` + ), to: 'monaco-editor/vs', transform: function(content, path) { if (/\.js$/.test(path) && !/worker/i.test(path) && !/typescript/i.test(path)) { @@ -261,24 +230,30 @@ var config = { ); } return content; - } - } + }, + }, ]), ], resolve: { extensions: ['.js'], alias: { - '~': path.join(ROOT_PATH, 'app/assets/javascripts'), - 'emojis': path.join(ROOT_PATH, 'fixtures/emojis'), - 'empty_states': path.join(ROOT_PATH, 'app/views/shared/empty_states'), - 'icons': path.join(ROOT_PATH, 'app/views/shared/icons'), - 'images': path.join(ROOT_PATH, 'app/assets/images'), - 'vendor': path.join(ROOT_PATH, 'vendor/assets/javascripts'), - 'vue$': 'vue/dist/vue.esm.js', - } - } -} + '~': path.join(ROOT_PATH, 'app/assets/javascripts'), + emojis: path.join(ROOT_PATH, 'fixtures/emojis'), + empty_states: path.join(ROOT_PATH, 'app/views/shared/empty_states'), + icons: path.join(ROOT_PATH, 'app/views/shared/icons'), + images: path.join(ROOT_PATH, 'app/assets/images'), + vendor: path.join(ROOT_PATH, 'vendor/assets/javascripts'), + vue$: 'vue/dist/vue.esm.js', + spec: path.join(ROOT_PATH, 'spec/javascripts'), + }, + }, + + // sqljs requires fs + node: { + fs: 'empty', + }, +}; if (IS_PRODUCTION) { config.devtool = 'source-map'; @@ -286,13 +261,14 @@ if (IS_PRODUCTION) { new webpack.NoEmitOnErrorsPlugin(), new webpack.LoaderOptionsPlugin({ minimize: true, - debug: false + debug: false, }), + new webpack.optimize.ModuleConcatenationPlugin(), new webpack.optimize.UglifyJsPlugin({ - sourceMap: true + sourceMap: true, }), new webpack.DefinePlugin({ - 'process.env': { NODE_ENV: JSON.stringify('production') } + 'process.env': { NODE_ENV: JSON.stringify('production') }, }) ); @@ -311,11 +287,30 @@ if (IS_DEV_SERVER) { headers: { 'Access-Control-Allow-Origin': '*' }, stats: 'errors-only', hot: DEV_SERVER_LIVERELOAD, - inline: DEV_SERVER_LIVERELOAD + inline: DEV_SERVER_LIVERELOAD, }; config.plugins.push( // watch node_modules for changes if we encounter a missing module compile error - new WatchMissingNodeModulesPlugin(path.join(ROOT_PATH, 'node_modules')) + new WatchMissingNodeModulesPlugin(path.join(ROOT_PATH, 'node_modules')), + + // watch for changes to our automatic entry point modules + { + apply(compiler) { + compiler.plugin('emit', (compilation, callback) => { + compilation.contextDependencies = [ + ...compilation.contextDependencies, + ...watchAutoEntries, + ]; + + // report our auto-generated bundle count + console.log( + `${autoEntriesCount} entries from '/pages' automatically added to webpack output.` + ); + + callback(); + }); + }, + } ); if (DEV_SERVER_LIVERELOAD) { config.plugins.push(new webpack.HotModuleReplacementPlugin()); |
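A minimal Ruby sketch of the descending sort introduced in config/initializers/grape_route_helpers_fix.rb above. Route is a hypothetical stand-in struct (not the gem's DecoratedRoute) and the helper name is illustrative; the point is only that the variant with more dynamic segments comes first after sorting, so it is no longer shadowed by the shorter one:

    # Stand-in for a decorated Grape route, reduced to the fields the sort needs.
    Route = Struct.new(:helper_name, :dynamic_path_segments)

    routes = [
      Route.new('api_v1_cats_owners_path', %w[id]),           # shorter route, defined first
      Route.new('api_v1_cats_owners_path', %w[id owner_id]),  # longer route, defined later
    ]

    # Same ordering as the initializer: descending by number of dynamic segments.
    sorted = routes.sort_by { |r| -r.dynamic_path_segments.count }
    sorted.map { |r| r.dynamic_path_segments.count }
    # => [2, 1]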
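The custom_options change in config/initializers/lograge.rb reshapes request parameters into an array of key/value hashes. A small sketch with a hypothetical payload, assuming ActiveSupport is loaded (it provides Hash#except in the Rails initializer):

    require 'active_support/core_ext/hash/except'

    params = { 'controller' => 'projects', 'action' => 'show', 'id' => '42', 'format' => 'html' }

    # Same chain as the initializer: drop the routing keys, then emit one
    # { key:, value: } pair per remaining parameter.
    params
      .except(*%w(controller action format))
      .each_pair
      .map { |k, v| { key: k, value: v } }
    # => [{ key: "id", value: "42" }]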
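The api_internal_request? predicate added to config/initializers/rack_attack_global.rb exempts internal API endpoints from throttle_unauthenticated. An illustrative check, with example paths only:

    INTERNAL_API = %r{^/api/v\d+/internal/}

    # Internal endpoints now bypass the unauthenticated throttle:
    '/api/v4/internal/allowed' =~ INTERNAL_API  # => 0 (matches at position 0)

    # Other unauthenticated API requests are still throttled:
    '/api/v4/projects' =~ INTERNAL_API          # => nil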
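The recurring constraint rewrites in config/routes/* (for example /[^\/]+/ becoming %r{[^/]+}) only change the regexp literal style so the slash no longer needs escaping; matching behaviour is identical, as this illustrative comparison shows:

    filename = 'avatar.png'

    filename[/[^\/]+/]   # => "avatar.png"
    filename[%r{[^/]+}]  # => "avatar.png"

    # Both stop at the first slash in a nested path:
    'nested/path/file.txt'[%r{[^/]+}]  # => "nested"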