-rw-r--r--  .gitlab-ci.yml | 25
-rw-r--r--  GITLAB_SHELL_VERSION | 2
-rw-r--r--  app/assets/javascripts/environments/components/environment_actions.js | 11
-rw-r--r--  app/assets/javascripts/environments/components/environment_external_url.js | 14
-rw-r--r--  app/assets/javascripts/environments/components/environment_item.js | 19
-rw-r--r--  app/assets/javascripts/environments/components/environment_monitoring.js | 31
-rw-r--r--  app/assets/javascripts/environments/components/environment_stop.js | 12
-rw-r--r--  app/assets/javascripts/environments/components/environment_terminal_button.js | 16
-rw-r--r--  app/assets/javascripts/groups_select.js | 78
-rw-r--r--  app/assets/javascripts/lib/utils/common_utils.js | 16
-rw-r--r--  app/assets/javascripts/right_sidebar.js | 2
-rw-r--r--  app/assets/javascripts/vue_pipelines_index/components/async_button.js | 1
-rw-r--r--  app/assets/stylesheets/pages/environments.scss | 11
-rw-r--r--  app/controllers/admin/application_settings_controller.rb | 9
-rw-r--r--  app/controllers/admin/background_jobs_controller.rb | 2
-rw-r--r--  app/controllers/import/base_controller.rb | 2
-rw-r--r--  app/controllers/search_controller.rb | 40
-rw-r--r--  app/helpers/issuables_helper.rb | 2
-rw-r--r--  app/helpers/sidekiq_helper.rb | 6
-rw-r--r--  app/models/blob.rb | 2
-rw-r--r--  app/models/note.rb | 5
-rw-r--r--  app/models/project_services/jira_service.rb | 2
-rw-r--r--  app/models/project_team.rb | 3
-rw-r--r--  app/models/system_note_metadata.rb | 11
-rw-r--r--  app/models/user.rb | 6
-rw-r--r--  app/serializers/environment_entity.rb | 7
-rw-r--r--  app/services/issues/update_service.rb | 2
-rw-r--r--  app/services/merge_requests/update_service.rb | 2
-rw-r--r--  app/services/note_summary.rb | 20
-rw-r--r--  app/services/notes/update_service.rb | 4
-rw-r--r--  app/services/search/global_service.rb | 4
-rw-r--r--  app/services/search/project_service.rb | 4
-rw-r--r--  app/services/search/snippet_service.rb | 4
-rw-r--r--  app/services/search_service.rb | 63
-rw-r--r--  app/services/system_note_service.rb | 79
-rw-r--r--  app/services/todo_service.rb | 38
-rw-r--r--  app/services/users/create_service.rb | 2
-rw-r--r--  app/views/shared/members/_member.html.haml | 4
-rw-r--r--  changelogs/unreleased/24784-system-notes-meta-data.yml | 4
-rw-r--r--  changelogs/unreleased/24861-stringify-group-member-details.yml | 4
-rw-r--r--  changelogs/unreleased/26595-fix-issue-preselected-template.yml | 4
-rw-r--r--  changelogs/unreleased/28799-todo-creation.yml | 4
-rw-r--r--  changelogs/unreleased/29341-add-metrics-button-env-overview.yml | 4
-rw-r--r--  changelogs/unreleased/29929-jira-doc.yml | 4
-rw-r--r--  changelogs/unreleased/30289-allow-users-to-import-github-projects-to-subgroups.yml | 4
-rw-r--r--  changelogs/unreleased/fix_admin_monitoring_background.yml | 4
-rw-r--r--  changelogs/unreleased/forked-subquery-order.yml | 4
-rw-r--r--  config/initializers/0_inflections.rb | 2
-rw-r--r--  db/fixtures/production/001_admin.rb | 8
-rw-r--r--  db/migrate/20170314082049_create_system_note_metadata.rb | 23
-rw-r--r--  db/schema.rb | 9
-rw-r--r--  doc/administration/high_availability/database.md | 4
-rw-r--r--  doc/ci/examples/deployment/README.md | 86
-rw-r--r--  doc/development/fe_guide/performance.md | 3
-rw-r--r--  doc/development/fe_guide/testing.md | 4
-rw-r--r--  doc/user/project/new_ci_build_permissions_model.md | 18
-rw-r--r--  features/support/env.rb | 16
-rw-r--r--  lib/gitlab/git/repository.rb | 7
-rw-r--r--  spec/controllers/admin/application_settings_controller_spec.rb | 25
-rw-r--r--  spec/factories/system_note_metadata.rb | 6
-rw-r--r--  spec/features/issues_spec.rb | 17
-rw-r--r--  spec/features/projects/group_links_spec.rb | 22
-rw-r--r--  spec/helpers/sidekiq_helper_spec.rb | 8
-rw-r--r--  spec/javascripts/environments/environment_actions_spec.js | 7
-rw-r--r--  spec/javascripts/environments/environment_monitoring_spec.js | 23
-rw-r--r--  spec/javascripts/environments/environment_stop_spec.js | 2
-rw-r--r--  spec/javascripts/environments/environment_terminal_button_spec.js | 2
-rw-r--r--  spec/javascripts/lib/utils/common_utils_spec.js | 31
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 1
-rw-r--r--  spec/mailers/notify_spec.rb | 15
-rw-r--r--  spec/migrations/migrate_process_commit_worker_jobs_spec.rb | 2
-rw-r--r--  spec/models/system_note_metadata_spec.rb | 27
-rw-r--r--  spec/policies/issue_policy_spec.rb | 2
-rw-r--r--  spec/policies/project_policy_spec.rb | 2
-rw-r--r--  spec/serializers/analytics_issue_serializer_spec.rb | 2
-rw-r--r--  spec/serializers/analytics_merge_request_serializer_spec.rb | 2
-rw-r--r--  spec/serializers/commit_entity_spec.rb | 2
-rw-r--r--  spec/serializers/environment_entity_spec.rb | 20
-rw-r--r--  spec/serializers/environment_serializer_spec.rb | 15
-rw-r--r--  spec/services/issues/update_service_spec.rb | 11
-rw-r--r--  spec/services/merge_requests/update_service_spec.rb | 11
-rw-r--r--  spec/services/note_summary_spec.rb | 44
-rw-r--r--  spec/services/notes/update_service_spec.rb | 16
-rw-r--r--  spec/services/search/global_service_spec.rb | 66
-rw-r--r--  spec/services/search_service_spec.rb | 299
-rw-r--r--  spec/services/system_note_service_spec.rb | 169
-rw-r--r--  spec/services/todo_service_spec.rb | 138
-rw-r--r--  spec/services/users/create_service_spec.rb | 17
-rw-r--r--  spec/support/controllers/githubish_import_controller_shared_examples.rb | 14
-rw-r--r--  spec/support/notify_shared_examples.rb | 2
-rw-r--r--  spec/support/select2_helper.rb | 8
-rw-r--r--  spec/support/test_env.rb | 4
-rw-r--r--  spec/tasks/gitlab/backup_rake_spec.rb | 6
-rw-r--r--  spec/views/projects/builds/_build.html.haml_spec.rb | 2
-rw-r--r--  spec/views/projects/builds/_generic_commit_status.html.haml_spec.rb | 2
-rw-r--r--  spec/views/projects/builds/show.html.haml_spec.rb | 2
-rw-r--r--  spec/views/projects/commit/_commit_box.html.haml_spec.rb | 2
-rw-r--r--  spec/views/projects/issues/_related_branches.html.haml_spec.rb | 2
-rw-r--r--  spec/views/projects/merge_requests/_commits.html.haml_spec.rb | 7
-rw-r--r--  spec/views/projects/merge_requests/edit.html.haml_spec.rb | 4
-rw-r--r--  spec/views/projects/merge_requests/show.html.haml_spec.rb | 4
-rw-r--r--  spec/views/projects/pipelines/_stage.html.haml_spec.rb | 2
-rw-r--r--  spec/views/projects/pipelines/show.html.haml_spec.rb | 2
-rw-r--r--  spec/views/projects/tree/show.html.haml_spec.rb | 2
-rw-r--r--  spec/workers/delete_merged_branches_worker_spec.rb | 2
-rw-r--r--  spec/workers/emails_on_push_worker_spec.rb | 2
-rw-r--r--  spec/workers/git_garbage_collect_worker_spec.rb | 2
-rw-r--r--  spec/workers/group_destroy_worker_spec.rb | 2
-rw-r--r--  spec/workers/pipeline_metrics_worker_spec.rb | 2
-rw-r--r--  spec/workers/pipeline_notification_worker_spec.rb | 2
-rw-r--r--  spec/workers/post_receive_spec.rb | 2
-rw-r--r--  spec/workers/process_commit_worker_spec.rb | 2
-rw-r--r--  spec/workers/project_cache_worker_spec.rb | 2
-rw-r--r--  spec/workers/project_destroy_worker_spec.rb | 2
-rw-r--r--  spec/workers/repository_check/batch_worker_spec.rb | 8
-rw-r--r--  spec/workers/repository_fork_worker_spec.rb | 4
-rw-r--r--  spec/workers/repository_import_worker_spec.rb | 2
-rw-r--r--  spec/workers/update_merge_requests_worker_spec.rb | 2
-rw-r--r--  vendor/assets/javascripts/notebooklab.js | 5061
120 files changed, 4095 insertions, 2875 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ff4da3a8884..476307e7076 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -292,14 +292,35 @@ rake karma:
paths:
- coverage-javascript/
-lint-doc:
+docs:check:apilint:
+ image: "phusion/baseimage"
stage: test
<<: *dedicated-runner
- image: "phusion/baseimage:latest"
+ variables:
+ GIT_DEPTH: "3"
+ cache: {}
+ dependencies: []
before_script: []
script:
- scripts/lint-doc.sh
+docs:check:links:
+ image: "registry.gitlab.com/gitlab-org/gitlab-build-images:nanoc-bootstrap-ruby-2.4-alpine"
+ stage: test
+ <<: *dedicated-runner
+ variables:
+ GIT_DEPTH: "3"
+ cache: {}
+ dependencies: []
+ before_script: []
+ script:
+ - mv doc/ /nanoc/content/
+ - cd /nanoc
+ # Build HTML from Markdown
+ - bundle exec nanoc
+ # Check the internal links
+ - bundle exec nanoc check internal_links
+
bundler:check:
stage: test
<<: *dedicated-runner
diff --git a/GITLAB_SHELL_VERSION b/GITLAB_SHELL_VERSION
index 0062ac97180..a1ef0cae183 100644
--- a/GITLAB_SHELL_VERSION
+++ b/GITLAB_SHELL_VERSION
@@ -1 +1 @@
-5.0.0
+5.0.2
diff --git a/app/assets/javascripts/environments/components/environment_actions.js b/app/assets/javascripts/environments/components/environment_actions.js
index 455a8819549..385085c03e2 100644
--- a/app/assets/javascripts/environments/components/environment_actions.js
+++ b/app/assets/javascripts/environments/components/environment_actions.js
@@ -25,6 +25,12 @@ export default {
};
},
+ computed: {
+ title() {
+ return 'Deploy to...';
+ },
+ },
+
methods: {
onClickAction(endpoint) {
this.isLoading = true;
@@ -44,8 +50,11 @@ export default {
template: `
<div class="btn-group" role="group">
<button
- class="dropdown btn btn-default dropdown-new js-dropdown-play-icon-container"
+ class="dropdown btn btn-default dropdown-new js-dropdown-play-icon-container has-tooltip"
+ data-container="body"
data-toggle="dropdown"
+ :title="title"
+ :aria-label="title"
:disabled="isLoading">
<span>
<span v-html="playIconSvg"></span>
diff --git a/app/assets/javascripts/environments/components/environment_external_url.js b/app/assets/javascripts/environments/components/environment_external_url.js
index b4f9eb357fd..d79b916c360 100644
--- a/app/assets/javascripts/environments/components/environment_external_url.js
+++ b/app/assets/javascripts/environments/components/environment_external_url.js
@@ -9,13 +9,21 @@ export default {
},
},
+ computed: {
+ title() {
+ return 'Open';
+ },
+ },
+
template: `
<a
- class="btn external_url"
+ class="btn external-url has-tooltip"
+ data-container="body"
:href="externalUrl"
target="_blank"
- rel="noopener noreferrer"
- title="Environment external URL">
+ rel="noopener noreferrer nofollow"
+ :title="title"
+ :aria-label="title">
<i class="fa fa-external-link" aria-hidden="true"></i>
</a>
`,
diff --git a/app/assets/javascripts/environments/components/environment_item.js b/app/assets/javascripts/environments/components/environment_item.js
index 66ed10e19d1..9c196562c6c 100644
--- a/app/assets/javascripts/environments/components/environment_item.js
+++ b/app/assets/javascripts/environments/components/environment_item.js
@@ -5,6 +5,7 @@ import ExternalUrlComponent from './environment_external_url';
import StopComponent from './environment_stop';
import RollbackComponent from './environment_rollback';
import TerminalButtonComponent from './environment_terminal_button';
+import MonitoringButtonComponent from './environment_monitoring';
import CommitComponent from '../../vue_shared/components/commit';
/**
@@ -22,6 +23,7 @@ export default {
'stop-component': StopComponent,
'rollback-component': RollbackComponent,
'terminal-button-component': TerminalButtonComponent,
+ 'monitoring-button-component': MonitoringButtonComponent,
},
props: {
@@ -392,6 +394,14 @@ export default {
return '';
},
+ monitoringUrl() {
+ if (this.model && this.model.metrics_path) {
+ return this.model.metrics_path;
+ }
+
+ return '';
+ },
+
/**
* Constructs folder URL based on the current location and the folder id.
*
@@ -496,13 +506,16 @@ export default {
<external-url-component v-if="externalURL && canReadEnvironment"
:external-url="externalURL"/>
- <stop-component v-if="hasStopAction && canCreateDeployment"
- :stop-url="model.stop_path"
- :service="service"/>
+ <monitoring-button-component v-if="monitoringUrl && canReadEnvironment"
+ :monitoring-url="monitoringUrl"/>
<terminal-button-component v-if="model && model.terminal_path"
:terminal-path="model.terminal_path"/>
+ <stop-component v-if="hasStopAction && canCreateDeployment"
+ :stop-url="model.stop_path"
+ :service="service"/>
+
<rollback-component v-if="canRetry && canCreateDeployment"
:is-last-deployment="isLastDeployment"
:retry-url="retryUrl"
diff --git a/app/assets/javascripts/environments/components/environment_monitoring.js b/app/assets/javascripts/environments/components/environment_monitoring.js
new file mode 100644
index 00000000000..064e2fc7434
--- /dev/null
+++ b/app/assets/javascripts/environments/components/environment_monitoring.js
@@ -0,0 +1,31 @@
+/**
+ * Renders the Monitoring (Metrics) link in environments table.
+ */
+export default {
+ props: {
+ monitoringUrl: {
+ type: String,
+ default: '',
+ required: true,
+ },
+ },
+
+ computed: {
+ title() {
+ return 'Monitoring';
+ },
+ },
+
+ template: `
+ <a
+ class="btn monitoring-url has-tooltip"
+ data-container="body"
+ :href="monitoringUrl"
+ target="_blank"
+ rel="noopener noreferrer nofollow"
+ :title="title"
+ :aria-label="title">
+ <i class="fa fa-area-chart" aria-hidden="true"></i>
+ </a>
+ `,
+};
diff --git a/app/assets/javascripts/environments/components/environment_stop.js b/app/assets/javascripts/environments/components/environment_stop.js
index 5404d647745..47102692024 100644
--- a/app/assets/javascripts/environments/components/environment_stop.js
+++ b/app/assets/javascripts/environments/components/environment_stop.js
@@ -25,6 +25,12 @@ export default {
};
},
+ computed: {
+ title() {
+ return 'Stop';
+ },
+ },
+
methods: {
onClick() {
if (confirm('Are you sure you want to stop this environment?')) {
@@ -45,10 +51,12 @@ export default {
template: `
<button type="button"
- class="btn stop-env-link"
+ class="btn stop-env-link has-tooltip"
+ data-container="body"
@click="onClick"
:disabled="isLoading"
- title="Stop Environment">
+ :title="title"
+ :aria-label="title">
<i class="fa fa-stop stop-env-icon" aria-hidden="true"></i>
<i v-if="isLoading" class="fa fa-spinner fa-spin" aria-hidden="true"></i>
</button>
diff --git a/app/assets/javascripts/environments/components/environment_terminal_button.js b/app/assets/javascripts/environments/components/environment_terminal_button.js
index 66a71faa02f..092a50a0d6f 100644
--- a/app/assets/javascripts/environments/components/environment_terminal_button.js
+++ b/app/assets/javascripts/environments/components/environment_terminal_button.js
@@ -14,12 +14,22 @@ export default {
},
data() {
- return { terminalIconSvg };
+ return {
+ terminalIconSvg,
+ };
+ },
+
+ computed: {
+ title() {
+ return 'Terminal';
+ },
},
template: `
- <a class="btn terminal-button"
- title="Open web terminal"
+ <a class="btn terminal-button has-tooltip"
+ data-container="body"
+ :title="title"
+ :aria-label="title"
:href="terminalPath">
${terminalIconSvg}
</a>
diff --git a/app/assets/javascripts/groups_select.js b/app/assets/javascripts/groups_select.js
index e5dfa30edab..602a3b78189 100644
--- a/app/assets/javascripts/groups_select.js
+++ b/app/assets/javascripts/groups_select.js
@@ -6,23 +6,60 @@ var slice = [].slice;
window.GroupsSelect = (function() {
function GroupsSelect() {
$('.ajax-groups-select').each((function(_this) {
+ const self = _this;
+
return function(i, select) {
var all_available, skip_groups;
- all_available = $(select).data('all-available');
- skip_groups = $(select).data('skip-groups') || [];
- return $(select).select2({
+ const $select = $(select);
+ all_available = $select.data('all-available');
+ skip_groups = $select.data('skip-groups') || [];
+
+ $select.select2({
placeholder: "Search for a group",
- multiple: $(select).hasClass('multiselect'),
+ multiple: $select.hasClass('multiselect'),
minimumInputLength: 0,
- query: function(query) {
- var options = { all_available: all_available, skip_groups: skip_groups };
- return Api.groups(query.term, options, function(groups) {
- var data;
- data = {
- results: groups
+ ajax: {
+ url: Api.buildUrl(Api.groupsPath),
+ dataType: 'json',
+ quietMillis: 250,
+ transport: function (params) {
+ $.ajax(params).then((data, status, xhr) => {
+ const results = data || [];
+
+ const headers = gl.utils.normalizeCRLFHeaders(xhr.getAllResponseHeaders());
+ const currentPage = parseInt(headers['X-PAGE'], 10) || 0;
+ const totalPages = parseInt(headers['X-TOTAL-PAGES'], 10) || 0;
+ const more = currentPage < totalPages;
+
+ return {
+ results,
+ pagination: {
+ more,
+ },
+ };
+ }).then(params.success).fail(params.error);
+ },
+ data: function (search, page) {
+ return {
+ search,
+ page,
+ per_page: GroupsSelect.PER_PAGE,
+ all_available,
+ skip_groups,
+ };
+ },
+ results: function (data, page) {
+ if (data.length) return { results: [] };
+
+ const results = data.length ? data : data.results || [];
+ const more = data.pagination ? data.pagination.more : false;
+
+ return {
+ results,
+ page,
+ more,
};
- return query.callback(data);
- });
+ },
},
initSelection: function(element, callback) {
var id;
@@ -34,19 +71,23 @@ window.GroupsSelect = (function() {
formatResult: function() {
var args;
args = 1 <= arguments.length ? slice.call(arguments, 0) : [];
- return _this.formatResult.apply(_this, args);
+ return self.formatResult.apply(self, args);
},
formatSelection: function() {
var args;
args = 1 <= arguments.length ? slice.call(arguments, 0) : [];
- return _this.formatSelection.apply(_this, args);
+ return self.formatSelection.apply(self, args);
},
- dropdownCssClass: "ajax-groups-dropdown",
+ dropdownCssClass: "ajax-groups-dropdown select2-infinite",
// we do not want to escape markup since we are displaying html in results
escapeMarkup: function(m) {
return m;
}
});
+
+ self.dropdown = document.querySelector('.select2-infinite .select2-results');
+
+ $select.on('select2-loaded', self.forceOverflow.bind(self));
};
})(this));
}
@@ -65,5 +106,12 @@ window.GroupsSelect = (function() {
return group.full_name;
};
+ GroupsSelect.prototype.forceOverflow = function (e) {
+ const itemHeight = this.dropdown.querySelector('.select2-result:first-child').clientHeight;
+ this.dropdown.style.height = `${Math.floor(this.dropdown.scrollHeight - (itemHeight * 0.9))}px`;
+ };
+
+ GroupsSelect.PER_PAGE = 20;
+
return GroupsSelect;
})();
diff --git a/app/assets/javascripts/lib/utils/common_utils.js b/app/assets/javascripts/lib/utils/common_utils.js
index a1423b6fda5..4aad0128aef 100644
--- a/app/assets/javascripts/lib/utils/common_utils.js
+++ b/app/assets/javascripts/lib/utils/common_utils.js
@@ -232,6 +232,22 @@
};
/**
+ this will take in the getAllResponseHeaders result and normalize them
+ this way we don't run into production issues when nginx gives us lowercased header keys
+ */
+ w.gl.utils.normalizeCRLFHeaders = (headers) => {
+ const headersObject = {};
+ const headersArray = headers.split('\n');
+
+ headersArray.forEach((header) => {
+ const keyValue = header.split(': ');
+ headersObject[keyValue[0]] = keyValue[1];
+ });
+
+ return w.gl.utils.normalizeHeaders(headersObject);
+ };
+
+ /**
* Parses pagination object string values into numbers.
*
* @param {Object} paginationInformation
diff --git a/app/assets/javascripts/right_sidebar.js b/app/assets/javascripts/right_sidebar.js
index f3cd41fb4db..a9b3de281e1 100644
--- a/app/assets/javascripts/right_sidebar.js
+++ b/app/assets/javascripts/right_sidebar.js
@@ -209,7 +209,7 @@ import Cookies from 'js-cookie';
};
Sidebar.prototype.setSidebarHeight = function() {
- const $navHeight = $('.navbar-gitlab').outerHeight() + $('.layout-nav').outerHeight();
+ const $navHeight = $('.navbar-gitlab').outerHeight() + $('.layout-nav').outerHeight() + $('.sub-nav-scroll').outerHeight();
const $rightSidebar = $('.js-right-sidebar');
const diff = $navHeight - $(window).scrollTop();
if (diff > 0) {
diff --git a/app/assets/javascripts/vue_pipelines_index/components/async_button.js b/app/assets/javascripts/vue_pipelines_index/components/async_button.js
index aaebf29d8ae..58b8db4d519 100644
--- a/app/assets/javascripts/vue_pipelines_index/components/async_button.js
+++ b/app/assets/javascripts/vue_pipelines_index/components/async_button.js
@@ -83,6 +83,7 @@ export default {
:class="buttonClass"
:title="title"
:aria-label="title"
+ data-container="body"
data-placement="top"
:disabled="isLoading">
<i :class="iconClass" aria-hidden="true"/>
diff --git a/app/assets/stylesheets/pages/environments.scss b/app/assets/stylesheets/pages/environments.scss
index 25be7f408d0..3d91e0b22d8 100644
--- a/app/assets/stylesheets/pages/environments.scss
+++ b/app/assets/stylesheets/pages/environments.scss
@@ -26,7 +26,7 @@
.table.ci-table {
.environments-actions {
- min-width: 200px;
+ min-width: 300px;
}
.environments-commit,
@@ -222,3 +222,12 @@
stroke: $black;
stroke-width: 1;
}
+
+.environments-actions {
+ .external-url,
+ .monitoring-url,
+ .terminal-button,
+ .stop-env-link {
+ width: 38px;
+ }
+}
diff --git a/app/controllers/admin/application_settings_controller.rb b/app/controllers/admin/application_settings_controller.rb
index 8d831ffdd70..0bfbe47eb4f 100644
--- a/app/controllers/admin/application_settings_controller.rb
+++ b/app/controllers/admin/application_settings_controller.rb
@@ -45,15 +45,6 @@ class Admin::ApplicationSettingsController < Admin::ApplicationController
end
def application_setting_params
- restricted_levels = params[:application_setting][:restricted_visibility_levels]
- if restricted_levels.nil?
- params[:application_setting][:restricted_visibility_levels] = []
- else
- restricted_levels.map! do |level|
- level.to_i
- end
- end
-
import_sources = params[:application_setting][:import_sources]
if import_sources.nil?
params[:application_setting][:import_sources] = []
diff --git a/app/controllers/admin/background_jobs_controller.rb b/app/controllers/admin/background_jobs_controller.rb
index 87eb7ff4c93..5f90ad7137d 100644
--- a/app/controllers/admin/background_jobs_controller.rb
+++ b/app/controllers/admin/background_jobs_controller.rb
@@ -1,7 +1,7 @@
class Admin::BackgroundJobsController < Admin::ApplicationController
def show
ps_output, _ = Gitlab::Popen.popen(%W(ps ww -U #{Gitlab.config.gitlab.user} -o pid,pcpu,pmem,stat,start,command))
- @sidekiq_processes = ps_output.split("\n").grep(/sidekiq/)
+ @sidekiq_processes = ps_output.split("\n").grep(/sidekiq \d+\.\d+\.\d+/)
@concurrency = Sidekiq.options[:concurrency]
end
end
diff --git a/app/controllers/import/base_controller.rb b/app/controllers/import/base_controller.rb
index 256c41e6145..eeee027ef2d 100644
--- a/app/controllers/import/base_controller.rb
+++ b/app/controllers/import/base_controller.rb
@@ -11,7 +11,7 @@ class Import::BaseController < ApplicationController
namespace.add_owner(current_user)
namespace
rescue ActiveRecord::RecordNotUnique, ActiveRecord::RecordInvalid
- Namespace.find_by_path_or_name(name)
+ Namespace.find_by_full_path(name)
end
end
end
diff --git a/app/controllers/search_controller.rb b/app/controllers/search_controller.rb
index 612d69cf557..4a579601785 100644
--- a/app/controllers/search_controller.rb
+++ b/app/controllers/search_controller.rb
@@ -6,45 +6,19 @@ class SearchController < ApplicationController
layout 'search'
def show
- if params[:project_id].present?
- @project = Project.find_by(id: params[:project_id])
- @project = nil unless can?(current_user, :download_code, @project)
- end
+ search_service = SearchService.new(current_user, params)
- if params[:group_id].present?
- @group = Group.find_by(id: params[:group_id])
- @group = nil unless can?(current_user, :read_group, @group)
- end
+ @project = search_service.project
+ @group = search_service.group
return if params[:search].blank?
@search_term = params[:search]
- @scope = params[:scope]
- @show_snippets = params[:snippets].eql? 'true'
-
- @search_results =
- if @project
- unless %w(blobs notes issues merge_requests milestones wiki_blobs
- commits).include?(@scope)
- @scope = 'blobs'
- end
-
- Search::ProjectService.new(@project, current_user, params).execute
- elsif @show_snippets
- unless %w(snippet_blobs snippet_titles).include?(@scope)
- @scope = 'snippet_blobs'
- end
-
- Search::SnippetService.new(current_user, params).execute
- else
- unless %w(projects issues merge_requests milestones).include?(@scope)
- @scope = 'projects'
- end
- Search::GlobalService.new(current_user, params).execute
- end
-
- @search_objects = @search_results.objects(@scope, params[:page])
+ @scope = search_service.scope
+ @show_snippets = search_service.show_snippets?
+ @search_results = search_service.search_results
+ @search_objects = search_service.search_objects
check_single_commit_result
end
diff --git a/app/helpers/issuables_helper.rb b/app/helpers/issuables_helper.rb
index 749b6245edc..ec57fec4f99 100644
--- a/app/helpers/issuables_helper.rb
+++ b/app/helpers/issuables_helper.rb
@@ -251,7 +251,7 @@ module IssuablesHelper
end
def selected_template(issuable)
- params[:issuable_template] if issuable_templates(issuable).include?(params[:issuable_template])
+ params[:issuable_template] if issuable_templates(issuable).any?{ |template| template[:name] == params[:issuable_template] }
end
def issuable_todo_button_data(issuable, todo, is_collapsed)
diff --git a/app/helpers/sidekiq_helper.rb b/app/helpers/sidekiq_helper.rb
index b5017080cfb..55f4da0ef85 100644
--- a/app/helpers/sidekiq_helper.rb
+++ b/app/helpers/sidekiq_helper.rb
@@ -3,9 +3,9 @@ module SidekiqHelper
(?<pid>\d+)\s+
(?<cpu>[\d\.,]+)\s+
(?<mem>[\d\.,]+)\s+
- (?<state>[DRSTWXZNLsl\+<]+)\s+
- (?<start>.+)\s+
- (?<command>sidekiq.*\])
+ (?<state>[DIEKNRSTVWXZNLpsl\+<>\/\d]+)\s+
+ (?<start>.+?)\s+
+ (?<command>(?:ruby\d+:\s+)?sidekiq.*\].*)
\z/x
def parse_sidekiq_ps(line)
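[Editor's note] A minimal sketch (not part of this commit) of what the widened pattern above buys: the broader state character class and the optional `ruby\d+:` prefix let OpenBSD-style `ps` output match. The constant name `PS_REGEXP` and the sample line are assumptions for illustration only.

```ruby
# Pattern mirrors the named captures shown in the hunk above.
PS_REGEXP = /\A
  (?<pid>\d+)\s+
  (?<cpu>[\d\.,]+)\s+
  (?<mem>[\d\.,]+)\s+
  (?<state>[DIEKNRSTVWXZNLpsl\+<>\/\d]+)\s+
  (?<start>.+?)\s+
  (?<command>(?:ruby\d+:\s+)?sidekiq.*\].*)
\z/x

# An OpenBSD-flavoured ps line (made up): state "S+" and a "ruby23:" interpreter
# prefix, both of which the previous, stricter pattern rejected.
line = '55296  4.8  0.5  S+  20Feb17  ruby23: sidekiq 4.2.7 gitlab [0 of 25 busy] (ruby23)'

match = line.match(PS_REGEXP)
match[:pid]     # => "55296"
match[:state]   # => "S+"
match[:command] # => "ruby23: sidekiq 4.2.7 gitlab [0 of 25 busy] (ruby23)"
```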
diff --git a/app/models/blob.rb b/app/models/blob.rb
index 5b71ac21cc0..95d2111a992 100644
--- a/app/models/blob.rb
+++ b/app/models/blob.rb
@@ -47,7 +47,7 @@ class Blob < SimpleDelegator
end
def ipython_notebook?
- text? && language && language.name == 'Jupyter Notebook'
+ text? && language&.name == 'Jupyter Notebook'
end
def size_within_svg_limits?
diff --git a/app/models/note.rb b/app/models/note.rb
index e22e96aec6f..16d66cb1427 100644
--- a/app/models/note.rb
+++ b/app/models/note.rb
@@ -37,6 +37,7 @@ class Note < ActiveRecord::Base
has_many :todos, dependent: :destroy
has_many :events, as: :target, dependent: :destroy
+ has_one :system_note_metadata
delegate :gfm_reference, :local_reference, to: :noteable
delegate :name, to: :project, prefix: true
@@ -70,7 +71,9 @@ class Note < ActiveRecord::Base
scope :fresh, ->{ order(created_at: :asc, id: :asc) }
scope :inc_author_project, ->{ includes(:project, :author) }
scope :inc_author, ->{ includes(:author) }
- scope :inc_relations_for_view, ->{ includes(:project, :author, :updated_by, :resolved_by, :award_emoji) }
+ scope :inc_relations_for_view, -> do
+ includes(:project, :author, :updated_by, :resolved_by, :award_emoji, :system_note_metadata)
+ end
scope :diff_notes, ->{ where(type: %w(LegacyDiffNote DiffNote)) }
scope :non_diff_notes, ->{ where(type: ['Note', nil]) }
diff --git a/app/models/project_services/jira_service.rb b/app/models/project_services/jira_service.rb
index eef403dba92..3b90fd1c2c7 100644
--- a/app/models/project_services/jira_service.rb
+++ b/app/models/project_services/jira_service.rb
@@ -62,7 +62,7 @@ class JiraService < IssueTrackerService
def help
"You need to configure JIRA before enabling this service. For more details
read the
- [JIRA service documentation](#{help_page_url('project_services/jira')})."
+ [JIRA service documentation](#{help_page_url('user/project/integrations/jira')})."
end
def title
diff --git a/app/models/project_team.rb b/app/models/project_team.rb
index 8a53e974b6f..6d6644053f8 100644
--- a/app/models/project_team.rb
+++ b/app/models/project_team.rb
@@ -169,6 +169,9 @@ class ProjectTeam
# Lookup only the IDs we need
user_ids = user_ids - access.keys
+
+ return access if user_ids.empty?
+
users_access = project.project_authorizations.
where(user: user_ids).
group(:user_id).
diff --git a/app/models/system_note_metadata.rb b/app/models/system_note_metadata.rb
new file mode 100644
index 00000000000..5cc66574941
--- /dev/null
+++ b/app/models/system_note_metadata.rb
@@ -0,0 +1,11 @@
+class SystemNoteMetadata < ActiveRecord::Base
+ ICON_TYPES = %w[
+ commit merge confidentiality status label assignee cross_reference
+ title time_tracking branch milestone discussion task moved
+ ].freeze
+
+ validates :note, presence: true
+ validates :action, inclusion: ICON_TYPES, allow_nil: true
+
+ belongs_to :note
+end
diff --git a/app/models/user.rb b/app/models/user.rb
index cbd741f96ed..95a766f2ede 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -635,8 +635,10 @@ class User < ActiveRecord::Base
end
def fork_of(project)
- links = ForkedProjectLink.where(forked_from_project_id: project, forked_to_project_id: personal_projects)
-
+ links = ForkedProjectLink.where(
+ forked_from_project_id: project,
+ forked_to_project_id: personal_projects.unscope(:order)
+ )
if links.any?
links.first.forked_to_project
else
diff --git a/app/serializers/environment_entity.rb b/app/serializers/environment_entity.rb
index 4c017960628..4ff15a78115 100644
--- a/app/serializers/environment_entity.rb
+++ b/app/serializers/environment_entity.rb
@@ -9,6 +9,13 @@ class EnvironmentEntity < Grape::Entity
expose :last_deployment, using: DeploymentEntity
expose :stop_action?
+ expose :metrics_path, if: -> (environment, _) { environment.has_metrics? } do |environment|
+ metrics_namespace_project_environment_path(
+ environment.project.namespace,
+ environment.project,
+ environment)
+ end
+
expose :environment_path do |environment|
namespace_project_environment_path(
environment.project.namespace,
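[Editor's note] A rough sketch (not part of this commit) of the serializer output the Vue `monitoringUrl` computed property above consumes; the `metrics_path` key is only present when `environment.has_metrics?` is true. Field values and the path shape are illustrative assumptions.

```ruby
# Grape::Entity representation of an environment, as sent to the frontend.
EnvironmentEntity.represent(environment).as_json
# => {
#      ...,
#      metrics_path: "/group/project/environments/1/metrics",  # only when has_metrics?
#      environment_path: "/group/project/environments/1"
#    }
```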
diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb
index a444c78b609..b7fe5cb168b 100644
--- a/app/services/issues/update_service.rb
+++ b/app/services/issues/update_service.rb
@@ -19,7 +19,7 @@ module Issues
if issue.previous_changes.include?('title') ||
issue.previous_changes.include?('description')
- todo_service.update_issue(issue, current_user)
+ todo_service.update_issue(issue, current_user, old_mentioned_users)
end
if issue.previous_changes.include?('milestone_id')
diff --git a/app/services/merge_requests/update_service.rb b/app/services/merge_requests/update_service.rb
index 3cb9aae83f6..ab7fcf3b6e2 100644
--- a/app/services/merge_requests/update_service.rb
+++ b/app/services/merge_requests/update_service.rb
@@ -28,7 +28,7 @@ module MergeRequests
if merge_request.previous_changes.include?('title') ||
merge_request.previous_changes.include?('description')
- todo_service.update_merge_request(merge_request, current_user)
+ todo_service.update_merge_request(merge_request, current_user, old_mentioned_users)
end
if merge_request.previous_changes.include?('target_branch')
diff --git a/app/services/note_summary.rb b/app/services/note_summary.rb
new file mode 100644
index 00000000000..a6f6320d573
--- /dev/null
+++ b/app/services/note_summary.rb
@@ -0,0 +1,20 @@
+class NoteSummary
+ attr_reader :note
+ attr_reader :metadata
+
+ def initialize(noteable, project, author, body, action: nil, commit_count: nil)
+ @note = { noteable: noteable, project: project, author: author, note: body }
+ @metadata = { action: action, commit_count: commit_count }.compact
+
+ set_commit_params if note[:noteable].is_a?(Commit)
+ end
+
+ def metadata?
+ metadata.present?
+ end
+
+ def set_commit_params
+ note.merge!(noteable_type: 'Commit', commit_id: note[:noteable].id)
+ note[:noteable] = nil
+ end
+end
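[Editor's note] A minimal sketch (not part of this commit) of how a `NoteSummary` is built and then consumed by the reworked `create_note` in `app/services/system_note_service.rb` further down in this diff. The issue/project/author objects are assumed to exist; for a `Commit` noteable, `set_commit_params` would instead store `noteable_type: 'Commit'` and `commit_id`.

```ruby
summary = NoteSummary.new(issue, project, author, 'removed assignee', action: 'assignee')

summary.note      # => { noteable: issue, project: project, author: author, note: 'removed assignee' }
summary.metadata  # => { action: 'assignee' }   (nil commit_count is dropped by #compact)
summary.metadata? # => true

# Consuming side, mirroring SystemNoteService#create_note in this diff:
note = Note.create(summary.note.merge(system: true))
note.system_note_metadata = SystemNoteMetadata.new(summary.metadata) if summary.metadata?
```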
diff --git a/app/services/notes/update_service.rb b/app/services/notes/update_service.rb
index 75a4b3ed826..75fd08ea0a9 100644
--- a/app/services/notes/update_service.rb
+++ b/app/services/notes/update_service.rb
@@ -3,11 +3,13 @@ module Notes
def execute(note)
return note unless note.editable?
+ old_mentioned_users = note.mentioned_users.to_a
+
note.update_attributes(params.merge(updated_by: current_user))
note.create_new_cross_references!(current_user)
if note.previous_changes.include?('note')
- TodoService.new.update_note(note, current_user)
+ TodoService.new.update_note(note, current_user, old_mentioned_users)
end
note
diff --git a/app/services/search/global_service.rb b/app/services/search/global_service.rb
index 781cd13b44b..a3c655493a5 100644
--- a/app/services/search/global_service.rb
+++ b/app/services/search/global_service.rb
@@ -16,5 +16,9 @@ module Search
Gitlab::SearchResults.new(current_user, projects, params[:search])
end
+
+ def scope
+ @scope ||= %w[issues merge_requests milestones].delete(params[:scope]) { 'projects' }
+ end
end
end
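[Editor's note] A standalone sketch (not part of this commit) of the `Array#delete`-with-block idiom used by the new `scope` methods: `delete` returns the argument when it is in the array and otherwise falls back to the block's value, so one line both whitelists the requested scope and supplies a default.

```ruby
%w[issues merge_requests milestones].delete('issues')   { 'projects' } # => "issues"
%w[issues merge_requests milestones].delete('unknown')  { 'projects' } # => "projects"
%w[issues merge_requests milestones].delete(nil)        { 'projects' } # => "projects"
```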
diff --git a/app/services/search/project_service.rb b/app/services/search/project_service.rb
index 4b500914cfb..9a22abae635 100644
--- a/app/services/search/project_service.rb
+++ b/app/services/search/project_service.rb
@@ -12,5 +12,9 @@ module Search
params[:search],
params[:repository_ref])
end
+
+ def scope
+ @scope ||= %w[notes issues merge_requests milestones wiki_blobs commits].delete(params[:scope]) { 'blobs' }
+ end
end
end
diff --git a/app/services/search/snippet_service.rb b/app/services/search/snippet_service.rb
index 0b3e713e220..4f161beea4d 100644
--- a/app/services/search/snippet_service.rb
+++ b/app/services/search/snippet_service.rb
@@ -11,5 +11,9 @@ module Search
Gitlab::SnippetSearchResults.new(snippets, params[:search])
end
+
+ def scope
+ @scope ||= %w[snippet_titles].delete(params[:scope]) { 'snippet_blobs' }
+ end
end
end
diff --git a/app/services/search_service.rb b/app/services/search_service.rb
new file mode 100644
index 00000000000..8d46a8dab3e
--- /dev/null
+++ b/app/services/search_service.rb
@@ -0,0 +1,63 @@
+class SearchService
+ include Gitlab::Allowable
+
+ def initialize(current_user, params = {})
+ @current_user = current_user
+ @params = params.dup
+ end
+
+ def project
+ return @project if defined?(@project)
+
+ @project =
+ if params[:project_id].present?
+ the_project = Project.find_by(id: params[:project_id])
+ can?(current_user, :download_code, the_project) ? the_project : nil
+ else
+ nil
+ end
+ end
+
+ def group
+ return @group if defined?(@group)
+
+ @group =
+ if params[:group_id].present?
+ the_group = Group.find_by(id: params[:group_id])
+ can?(current_user, :read_group, the_group) ? the_group : nil
+ else
+ nil
+ end
+ end
+
+ def show_snippets?
+ return @show_snippets if defined?(@show_snippets)
+
+ @show_snippets = params[:snippets] == 'true'
+ end
+
+ delegate :scope, to: :search_service
+
+ def search_results
+ @search_results ||= search_service.execute
+ end
+
+ def search_objects
+ @search_objects ||= search_results.objects(scope, params[:page])
+ end
+
+ private
+
+ def search_service
+ @search_service ||=
+ if project
+ Search::ProjectService.new(project, current_user, params)
+ elsif show_snippets?
+ Search::SnippetService.new(current_user, params)
+ else
+ Search::GlobalService.new(current_user, params)
+ end
+ end
+
+ attr_reader :current_user, :params
+end
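[Editor's note] A sketch (not part of this commit) of driving the new `SearchService` the way the slimmed-down `SearchController#show` above does; `current_user` and `project` are assumed to be in scope, e.g. in a Rails console.

```ruby
search_service = SearchService.new(current_user,
                                   project_id: project.id,
                                   scope: 'issues',
                                   search: 'wip')

search_service.project        # the project, or nil when :download_code is not allowed
search_service.scope          # => "issues" (unknown scopes fall back to "blobs" here)
search_service.search_results # memoized result of Search::ProjectService#execute
search_service.search_objects # paginated objects for the resolved scope
```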
diff --git a/app/services/system_note_service.rb b/app/services/system_note_service.rb
index 8e02fe3741a..d3e502b66dd 100644
--- a/app/services/system_note_service.rb
+++ b/app/services/system_note_service.rb
@@ -26,7 +26,7 @@ module SystemNoteService
body << new_commit_summary(new_commits).join("\n")
body << "\n\n[Compare with previous version](#{diff_comparison_url(noteable, project, oldrev)})"
- create_note(noteable: noteable, project: project, author: author, note: body)
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'commit', commit_count: total_count))
end
# Called when the assignee of a Noteable is changed or removed
@@ -46,7 +46,7 @@ module SystemNoteService
def change_assignee(noteable, project, author, assignee)
body = assignee.nil? ? 'removed assignee' : "assigned to #{assignee.to_reference}"
- create_note(noteable: noteable, project: project, author: author, note: body)
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'assignee'))
end
# Called when one or more labels on a Noteable are added and/or removed
@@ -86,7 +86,7 @@ module SystemNoteService
body << ' ' << 'label'.pluralize(labels_count)
- create_note(noteable: noteable, project: project, author: author, note: body)
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'label'))
end
# Called when the milestone of a Noteable is changed
@@ -106,7 +106,7 @@ module SystemNoteService
def change_milestone(noteable, project, author, milestone)
body = milestone.nil? ? 'removed milestone' : "changed milestone to #{milestone.to_reference(project)}"
- create_note(noteable: noteable, project: project, author: author, note: body)
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'milestone'))
end
# Called when the estimated time of a Noteable is changed
@@ -132,7 +132,7 @@ module SystemNoteService
"changed time estimate to #{parsed_time}"
end
- create_note(noteable: noteable, project: project, author: author, note: body)
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'time_tracking'))
end
# Called when the spent time of a Noteable is changed
@@ -161,7 +161,7 @@ module SystemNoteService
body = "#{action} #{parsed_time} of time spent"
end
- create_note(noteable: noteable, project: project, author: author, note: body)
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'time_tracking'))
end
# Called when the status of a Noteable is changed
@@ -183,53 +183,57 @@ module SystemNoteService
body = status.dup
body << " via #{source.gfm_reference(project)}" if source
- create_note(noteable: noteable, project: project, author: author, note: body)
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'status'))
end
# Called when 'merge when pipeline succeeds' is executed
def merge_when_pipeline_succeeds(noteable, project, author, last_commit)
body = "enabled an automatic merge when the pipeline for #{last_commit.to_reference(project)} succeeds"
- create_note(noteable: noteable, project: project, author: author, note: body)
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'merge'))
end
# Called when 'merge when pipeline succeeds' is canceled
def cancel_merge_when_pipeline_succeeds(noteable, project, author)
body = 'canceled the automatic merge'
- create_note(noteable: noteable, project: project, author: author, note: body)
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'merge'))
end
def remove_merge_request_wip(noteable, project, author)
body = 'unmarked as a **Work In Progress**'
- create_note(noteable: noteable, project: project, author: author, note: body)
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'title'))
end
def add_merge_request_wip(noteable, project, author)
body = 'marked as a **Work In Progress**'
- create_note(noteable: noteable, project: project, author: author, note: body)
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'title'))
end
def add_merge_request_wip_from_commit(noteable, project, author, commit)
body = "marked as a **Work In Progress** from #{commit.to_reference(project)}"
- create_note(noteable: noteable, project: project, author: author, note: body)
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'title'))
end
def self.resolve_all_discussions(merge_request, project, author)
body = "resolved all discussions"
- create_note(noteable: merge_request, project: project, author: author, note: body)
+ create_note(NoteSummary.new(merge_request, project, author, body, action: 'discussion'))
end
def discussion_continued_in_issue(discussion, project, author, issue)
body = "created #{issue.to_reference} to continue this discussion"
- note_attributes = discussion.reply_attributes.merge(project: project, author: author, note: body)
- note_attributes[:type] = note_attributes.delete(:note_type)
- create_note(note_attributes)
+ note_params = discussion.reply_attributes.merge(project: project, author: author, note: body)
+ note_params[:type] = note_params.delete(:note_type)
+
+ note = Note.create(note_params.merge(system: true))
+ note.system_note_metadata = SystemNoteMetadata.new({ action: 'discussion' })
+
+ note
end
# Called when the title of a Noteable is changed
@@ -253,7 +257,8 @@ module SystemNoteService
marked_new_title = Gitlab::Diff::InlineDiffMarker.new(new_title).mark(new_diffs, mode: :addition, markdown: true)
body = "changed title from **#{marked_old_title}** to **#{marked_new_title}**"
- create_note(noteable: noteable, project: project, author: author, note: body)
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'title'))
end
# Called when the confidentiality changes
@@ -269,7 +274,8 @@ module SystemNoteService
# Returns the created Note object
def change_issue_confidentiality(issue, project, author)
body = issue.confidential ? 'made the issue confidential' : 'made the issue visible to everyone'
- create_note(noteable: issue, project: project, author: author, note: body)
+
+ create_note(NoteSummary.new(issue, project, author, body, action: 'confidentiality'))
end
# Called when a branch in Noteable is changed
@@ -288,7 +294,8 @@ module SystemNoteService
# Returns the created Note object
def change_branch(noteable, project, author, branch_type, old_branch, new_branch)
body = "changed #{branch_type} branch from `#{old_branch}` to `#{new_branch}`"
- create_note(noteable: noteable, project: project, author: author, note: body)
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'branch'))
end
# Called when a branch in Noteable is added or deleted
@@ -314,7 +321,8 @@ module SystemNoteService
end
body = "#{verb} #{branch_type} branch `#{branch}`"
- create_note(noteable: noteable, project: project, author: author, note: body)
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'branch'))
end
# Called when a branch is created from the 'new branch' button on a issue
@@ -325,7 +333,8 @@ module SystemNoteService
link = url_helpers.namespace_project_compare_url(project.namespace, project, from: project.default_branch, to: branch)
body = "created branch [`#{branch}`](#{link})"
- create_note(noteable: issue, project: project, author: author, note: body)
+
+ create_note(NoteSummary.new(issue, project, author, body, action: 'branch'))
end
# Called when a Mentionable references a Noteable
@@ -349,23 +358,12 @@ module SystemNoteService
return if cross_reference_disallowed?(noteable, mentioner)
gfm_reference = mentioner.gfm_reference(noteable.project)
-
- note_options = {
- project: noteable.project,
- author: author,
- note: cross_reference_note_content(gfm_reference)
- }
-
- if noteable.is_a?(Commit)
- note_options.merge!(noteable_type: 'Commit', commit_id: noteable.id)
- else
- note_options[:noteable] = noteable
- end
+ body = cross_reference_note_content(gfm_reference)
if noteable.is_a?(ExternalIssue)
noteable.project.issues_tracker.create_cross_reference_note(noteable, mentioner, author)
else
- create_note(note_options)
+ create_note(NoteSummary.new(noteable, noteable.project, author, body, action: 'cross_reference'))
end
end
@@ -444,7 +442,8 @@ module SystemNoteService
def change_task_status(noteable, project, author, new_task)
status_label = new_task.complete? ? Taskable::COMPLETED : Taskable::INCOMPLETE
body = "marked the task **#{new_task.source}** as #{status_label}"
- create_note(noteable: noteable, project: project, author: author, note: body)
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'task'))
end
# Called when noteable has been moved to another project
@@ -466,7 +465,8 @@ module SystemNoteService
cross_reference = noteable_ref.to_reference(project)
body = "moved #{direction} #{cross_reference}"
- create_note(noteable: noteable, project: project, author: author, note: body)
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'moved'))
end
private
@@ -482,8 +482,11 @@ module SystemNoteService
end
end
- def create_note(args = {})
- Note.create(args.merge(system: true))
+ def create_note(note_summary)
+ note = Note.create(note_summary.note.merge(system: true))
+ note.system_note_metadata = SystemNoteMetadata.new(note_summary.metadata) if note_summary.metadata?
+
+ note
end
def cross_reference_note_prefix
diff --git a/app/services/todo_service.rb b/app/services/todo_service.rb
index bf7e76ec59e..2c56cb4c680 100644
--- a/app/services/todo_service.rb
+++ b/app/services/todo_service.rb
@@ -19,8 +19,8 @@ class TodoService
#
# * mark all pending todos related to the issue for the current user as done
#
- def update_issue(issue, current_user)
- update_issuable(issue, current_user)
+ def update_issue(issue, current_user, skip_users = [])
+ update_issuable(issue, current_user, skip_users)
end
# When close an issue we should:
@@ -60,8 +60,8 @@ class TodoService
#
# * create a todo for each mentioned user on merge request
#
- def update_merge_request(merge_request, current_user)
- update_issuable(merge_request, current_user)
+ def update_merge_request(merge_request, current_user, skip_users = [])
+ update_issuable(merge_request, current_user, skip_users)
end
# When close a merge request we should:
@@ -123,7 +123,7 @@ class TodoService
mark_pending_todos_as_done(merge_request, merge_request.author)
mark_pending_todos_as_done(merge_request, merge_request.merge_user) if merge_request.merge_when_pipeline_succeeds?
end
-
+
# When a merge request could not be automatically merged due to its unmergeable state we should:
#
# * create a todo for a merge_user
@@ -131,7 +131,7 @@ class TodoService
def merge_request_became_unmergeable(merge_request)
create_unmergeable_todo(merge_request, merge_request.merge_user) if merge_request.merge_when_pipeline_succeeds?
end
-
+
# When create a note we should:
#
# * mark all pending todos related to the noteable for the note author as done
@@ -146,8 +146,8 @@ class TodoService
# * mark all pending todos related to the noteable for the current user as done
# * create a todo for each new user mentioned on note
#
- def update_note(note, current_user)
- handle_note(note, current_user)
+ def update_note(note, current_user, skip_users = [])
+ handle_note(note, current_user, skip_users)
end
# When an emoji is awarded we should:
@@ -223,11 +223,11 @@ class TodoService
create_mention_todos(issuable.project, issuable, author)
end
- def update_issuable(issuable, author)
+ def update_issuable(issuable, author, skip_users = [])
# Skip toggling a task list item in a description
return if toggling_tasks?(issuable)
- create_mention_todos(issuable.project, issuable, author)
+ create_mention_todos(issuable.project, issuable, author, nil, skip_users)
end
def destroy_issuable(issuable, user)
@@ -239,7 +239,7 @@ class TodoService
issuable.tasks? && issuable.updated_tasks.any?
end
- def handle_note(note, author)
+ def handle_note(note, author, skip_users = [])
# Skip system notes, and notes on project snippet
return if note.system? || note.for_snippet?
@@ -247,7 +247,7 @@ class TodoService
target = note.noteable
mark_pending_todos_as_done(target, author)
- create_mention_todos(project, target, author, note)
+ create_mention_todos(project, target, author, note, skip_users)
end
def create_assignment_todo(issuable, author)
@@ -257,14 +257,14 @@ class TodoService
end
end
- def create_mention_todos(project, target, author, note = nil)
+ def create_mention_todos(project, target, author, note = nil, skip_users = [])
# Create Todos for directly addressed users
- directly_addressed_users = filter_directly_addressed_users(project, note || target, author)
+ directly_addressed_users = filter_directly_addressed_users(project, note || target, author, skip_users)
attributes = attributes_for_todo(project, target, author, Todo::DIRECTLY_ADDRESSED, note)
create_todos(directly_addressed_users, attributes)
# Create Todos for mentioned users
- mentioned_users = filter_mentioned_users(project, note || target, author)
+ mentioned_users = filter_mentioned_users(project, note || target, author, skip_users)
attributes = attributes_for_todo(project, target, author, Todo::MENTIONED, note)
create_todos(mentioned_users, attributes)
end
@@ -307,13 +307,13 @@ class TodoService
reject_users_without_access(users, project, target).uniq
end
- def filter_mentioned_users(project, target, author)
- mentioned_users = target.mentioned_users(author)
+ def filter_mentioned_users(project, target, author, skip_users = [])
+ mentioned_users = target.mentioned_users(author) - skip_users
filter_todo_users(mentioned_users, project, target)
end
- def filter_directly_addressed_users(project, target, author)
- directly_addressed_users = target.directly_addressed_users(author)
+ def filter_directly_addressed_users(project, target, author, skip_users = [])
+ directly_addressed_users = target.directly_addressed_users(author) - skip_users
filter_todo_users(directly_addressed_users, project, target)
end
diff --git a/app/services/users/create_service.rb b/app/services/users/create_service.rb
index f4f0b80f30a..193fcd85896 100644
--- a/app/services/users/create_service.rb
+++ b/app/services/users/create_service.rb
@@ -94,7 +94,7 @@ module Users
def build_user_params
if current_user&.is_admin?
user_params = params.slice(*admin_create_params)
- user_params[:created_by_id] = current_user.id
+ user_params[:created_by_id] = current_user&.id
if params[:reset_password]
user_params.merge!(force_random_password: true, password_expires_at: nil)
diff --git a/app/views/shared/members/_member.html.haml b/app/views/shared/members/_member.html.haml
index 8e721c9c8dd..a5aa768b1b2 100644
--- a/app/views/shared/members/_member.html.haml
+++ b/app/views/shared/members/_member.html.haml
@@ -31,7 +31,7 @@
Joined #{time_ago_with_tooltip(member.created_at)}
- if member.expires?
·
- %span{ class: ('text-warning' if member.expires_soon?) }
+ %span{ class: "#{"text-warning" if member.expires_soon?} has-tooltip", title: member.expires_at.to_time.in_time_zone.to_s(:medium) }
Expires in #{distance_of_time_in_words_to_now(member.expires_at)}
- else
@@ -47,7 +47,7 @@
- current_resource = @project || @group
.controls.member-controls
- if show_controls && member.source == current_resource
- - if user != current_user
+ - if user != current_user && can_admin_member
= form_for member, remote: true, html: { class: 'form-horizontal js-edit-member-form' } do |f|
= f.hidden_field :access_level
.member-form-control.dropdown.append-right-5
diff --git a/changelogs/unreleased/24784-system-notes-meta-data.yml b/changelogs/unreleased/24784-system-notes-meta-data.yml
new file mode 100644
index 00000000000..757ae9e0527
--- /dev/null
+++ b/changelogs/unreleased/24784-system-notes-meta-data.yml
@@ -0,0 +1,4 @@
+---
+title: Add metadata to system notes
+merge_request: 9964
+author:
diff --git a/changelogs/unreleased/24861-stringify-group-member-details.yml b/changelogs/unreleased/24861-stringify-group-member-details.yml
new file mode 100644
index 00000000000..f56a1060862
--- /dev/null
+++ b/changelogs/unreleased/24861-stringify-group-member-details.yml
@@ -0,0 +1,4 @@
+---
+title: Hide form inputs for group member without editing rights
+merge_request: 7816
+author:
diff --git a/changelogs/unreleased/26595-fix-issue-preselected-template.yml b/changelogs/unreleased/26595-fix-issue-preselected-template.yml
new file mode 100644
index 00000000000..a94765f8f2a
--- /dev/null
+++ b/changelogs/unreleased/26595-fix-issue-preselected-template.yml
@@ -0,0 +1,4 @@
+---
+title: Fix linking to new issue with selected template via url parameter
+merge_request:
+author:
diff --git a/changelogs/unreleased/28799-todo-creation.yml b/changelogs/unreleased/28799-todo-creation.yml
new file mode 100644
index 00000000000..c6e05468568
--- /dev/null
+++ b/changelogs/unreleased/28799-todo-creation.yml
@@ -0,0 +1,4 @@
+---
+title: Create todos only for new mentions
+merge_request:
+author:
diff --git a/changelogs/unreleased/29341-add-metrics-button-env-overview.yml b/changelogs/unreleased/29341-add-metrics-button-env-overview.yml
new file mode 100644
index 00000000000..16b69235dff
--- /dev/null
+++ b/changelogs/unreleased/29341-add-metrics-button-env-overview.yml
@@ -0,0 +1,4 @@
+---
+title: Add metrics button to environments overview page
+merge_request: 10234
+author:
diff --git a/changelogs/unreleased/29929-jira-doc.yml b/changelogs/unreleased/29929-jira-doc.yml
new file mode 100644
index 00000000000..f79dcd84634
--- /dev/null
+++ b/changelogs/unreleased/29929-jira-doc.yml
@@ -0,0 +1,4 @@
+---
+title: Fix link to Jira service documentation
+merge_request:
+author:
diff --git a/changelogs/unreleased/30289-allow-users-to-import-github-projects-to-subgroups.yml b/changelogs/unreleased/30289-allow-users-to-import-github-projects-to-subgroups.yml
new file mode 100644
index 00000000000..a33382a85e3
--- /dev/null
+++ b/changelogs/unreleased/30289-allow-users-to-import-github-projects-to-subgroups.yml
@@ -0,0 +1,4 @@
+---
+title: Allow users to import GitHub projects to subgroups
+merge_request:
+author:
diff --git a/changelogs/unreleased/fix_admin_monitoring_background.yml b/changelogs/unreleased/fix_admin_monitoring_background.yml
new file mode 100644
index 00000000000..3a9a1c88672
--- /dev/null
+++ b/changelogs/unreleased/fix_admin_monitoring_background.yml
@@ -0,0 +1,4 @@
+---
+title: Handle parsing OpenBSD ps output properly to display sidekiq infos on admin->monitoring->background
+merge_request: 10303
+author: Sebastian Reitenbach
diff --git a/changelogs/unreleased/forked-subquery-order.yml b/changelogs/unreleased/forked-subquery-order.yml
new file mode 100644
index 00000000000..06fb8236783
--- /dev/null
+++ b/changelogs/unreleased/forked-subquery-order.yml
@@ -0,0 +1,4 @@
+---
+title: Remove unnecessary ORDER BY clause from `forked_to_project_id` subquery
+merge_request:
+author: mhasbini
diff --git a/config/initializers/0_inflections.rb b/config/initializers/0_inflections.rb
index d4197da3fa9..f977104ff9d 100644
--- a/config/initializers/0_inflections.rb
+++ b/config/initializers/0_inflections.rb
@@ -10,5 +10,5 @@
# end
#
ActiveSupport::Inflector.inflections do |inflect|
- inflect.uncountable %w(award_emoji project_statistics)
+ inflect.uncountable %w(award_emoji project_statistics system_note_metadata)
end
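[Editor's note] The motivation for the uncountable entry is presumably table-name derivation: without it Rails would pluralize the model name when computing `table_name`, which would not match the `system_note_metadata` table created by the migration in this commit. A quick sketch:

```ruby
# Without the inflection:
'system_note_metadata'.pluralize   # => "system_note_metadatas"

# With inflect.uncountable %w(... system_note_metadata) the name round-trips:
'system_note_metadata'.pluralize   # => "system_note_metadata"
SystemNoteMetadata.table_name      # => "system_note_metadata"
```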
diff --git a/db/fixtures/production/001_admin.rb b/db/fixtures/production/001_admin.rb
index b37dc794015..1c7c89f7bbd 100644
--- a/db/fixtures/production/001_admin.rb
+++ b/db/fixtures/production/001_admin.rb
@@ -12,10 +12,12 @@ else
user_args[:password] = ENV['GITLAB_ROOT_PASSWORD']
end
-user = User.new(user_args)
-user.skip_confirmation!
+# Only admins can create other admin users in Users::CreateService so to solve
+# the chicken-and-egg problem, we pass a non-persisted admin user to the service.
+transient_admin = User.new(admin: true)
+user = Users::CreateService.new(transient_admin, user_args.merge!(skip_confirmation: true)).execute
-if user.save
+if user.persisted?
puts "Administrator account created:".color(:green)
puts
puts "login: root".color(:green)
diff --git a/db/migrate/20170314082049_create_system_note_metadata.rb b/db/migrate/20170314082049_create_system_note_metadata.rb
new file mode 100644
index 00000000000..dd1e6cf8172
--- /dev/null
+++ b/db/migrate/20170314082049_create_system_note_metadata.rb
@@ -0,0 +1,23 @@
+class CreateSystemNoteMetadata < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ create_table :system_note_metadata do |t|
+ t.references :note, null: false
+ t.integer :commit_count
+ t.string :action
+
+ t.timestamps null: false
+ end
+
+ add_concurrent_foreign_key :system_note_metadata, :notes, column: :note_id
+ end
+
+ def down
+ drop_table :system_note_metadata
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index f476637ceb2..dba242548c1 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -1075,6 +1075,14 @@ ActiveRecord::Schema.define(version: 20170317203554) do
add_index "subscriptions", ["subscribable_id", "subscribable_type", "user_id", "project_id"], name: "index_subscriptions_on_subscribable_and_user_id_and_project_id", unique: true, using: :btree
+ create_table "system_note_metadata", force: :cascade do |t|
+ t.integer "note_id", null: false
+ t.integer "commit_count"
+ t.string "action"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ end
+
create_table "taggings", force: :cascade do |t|
t.integer "tag_id"
t.integer "taggable_id"
@@ -1308,6 +1316,7 @@ ActiveRecord::Schema.define(version: 20170317203554) do
add_foreign_key "protected_branch_merge_access_levels", "protected_branches"
add_foreign_key "protected_branch_push_access_levels", "protected_branches"
add_foreign_key "subscriptions", "projects", on_delete: :cascade
+ add_foreign_key "system_note_metadata", "notes", name: "fk_d83a918cb1", on_delete: :cascade
add_foreign_key "timelogs", "issues", name: "fk_timelogs_issues_issue_id", on_delete: :cascade
add_foreign_key "timelogs", "merge_requests", name: "fk_timelogs_merge_requests_merge_request_id", on_delete: :cascade
add_foreign_key "trending_projects", "projects", on_delete: :cascade
diff --git a/doc/administration/high_availability/database.md b/doc/administration/high_availability/database.md
index cf3aca106e9..c22b1af8bfb 100644
--- a/doc/administration/high_availability/database.md
+++ b/doc/administration/high_availability/database.md
@@ -13,8 +13,8 @@ Database Service (RDS) that runs PostgreSQL.
If you use a cloud-managed service, or provide your own PostgreSQL:
-1. Setup PostgreSQL according to the
- [database requirements document](doc/install/requirements.md#database).
+1. Set up PostgreSQL according to the
+ [database requirements document](../../install/requirements.md#database).
1. Set up a `gitlab` username with a password of your choice. The `gitlab` user
needs privileges to create the `gitlabhq_production` database.
1. Configure the GitLab application servers with the appropriate details.
diff --git a/doc/ci/examples/deployment/README.md b/doc/ci/examples/deployment/README.md
index d28aa282825..7b0995597c4 100644
--- a/doc/ci/examples/deployment/README.md
+++ b/doc/ci/examples/deployment/README.md
@@ -1,20 +1,30 @@
-## Using Dpl as deployment tool
-Dpl (dee-pee-ell) is a deploy tool made for continuous deployment that's developed and used by Travis CI, but can also be used with GitLab CI.
+# Using Dpl as deployment tool
-**We recommend to use Dpl, if you're deploying to any of these of these services: https://github.com/travis-ci/dpl#supported-providers**.
+[Dpl](https://github.com/travis-ci/dpl) (dee-pee-ell) is a deploy tool made for
+continuous deployment that's developed and used by Travis CI, but can also be
+used with GitLab CI.
-### Requirements
-To use Dpl you need at least Ruby 1.8.7 with ability to install gems.
+>**Note:**
+We recommend using Dpl if you're deploying to any of these services:
+https://github.com/travis-ci/dpl#supported-providers.
+
+## Requirements
+
+To use Dpl you need at least Ruby 1.9.3 with the ability to install gems.
+
+## Basic usage
+
+Dpl can be installed on any machine with:
-### Basic usage
-The Dpl can be installed on any machine with:
```
gem install dpl
```
-This allows you to test all commands from your shell, rather than having to test it on a CI server.
+This allows you to test all commands from your local terminal, rather than
+having to test them on a CI server.
If you don't have Ruby installed you can do it on Debian-compatible Linux with:
+
```
apt-get update
apt-get install ruby-dev
@@ -26,9 +36,10 @@ To use it simply define provider and any additional parameters required by the p
For example if you want to use it to deploy your application to heroku, you need to specify `heroku` as provider, specify `api-key` and `app`.
There's more and all possible parameters can be found here: https://github.com/travis-ci/dpl#heroku
-```
+```yaml
staging:
- type: deploy
+ stage: deploy
+ script:
- gem install dpl
- dpl --provider=heroku --app=my-app-staging --api-key=$HEROKU_STAGING_API_KEY
```
@@ -37,14 +48,17 @@ In the above example we use Dpl to deploy `my-app-staging` to Heroku server with
To use different provider take a look at long list of [Supported Providers](https://github.com/travis-ci/dpl#supported-providers).
-### Using Dpl with Docker
+## Using Dpl with Docker
+
When you use GitLab Runner you most likely configured it to use your server's shell commands.
This means that all commands are run in context of local user (ie. gitlab_runner or gitlab_ci_multi_runner).
It also means that most probably in your Docker container you don't have the Ruby runtime installed.
You will have to install it:
-```
+
+```yaml
staging:
- type: deploy
+ stage: deploy
+ script:
- apt-get update -yq
- apt-get install -y ruby-dev
- gem install dpl
@@ -53,24 +67,31 @@ staging:
- master
```
-The first line `apt-get update -yq` updates the list of available packages, where second `apt-get install -y ruby-dev` install `Ruby` runtime on system.
+The first line, `apt-get update -yq`, updates the list of available packages,
+while the second, `apt-get install -y ruby-dev`, installs the Ruby runtime on the system.
The above example is valid for all Debian-compatible systems.
-### Usage in staging and production
-It's pretty common in developer workflow to have staging (development) and production environment.
-If we consider above example: we would like to deploy `master` branch to `staging` and `all tags` to `production` environment.
+## Usage in staging and production
+
+It's pretty common in the development workflow to have staging (development) and
+production environments.
+
+Let's consider the following example: we would like to deploy the `master`
+branch to `staging` and all tags to the `production` environment.
The final `.gitlab-ci.yml` for that setup would look like this:
-```
+```yaml
staging:
- type: deploy
+ stage: deploy
+ script:
- gem install dpl
- dpl --provider=heroku --app=my-app-staging --api-key=$HEROKU_STAGING_API_KEY
only:
- master
-
+
production:
- type: deploy
+ stage: deploy
+ script:
- gem install dpl
- dpl --provider=heroku --app=my-app-production --api-key=$HEROKU_PRODUCTION_API_KEY
only:
@@ -78,21 +99,28 @@ production:
```
We created two deploy jobs that are executed on different events:
+
1. `staging` is executed for all commits that were pushed to `master` branch,
2. `production` is executed for all pushed tags.
We also use two secure variables:
+
1. `HEROKU_STAGING_API_KEY` - Heroku API key used to deploy staging app,
2. `HEROKU_PRODUCTION_API_KEY` - Heroku API key used to deploy production app.
-### Storing API keys
-In GitLab CI 7.12 a new feature was introduced: Secure Variables.
-Secure Variables can added by going to `Project > Variables > Add Variable`.
-**This feature requires `gitlab-runner` with version equal or greater than 0.4.0.**
-The variables that are defined in the project settings are sent along with the build script to the runner.
-The secure variables are stored out of the repository. Never store secrets in your projects' .gitlab-ci.yml.
-It is also important that secret's value is hidden in the job log.
+## Storing API keys
+
+Secure Variables can be added by going to your project's
+**Settings ➔ CI/CD Pipelines ➔ Secret variables**. The variables that are defined
+in the project settings are sent along with the build script to the Runner.
+The secure variables are stored outside of the repository. Never store secrets in
+your project's `.gitlab-ci.yml`. It is also important that the secret's value
+is hidden in the job log.
+
+You can access an added variable by prefixing its name with `$` (on non-Windows runners)
+or `%` (for Windows Batch runners):
-You access added variable by prefixing it's name with `$` (on non-Windows runners) or `%` (for Windows Batch runners):
1. `$SECRET_VARIABLE` - use it for non-Windows runners
2. `%SECRET_VARIABLE%` - use it for Windows Batch runners
+
+Read more about the [CI variables](../../variables/README.md).
diff --git a/doc/development/fe_guide/performance.md b/doc/development/fe_guide/performance.md
index 2d76bb18cff..9437a5f7a6e 100644
--- a/doc/development/fe_guide/performance.md
+++ b/doc/development/fe_guide/performance.md
@@ -17,8 +17,7 @@ polling rate.
A `Poll-Interval: -1` means you should disable polling, and this must be implemented.
1. A response with HTTP status `4XX` or `5XX` should disable polling as well.
1. Use a common library for polling.
-1. Poll on active tabs only. Use a common library to find out which tab currently has eyes on it.
-Please use [Focus](https://gitlab.com/andrewn/focus). Specifically [Eyeballs Detector](https://gitlab.com/andrewn/focus/blob/master/lib/eyeballs-detector.js).
+1. Poll on active tabs only. Please use [Visibility](https://github.com/ai/visibilityjs).
1. Use regular polling intervals, do not use backoff polling, or jitter, as the interval will be
controlled by the server.
1. The backend code will most likely be using etags. You do not and should not check for status
diff --git a/doc/development/fe_guide/testing.md b/doc/development/fe_guide/testing.md
index bb6adeacc4c..8d3513d3566 100644
--- a/doc/development/fe_guide/testing.md
+++ b/doc/development/fe_guide/testing.md
@@ -7,7 +7,7 @@ feature tests with Capybara for integration testing.
Feature tests need to be written for all new features. Regression tests ought
to be written for all bug fixes to prevent them from recurring in the future.
-See [the Testing Standards and Style Guidelines](/doc/development/testing.md)
+See [the Testing Standards and Style Guidelines](../testing.md)
for more information on general testing practices at GitLab.
## Karma test suite
@@ -48,7 +48,7 @@ remove these directives when you commit your code.
Information on setting up and running RSpec integration tests with
[Capybara][capybara] can be found in the
-[general testing guide](/doc/development/testing.md).
+[general testing guide](../testing.md).
## Gotchas
diff --git a/doc/user/project/new_ci_build_permissions_model.md b/doc/user/project/new_ci_build_permissions_model.md
index 55610a7b014..f846736028f 100644
--- a/doc/user/project/new_ci_build_permissions_model.md
+++ b/doc/user/project/new_ci_build_permissions_model.md
@@ -141,6 +141,7 @@ with GitLab 8.12.
With the new job permissions model, there is now an easy way to access all
dependent source code in a project. That way, we can:
+1. Access a project's dependent repositories
1. Access a project's [Git submodules][gitsub]
1. Access private container images
1. Access project's and submodule LFS objects
@@ -177,6 +178,22 @@ As a user:
access to. As an Administrator, you can verify that by impersonating the user
and retry the failing job in order to verify that everything is correct.
+### Dependent repositories
+
+The [Job environment variable][jobenv] `CI_JOB_TOKEN` can be used to
+authenticate any clones of dependent repositories. For example:
+
+```
+git clone https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.com/myuser/mydependentrepo
+```
+
+It can also be used for system-wide authentication
+(only do this in a Docker container, as it will overwrite `~/.netrc`):
+
+```
+echo -e "machine gitlab.com\nlogin gitlab-ci-token\npassword ${CI_JOB_TOKEN}" > ~/.netrc
+```
+
### Git submodules
To properly configure submodules with GitLab CI, read the
@@ -221,3 +238,4 @@ test:
[triggers]: ../../ci/triggers/README.md
[update-docs]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/doc/update
[workhorse]: https://gitlab.com/gitlab-org/gitlab-workhorse
+[jobenv]: ../../ci/variables/README.md#predefined-variables-environment-variables
diff --git a/features/support/env.rb b/features/support/env.rb
index f394c30d52f..26cdd9d746d 100644
--- a/features/support/env.rb
+++ b/features/support/env.rb
@@ -33,3 +33,19 @@ Spinach.hooks.before_run do
include FactoryGirl::Syntax::Methods
end
+
+module StdoutReporterWithScenarioLocation
+ # Override the standard reporter to show filename and line number next to each
+ # scenario for easy, focused re-runs
+ def before_scenario_run(scenario, step_definitions = nil)
+ @max_step_name_length = scenario.steps.map(&:name).map(&:length).max if scenario.steps.any?
+ name = scenario.name
+
+ # This number has no significance, it's just to line things up
+ max_length = @max_step_name_length + 19
+ out.puts "\n #{'Scenario:'.green} #{name.light_green.ljust(max_length)}" \
+ " # #{scenario.feature.filename}:#{scenario.line}"
+ end
+end
+
+Spinach::Reporter::Stdout.prepend(StdoutReporterWithScenarioLocation)
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index 057f7748c3e..4e72519c81d 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -346,7 +346,12 @@ module Gitlab
cmd << "--after=#{options[:after].iso8601}" if options[:after]
cmd << "--before=#{options[:before].iso8601}" if options[:before]
cmd << sha
- cmd += %W[-- #{options[:path]}] if options[:path].present?
+
+ # :path can be a string or an array of strings
+ if options[:path].present?
+ cmd << '--'
+ cmd += Array(options[:path])
+ end
raw_output = IO.popen(cmd) { |io| io.read }
lines = offset_in_ruby ? raw_output.lines.drop(offset) : raw_output.lines
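
The change lets `Gitlab::Git::Repository#log` take either a single path or an array of paths, appended after the `--` separator. A small usage sketch, mirroring the options used in the spec added later in this commit (the `project` setup is assumed):

```ruby
# Assumes a Project with a seeded repository; `raw_repository` is the
# Gitlab::Git::Repository wrapper, as used elsewhere in these specs.
repository = project.repository.raw_repository

single = repository.log(ref: 'master', path: 'README.md')               # string form, unchanged
multi  = repository.log(ref: 'master', path: %w[PROCESS.md README.md])  # new: array of paths

# Only commits touching at least one of the given paths come back.
multi.each { |commit| puts commit.id }
```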
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb
index 84a1ce773a1..5dd8f66343f 100644
--- a/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/spec/controllers/admin/application_settings_controller_spec.rb
@@ -6,23 +6,34 @@ describe Admin::ApplicationSettingsController do
let(:admin) { create(:admin) }
before do
- sign_in(admin)
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end
- describe 'PATCH #update' do
+ describe 'PUT #update' do
+ before do
+ sign_in(admin)
+ end
+
it 'updates the default_project_visibility for string value' do
- patch :update, application_setting: { default_project_visibility: "20" }
+ put :update, application_setting: { default_project_visibility: "20" }
+
+ expect(response).to redirect_to(admin_application_settings_path)
+ expect(ApplicationSetting.current.default_project_visibility).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ end
+
+ it 'updates the restricted visibility levels for string values' do
+ put :update, application_setting: { restricted_visibility_levels: %w[10 20] }
expect(response).to redirect_to(admin_application_settings_path)
- expect(ApplicationSetting.current.default_project_visibility).to eq Gitlab::VisibilityLevel::PUBLIC
+ expect(ApplicationSetting.current.restricted_visibility_levels).to eq([10, 20])
end
- it 'falls back to default with default_project_visibility setting is omitted' do
- patch :update, application_setting: {}
+ it 'falls back to defaults when settings are omitted' do
+ put :update, application_setting: {}
expect(response).to redirect_to(admin_application_settings_path)
- expect(ApplicationSetting.current.default_project_visibility).to eq Gitlab::VisibilityLevel::PRIVATE
+ expect(ApplicationSetting.current.default_project_visibility).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ expect(ApplicationSetting.current.restricted_visibility_levels).to be_empty
end
end
end
diff --git a/spec/factories/system_note_metadata.rb b/spec/factories/system_note_metadata.rb
new file mode 100644
index 00000000000..f487a2d7e4a
--- /dev/null
+++ b/spec/factories/system_note_metadata.rb
@@ -0,0 +1,6 @@
+FactoryGirl.define do
+ factory :system_note_metadata do
+ note
+ action 'merge'
+ end
+end
diff --git a/spec/features/issues_spec.rb b/spec/features/issues_spec.rb
index a58aedc924e..7afceb88cf9 100644
--- a/spec/features/issues_spec.rb
+++ b/spec/features/issues_spec.rb
@@ -13,6 +13,13 @@ describe 'Issues', feature: true do
user2 = create(:user)
project.team << [[@user, user2], :developer]
+
+ project.repository.create_file(
+ @user,
+ '.gitlab/issue_templates/bug.md',
+ 'this is a test "bug" template',
+ message: 'added issue template',
+ branch_name: 'master')
end
describe 'Edit issue' do
@@ -600,6 +607,16 @@ describe 'Issues', feature: true do
expect(page.find_field("issue_description").value).to match /\n\n$/
end
end
+
+ context 'form filled by URL parameters' do
+ before do
+ visit new_namespace_project_issue_path(project.namespace, project, issuable_template: 'bug')
+ end
+
+ it 'fills in template' do
+ expect(find('.js-issuable-selector .dropdown-toggle-text')).to have_content('bug')
+ end
+ end
end
describe 'new issue by email' do
diff --git a/spec/features/projects/group_links_spec.rb b/spec/features/projects/group_links_spec.rb
index 4c28205da9b..c969acc9140 100644
--- a/spec/features/projects/group_links_spec.rb
+++ b/spec/features/projects/group_links_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-feature 'Project group links', feature: true, js: true do
+feature 'Project group links', :feature, :js do
include Select2Helper
let(:master) { create(:user) }
@@ -51,4 +51,24 @@ feature 'Project group links', feature: true, js: true do
end
end
end
+
+ describe 'the groups dropdown' do
+ before do
+ group_two = create(:group)
+ group.add_owner(master)
+ group_two.add_owner(master)
+
+ visit namespace_project_settings_members_path(project.namespace, project)
+ execute_script 'GroupsSelect.PER_PAGE = 1;'
+ open_select2 '#link_group_id'
+ end
+
+ it 'should infinitely scroll' do
+ expect(find('.select2-drop .select2-results')).to have_selector('.select2-result', count: 1)
+
+ scroll_select2_to_bottom('.select2-drop .select2-results:visible')
+
+ expect(find('.select2-drop .select2-results')).to have_selector('.select2-result', count: 2)
+ end
+ end
end
diff --git a/spec/helpers/sidekiq_helper_spec.rb b/spec/helpers/sidekiq_helper_spec.rb
index f86e496740a..117abc9c556 100644
--- a/spec/helpers/sidekiq_helper_spec.rb
+++ b/spec/helpers/sidekiq_helper_spec.rb
@@ -53,6 +53,14 @@ describe SidekiqHelper do
expect(parts).to eq(['17725', '1.0', '12.1', 'Ssl', '19:20:15', 'sidekiq 4.2.1 gitlab-rails [0 of 25 busy]'])
end
+ it 'parses OpenBSD output' do
+ # OpenBSD 6.1
+ line = '49258 0.5 2.3 R/0 Fri10PM ruby23: sidekiq 4.2.7 gitlab [0 of 25 busy] (ruby23)'
+ parts = helper.parse_sidekiq_ps(line)
+
+ expect(parts).to eq(['49258', '0.5', '2.3', 'R/0', 'Fri10PM', 'ruby23: sidekiq 4.2.7 gitlab [0 of 25 busy] (ruby23)'])
+ end
+
it 'does fail gracefully on line not matching the format' do
line = '55137 10.0 2.1 S+ 2:30pm something'
parts = helper.parse_sidekiq_ps(line)
diff --git a/spec/javascripts/environments/environment_actions_spec.js b/spec/javascripts/environments/environment_actions_spec.js
index 85b73f1d4e2..13840b42bd6 100644
--- a/spec/javascripts/environments/environment_actions_spec.js
+++ b/spec/javascripts/environments/environment_actions_spec.js
@@ -32,7 +32,12 @@ describe('Actions Component', () => {
}).$mount();
});
- it('should render a dropdown with the provided actions', () => {
+ it('should render a dropdown button with icon and title attribute', () => {
+ expect(component.$el.querySelector('.fa-caret-down')).toBeDefined();
+ expect(component.$el.querySelector('.dropdown-new').getAttribute('title')).toEqual('Deploy to...');
+ });
+
+ it('should render a dropdown with the provided list of actions', () => {
expect(
component.$el.querySelectorAll('.dropdown-menu li').length,
).toEqual(actionsMock.length);
diff --git a/spec/javascripts/environments/environment_monitoring_spec.js b/spec/javascripts/environments/environment_monitoring_spec.js
new file mode 100644
index 00000000000..fc451cce641
--- /dev/null
+++ b/spec/javascripts/environments/environment_monitoring_spec.js
@@ -0,0 +1,23 @@
+import Vue from 'vue';
+import monitoringComp from '~/environments/components/environment_monitoring';
+
+describe('Monitoring Component', () => {
+ let MonitoringComponent;
+
+ beforeEach(() => {
+ MonitoringComponent = Vue.extend(monitoringComp);
+ });
+
+ it('should render a link to environment monitoring page', () => {
+ const monitoringUrl = 'https://gitlab.com';
+ const component = new MonitoringComponent({
+ propsData: {
+ monitoringUrl,
+ },
+ }).$mount();
+
+ expect(component.$el.getAttribute('href')).toEqual(monitoringUrl);
+ expect(component.$el.querySelector('.fa-area-chart')).toBeDefined();
+ expect(component.$el.getAttribute('title')).toEqual('Monitoring');
+ });
+});
diff --git a/spec/javascripts/environments/environment_stop_spec.js b/spec/javascripts/environments/environment_stop_spec.js
index 8f79b88f3df..01055e3f255 100644
--- a/spec/javascripts/environments/environment_stop_spec.js
+++ b/spec/javascripts/environments/environment_stop_spec.js
@@ -24,7 +24,7 @@ describe('Stop Component', () => {
it('should render a button to stop the environment', () => {
expect(component.$el.tagName).toEqual('BUTTON');
- expect(component.$el.getAttribute('title')).toEqual('Stop Environment');
+ expect(component.$el.getAttribute('title')).toEqual('Stop');
});
it('should call the service when an action is clicked', () => {
diff --git a/spec/javascripts/environments/environment_terminal_button_spec.js b/spec/javascripts/environments/environment_terminal_button_spec.js
index b07aa4e1745..be2289edc2b 100644
--- a/spec/javascripts/environments/environment_terminal_button_spec.js
+++ b/spec/javascripts/environments/environment_terminal_button_spec.js
@@ -18,7 +18,7 @@ describe('Stop Component', () => {
it('should render a link to open a web terminal with the provided path', () => {
expect(component.$el.tagName).toEqual('A');
- expect(component.$el.getAttribute('title')).toEqual('Open web terminal');
+ expect(component.$el.getAttribute('title')).toEqual('Terminal');
expect(component.$el.getAttribute('href')).toEqual(terminalPath);
});
});
diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js
index d2e24eb7eb2..7cf39d37181 100644
--- a/spec/javascripts/lib/utils/common_utils_spec.js
+++ b/spec/javascripts/lib/utils/common_utils_spec.js
@@ -108,6 +108,37 @@ require('~/lib/utils/common_utils');
});
});
+ describe('gl.utils.normalizeCRLFHeaders', () => {
+ beforeEach(function () {
+ this.CLRFHeaders = 'a-header: a-value\nAnother-Header: ANOTHER-VALUE\nLaSt-HeAdEr: last-VALUE';
+
+ spyOn(String.prototype, 'split').and.callThrough();
+ spyOn(gl.utils, 'normalizeHeaders').and.callThrough();
+
+ this.normalizeCRLFHeaders = gl.utils.normalizeCRLFHeaders(this.CLRFHeaders);
+ });
+
+ it('should split by newline', function () {
+ expect(String.prototype.split).toHaveBeenCalledWith('\n');
+ });
+
+ it('should split by colon+space for each header', function () {
+ expect(String.prototype.split.calls.allArgs().filter(args => args[0] === ': ').length).toBe(3);
+ });
+
+ it('should call gl.utils.normalizeHeaders with a parsed headers object', function () {
+ expect(gl.utils.normalizeHeaders).toHaveBeenCalledWith(jasmine.any(Object));
+ });
+
+ it('should return a normalized headers object', function () {
+ expect(this.normalizeCRLFHeaders).toEqual({
+ 'A-HEADER': 'a-value',
+ 'ANOTHER-HEADER': 'ANOTHER-VALUE',
+ 'LAST-HEADER': 'last-VALUE',
+ });
+ });
+ });
+
describe('gl.utils.parseIntPagination', () => {
it('should parse to integers all string values and return pagination object', () => {
const pagination = {
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 9ed43da1116..d4b7684adfd 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -771,8 +771,8 @@ describe Gitlab::Git::Repository, seed_helper: true do
commits = repository.log(options)
expect(commits.size).to be > 0
- satisfy do
- commits.all? { |commit| commit.created_at >= options[:after] }
+ expect(commits).to satisfy do |commits|
+ commits.all? { |commit| commit.time >= options[:after] }
end
end
end
@@ -784,8 +784,27 @@ describe Gitlab::Git::Repository, seed_helper: true do
commits = repository.log(options)
expect(commits.size).to be > 0
- satisfy do
- commits.all? { |commit| commit.created_at <= options[:before] }
+ expect(commits).to satisfy do |commits|
+ commits.all? { |commit| commit.time <= options[:before] }
+ end
+ end
+ end
+
+ context 'when multiple paths are provided' do
+ let(:options) { { ref: 'master', path: ['PROCESS.md', 'README.md'] } }
+
+ def commit_files(commit)
+ commit.diff(commit.parent_ids.first).deltas.flat_map do |delta|
+ [delta.old_file[:path], delta.new_file[:path]].uniq.compact
+ end
+ end
+
+ it 'only returns commits matching at least one path' do
+ commits = repository.log(options)
+
+ expect(commits.size).to be > 0
+ expect(commits).to satisfy do |commits|
+ commits.none? { |commit| (commit_files(commit) & options[:path]).empty? }
end
end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index ddeb71730e7..002cffd3062 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -29,6 +29,7 @@ notes:
- resolved_by
- todos
- events
+- system_note_metadata
label_links:
- target
- label
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 4b72eb2eaa3..f60c5ffb32a 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -184,6 +184,7 @@ describe Notify do
end
context 'for merge requests' do
+ let(:project) { create(:project, :repository) }
let(:merge_author) { create(:user) }
let(:merge_request) { create(:merge_request, author: current_user, assignee: assignee, source_project: project, target_project: project) }
let(:merge_request_with_description) { create(:merge_request, author: current_user, assignee: assignee, source_project: project, target_project: project, description: FFaker::Lorem.sentence) }
@@ -334,7 +335,7 @@ describe Notify do
end
describe 'project was moved' do
- let(:project) { create(:project) }
+ let(:project) { create(:empty_project) }
let(:user) { create(:user) }
subject { Notify.project_was_moved_email(project.id, user.id, "gitlab/gitlab") }
@@ -460,7 +461,7 @@ describe Notify do
end
describe 'project invitation' do
- let(:project) { create(:project) }
+ let(:project) { create(:empty_project) }
let(:master) { create(:user).tap { |u| project.team << [u, :master] } }
let(:project_member) { invite_to_project(project, inviter: master) }
@@ -480,7 +481,7 @@ describe Notify do
end
describe 'project invitation accepted' do
- let(:project) { create(:project) }
+ let(:project) { create(:empty_project) }
let(:invited_user) { create(:user, name: 'invited user') }
let(:master) { create(:user).tap { |u| project.team << [u, :master] } }
let(:project_member) do
@@ -505,7 +506,7 @@ describe Notify do
end
describe 'project invitation declined' do
- let(:project) { create(:project) }
+ let(:project) { create(:empty_project) }
let(:master) { create(:user).tap { |u| project.team << [u, :master] } }
let(:project_member) do
invitee = invite_to_project(project, inviter: master)
@@ -569,6 +570,7 @@ describe Notify do
end
describe 'on a commit' do
+ let(:project) { create(:project, :repository) }
let(:commit) { project.commit }
before(:each) { allow(note).to receive(:noteable).and_return(commit) }
@@ -636,6 +638,7 @@ describe Notify do
end
context 'items that are noteable, emails for a note on a diff' do
+ let(:project) { create(:project, :repository) }
let(:note_author) { create(:user, name: 'author_name') }
before :each do
@@ -977,6 +980,7 @@ describe Notify do
end
describe 'email on push with multiple commits' do
+ let(:project) { create(:project, :repository) }
let(:example_site_path) { root_path }
let(:user) { create(:user) }
let(:raw_compare) { Gitlab::Git::Compare.new(project.repository.raw_repository, sample_image_commit.id, sample_commit.id) }
@@ -1070,6 +1074,7 @@ describe Notify do
end
describe 'email on push with a single commit' do
+ let(:project) { create(:project, :repository) }
let(:example_site_path) { root_path }
let(:user) { create(:user) }
let(:raw_compare) { Gitlab::Git::Compare.new(project.repository.raw_repository, sample_commit.parent_id, sample_commit.id) }
@@ -1102,7 +1107,7 @@ describe Notify do
end
describe 'HTML emails setting' do
- let(:project) { create(:project) }
+ let(:project) { create(:empty_project) }
let(:user) { create(:user) }
let(:multipart_mail) { Notify.project_was_moved_email(project.id, user.id, "gitlab/gitlab") }
diff --git a/spec/migrations/migrate_process_commit_worker_jobs_spec.rb b/spec/migrations/migrate_process_commit_worker_jobs_spec.rb
index b6d678bac18..3db57595fa6 100644
--- a/spec/migrations/migrate_process_commit_worker_jobs_spec.rb
+++ b/spec/migrations/migrate_process_commit_worker_jobs_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require Rails.root.join('db', 'migrate', '20161124141322_migrate_process_commit_worker_jobs.rb')
describe MigrateProcessCommitWorkerJobs do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:commit) { project.commit.raw.raw_commit }
diff --git a/spec/models/system_note_metadata_spec.rb b/spec/models/system_note_metadata_spec.rb
new file mode 100644
index 00000000000..d238e28209a
--- /dev/null
+++ b/spec/models/system_note_metadata_spec.rb
@@ -0,0 +1,27 @@
+require 'spec_helper'
+
+describe SystemNoteMetadata, models: true do
+ describe 'associations' do
+ it { is_expected.to belong_to(:note) }
+ end
+
+ describe 'validation' do
+ it { is_expected.to validate_presence_of(:note) }
+
+ context 'when action type is invalid' do
+ subject do
+ build(:system_note_metadata, note: build(:note), action: 'invalid_type')
+ end
+
+ it { is_expected.to be_invalid }
+ end
+
+ context 'when action type is valid' do
+ subject do
+ build(:system_note_metadata, note: build(:note), action: 'merge')
+ end
+
+ it { is_expected.to be_valid }
+ end
+ end
+end
diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb
index 7591bfd1471..2905d5b26a5 100644
--- a/spec/policies/issue_policy_spec.rb
+++ b/spec/policies/issue_policy_spec.rb
@@ -5,7 +5,7 @@ describe IssuePolicy, models: true do
describe '#rules' do
context 'using a regular issue' do
- let(:project) { create(:project, :public) }
+ let(:project) { create(:empty_project, :public) }
let(:issue) { create(:issue, project: project) }
let(:policies) { described_class.abilities(user, issue).to_set }
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 0a5edf35f59..064847ee3dc 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -74,7 +74,7 @@ describe ProjectPolicy, models: true do
end
it 'does not include the read_issue permission when the issue author is not a member of the private project' do
- project = create(:project, :private)
+ project = create(:empty_project, :private)
issue = create(:issue, project: project)
user = issue.author
diff --git a/spec/serializers/analytics_issue_serializer_spec.rb b/spec/serializers/analytics_issue_serializer_spec.rb
index 2f08958a783..ba24cf8e481 100644
--- a/spec/serializers/analytics_issue_serializer_spec.rb
+++ b/spec/serializers/analytics_issue_serializer_spec.rb
@@ -8,7 +8,7 @@ describe AnalyticsIssueSerializer do
end
let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let(:project) { create(:empty_project) }
let(:resource) do
{
total_time: "172802.724419",
diff --git a/spec/serializers/analytics_merge_request_serializer_spec.rb b/spec/serializers/analytics_merge_request_serializer_spec.rb
index 62067cc0ef2..56cb08acfc6 100644
--- a/spec/serializers/analytics_merge_request_serializer_spec.rb
+++ b/spec/serializers/analytics_merge_request_serializer_spec.rb
@@ -8,7 +8,7 @@ describe AnalyticsMergeRequestSerializer do
end
let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let(:project) { create(:empty_project) }
let(:resource) do
{
total_time: "172802.724419",
diff --git a/spec/serializers/commit_entity_spec.rb b/spec/serializers/commit_entity_spec.rb
index 0333d73b5b5..04247c78549 100644
--- a/spec/serializers/commit_entity_spec.rb
+++ b/spec/serializers/commit_entity_spec.rb
@@ -6,7 +6,7 @@ describe CommitEntity do
end
let(:request) { double('request') }
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:commit) { project.commit }
subject { entity.as_json }
diff --git a/spec/serializers/environment_entity_spec.rb b/spec/serializers/environment_entity_spec.rb
index 57728ce3181..979d9921941 100644
--- a/spec/serializers/environment_entity_spec.rb
+++ b/spec/serializers/environment_entity_spec.rb
@@ -15,4 +15,24 @@ describe EnvironmentEntity do
it 'exposes core elements of environment' do
expect(subject).to include(:id, :name, :state, :environment_path)
end
+
+ context 'metrics disabled' do
+ before do
+ allow(environment).to receive(:has_metrics?).and_return(false)
+ end
+
+ it "doesn't expose metrics path" do
+ expect(subject).not_to include(:metrics_path)
+ end
+ end
+
+ context 'metrics enabled' do
+ before do
+ allow(environment).to receive(:has_metrics?).and_return(true)
+ end
+
+ it 'exposes metrics path' do
+ expect(subject).to include(:metrics_path)
+ end
+ end
end
diff --git a/spec/serializers/environment_serializer_spec.rb b/spec/serializers/environment_serializer_spec.rb
index 6a6df377b35..1909e6385b5 100644
--- a/spec/serializers/environment_serializer_spec.rb
+++ b/spec/serializers/environment_serializer_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe EnvironmentSerializer do
let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let(:project) { create(:empty_project) }
let(:json) do
described_class
@@ -11,21 +11,20 @@ describe EnvironmentSerializer do
end
context 'when there is a single object provided' do
- before do
- create(:ci_build, :manual, name: 'manual1',
- pipeline: deployable.pipeline)
- end
-
+ let(:project) { create(:project, :repository) }
+ let(:deployable) { create(:ci_build) }
let(:deployment) do
create(:deployment, deployable: deployable,
user: user,
project: project,
sha: project.commit.id)
end
-
- let(:deployable) { create(:ci_build) }
let(:resource) { deployment.environment }
+ before do
+ create(:ci_build, :manual, name: 'manual1', pipeline: deployable.pipeline)
+ end
+
it 'contains important elements of environment' do
expect(json)
.to include(:name, :external_url, :environment_path, :last_deployment)
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index fa472f3e2c3..5b324f3c706 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -13,6 +13,7 @@ describe Issues::UpdateService, services: true do
let(:issue) do
create(:issue, title: 'Old title',
+ description: "for #{user2.to_reference}",
assignee_id: user3.id,
project: project)
end
@@ -182,16 +183,24 @@ describe Issues::UpdateService, services: true do
it 'marks pending todos as done' do
expect(todo.reload.done?).to eq true
end
+
+ it 'does not create any new todos' do
+ expect(Todo.count).to eq(1)
+ end
end
context 'when the description change' do
before do
- update_issue(description: 'Also please fix')
+ update_issue(description: "Also please fix #{user2.to_reference} #{user3.to_reference}")
end
it 'marks todos as done' do
expect(todo.reload.done?).to eq true
end
+
+ it 'creates only 1 new todo' do
+ expect(Todo.count).to eq(2)
+ end
end
context 'when is reassigned' do
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index ad3d767f193..f2ca1e6fcbd 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -12,6 +12,7 @@ describe MergeRequests::UpdateService, services: true do
let(:merge_request) do
create(:merge_request, :simple, title: 'Old title',
+ description: "FYI #{user2.to_reference}",
assignee_id: user3.id,
source_project: project)
end
@@ -225,16 +226,24 @@ describe MergeRequests::UpdateService, services: true do
it 'marks pending todos as done' do
expect(pending_todo.reload).to be_done
end
+
+ it 'does not create any new todos' do
+ expect(Todo.count).to eq(1)
+ end
end
context 'when the description change' do
before do
- update_merge_request({ description: 'Also please fix' })
+ update_merge_request({ description: "Also please fix #{user2.to_reference} #{user3.to_reference}" })
end
it 'marks pending todos as done' do
expect(pending_todo.reload).to be_done
end
+
+ it 'creates only 1 new todo' do
+ expect(Todo.count).to eq(2)
+ end
end
context 'when is reassigned' do
diff --git a/spec/services/note_summary_spec.rb b/spec/services/note_summary_spec.rb
new file mode 100644
index 00000000000..39f06f8f8e7
--- /dev/null
+++ b/spec/services/note_summary_spec.rb
@@ -0,0 +1,44 @@
+require 'spec_helper'
+
+describe NoteSummary, services: true do
+ let(:project) { build(:empty_project) }
+ let(:noteable) { build(:issue) }
+ let(:user) { build(:user) }
+
+ def create_note_summary
+ described_class.new(noteable, project, user, 'note', action: 'icon', commit_count: 5)
+ end
+
+ describe '#metadata?' do
+ it 'returns true when metadata present' do
+ expect(create_note_summary.metadata?).to be_truthy
+ end
+
+ it 'returns false when metadata not present' do
+ expect(described_class.new(noteable, project, user, 'note').metadata?).to be_falsey
+ end
+ end
+
+ describe '#note' do
+ it 'returns note hash' do
+ expect(create_note_summary.note).to eq(noteable: noteable, project: project, author: user, note: 'note')
+ end
+
+ context 'when noteable is a commit' do
+ let(:noteable) { build(:commit) }
+
+ it 'returns note hash specific to commit' do
+ expect(create_note_summary.note).to eq(
+ noteable: nil, project: project, author: user, note: 'note',
+ noteable_type: 'Commit', commit_id: noteable.id
+ )
+ end
+ end
+ end
+
+ describe '#metadata' do
+ it 'returns metadata hash' do
+ expect(create_note_summary.metadata).to eq(action: 'icon', commit_count: 5)
+ end
+ end
+end
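
The spec treats `NoteSummary` as a small value object: a constructor plus `#note`, `#metadata` and `#metadata?`. A hedged sketch of how a caller could consume it, built only on the API exercised above (the real wiring in `SystemNoteService` lives elsewhere in this commit and may differ):

```ruby
# Hypothetical helper based solely on the NoteSummary interface shown in the spec above.
def create_system_note(noteable, project, author, body, action:)
  summary = NoteSummary.new(noteable, project, author, body, action: action)

  note = Note.create!(summary.note.merge(system: true))                     # attribute hash from #note
  note.create_system_note_metadata(summary.metadata) if summary.metadata?   # only when metadata is present
  note
end

# create_system_note(issue, project, user, 'resolved all discussions', action: 'discussion')
```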
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index dde4bde7dc2..905e2f46bde 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -4,12 +4,14 @@ describe Notes::UpdateService, services: true do
let(:project) { create(:empty_project) }
let(:user) { create(:user) }
let(:user2) { create(:user) }
+ let(:user3) { create(:user) }
let(:issue) { create(:issue, project: project) }
- let(:note) { create(:note, project: project, noteable: issue, author: user, note: 'Old note') }
+ let(:note) { create(:note, project: project, noteable: issue, author: user, note: "Old note #{user2.to_reference}") }
before do
project.team << [user, :master]
project.team << [user2, :developer]
+ project.team << [user3, :developer]
end
describe '#execute' do
@@ -23,22 +25,30 @@ describe Notes::UpdateService, services: true do
context 'when the note change' do
before do
- update_note({ note: 'New note' })
+ update_note({ note: "New note #{user2.to_reference} #{user3.to_reference}" })
end
it 'marks todos as done' do
expect(todo.reload).to be_done
end
+
+ it 'creates only 1 new todo' do
+ expect(Todo.count).to eq(2)
+ end
end
context 'when the note does not change' do
before do
- update_note({ note: 'Old note' })
+ update_note({ note: "Old note #{user2.to_reference}" })
end
it 'keep todos' do
expect(todo.reload).to be_pending
end
+
+ it 'does not create any new todos' do
+ expect(Todo.count).to eq(1)
+ end
end
end
end
diff --git a/spec/services/search/global_service_spec.rb b/spec/services/search/global_service_spec.rb
new file mode 100644
index 00000000000..2531607acad
--- /dev/null
+++ b/spec/services/search/global_service_spec.rb
@@ -0,0 +1,66 @@
+require 'spec_helper'
+
+describe Search::GlobalService, services: true do
+ let(:user) { create(:user) }
+ let(:internal_user) { create(:user) }
+
+ let!(:found_project) { create(:empty_project, :private, name: 'searchable_project') }
+ let!(:unfound_project) { create(:empty_project, :private, name: 'unfound_project') }
+ let!(:internal_project) { create(:empty_project, :internal, name: 'searchable_internal_project') }
+ let!(:public_project) { create(:empty_project, :public, name: 'searchable_public_project') }
+
+ before do
+ found_project.add_master(user)
+ end
+
+ describe '#execute' do
+ context 'unauthenticated' do
+ it 'returns public projects only' do
+ results = Search::GlobalService.new(nil, search: "searchable").execute
+
+ expect(results.objects('projects')).to match_array [public_project]
+ end
+ end
+
+ context 'authenticated' do
+ it 'returns public, internal and private projects' do
+ results = Search::GlobalService.new(user, search: "searchable").execute
+
+ expect(results.objects('projects')).to match_array [public_project, found_project, internal_project]
+ end
+
+ it 'returns only public & internal projects' do
+ results = Search::GlobalService.new(internal_user, search: "searchable").execute
+
+ expect(results.objects('projects')).to match_array [internal_project, public_project]
+ end
+
+ it 'namespace name is searchable' do
+ results = Search::GlobalService.new(user, search: found_project.namespace.path).execute
+
+ expect(results.objects('projects')).to match_array [found_project]
+ end
+
+ context 'nested group' do
+ let!(:nested_group) { create(:group, :nested) }
+ let!(:project) { create(:empty_project, namespace: nested_group) }
+
+ before do
+ project.add_master(user)
+ end
+
+ it 'returns result from nested group' do
+ results = Search::GlobalService.new(user, search: project.path).execute
+
+ expect(results.objects('projects')).to match_array [project]
+ end
+
+ it 'returns result from descendants when search inside group' do
+ results = Search::GlobalService.new(user, search: project.path, group_id: nested_group.parent).execute
+
+ expect(results.objects('projects')).to match_array [project]
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index 6ef5fa008aa..2112f1cf9ea 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -1,65 +1,286 @@
require 'spec_helper'
-describe 'Search::GlobalService', services: true do
+describe SearchService, services: true do
let(:user) { create(:user) }
- let(:public_user) { create(:user) }
- let(:internal_user) { create(:user) }
- let!(:found_project) { create(:empty_project, :private, name: 'searchable_project') }
- let!(:unfound_project) { create(:empty_project, :private, name: 'unfound_project') }
- let!(:internal_project) { create(:empty_project, :internal, name: 'searchable_internal_project') }
- let!(:public_project) { create(:empty_project, :public, name: 'searchable_public_project') }
+ let(:accessible_group) { create(:group, :private) }
+ let(:inaccessible_group) { create(:group, :private) }
+ let!(:group_member) { create(:group_member, group: accessible_group, user: user) }
+
+ let!(:accessible_project) { create(:empty_project, :private, name: 'accessible_project') }
+ let!(:inaccessible_project) { create(:empty_project, :private, name: 'inaccessible_project') }
+ let(:note) { create(:note_on_issue, project: accessible_project) }
+
+ let(:snippet) { create(:snippet, author: user) }
+ let(:group_project) { create(:empty_project, group: accessible_group, name: 'group_project') }
+ let(:public_project) { create(:empty_project, :public, name: 'public_project') }
before do
- found_project.team << [user, :master]
+ accessible_project.add_master(user)
+ end
+
+ describe '#project' do
+ context 'when the project is accessible' do
+ it 'returns the project' do
+ project = SearchService.new(user, project_id: accessible_project.id).project
+
+ expect(project).to eq accessible_project
+ end
+ end
+
+ context 'when the project is not accessible' do
+ it 'returns nil' do
+ project = SearchService.new(user, project_id: inaccessible_project.id).project
+
+ expect(project).to be_nil
+ end
+ end
+
+ context 'when there is no project_id' do
+ it 'returns nil' do
+ project = SearchService.new(user).project
+
+ expect(project).to be_nil
+ end
+ end
end
- describe '#execute' do
- context 'unauthenticated' do
- it 'returns public projects only' do
- context = Search::GlobalService.new(nil, search: "searchable")
- results = context.execute
- expect(results.objects('projects')).to match_array [public_project]
+ describe '#group' do
+ context 'when the group is accessible' do
+ it 'returns the group' do
+ group = SearchService.new(user, group_id: accessible_group.id).group
+
+ expect(group).to eq accessible_group
end
end
- context 'authenticated' do
- it 'returns public, internal and private projects' do
- context = Search::GlobalService.new(user, search: "searchable")
- results = context.execute
- expect(results.objects('projects')).to match_array [public_project, found_project, internal_project]
+ context 'when the group is not accessible' do
+ it 'returns nil' do
+ group = SearchService.new(user, group_id: inaccessible_group.id).group
+
+ expect(group).to be_nil
end
+ end
+
+ context 'when there is no group_id' do
+ it 'returns nil' do
+ group = SearchService.new(user).group
- it 'returns only public & internal projects' do
- context = Search::GlobalService.new(internal_user, search: "searchable")
- results = context.execute
- expect(results.objects('projects')).to match_array [internal_project, public_project]
+ expect(group).to be_nil
end
+ end
+ end
+
+ describe '#show_snippets?' do
+ context 'when :snippets is \'true\'' do
+ it 'returns true' do
+ show_snippets = SearchService.new(user, snippets: 'true').show_snippets?
- it 'namespace name is searchable' do
- context = Search::GlobalService.new(user, search: found_project.namespace.path)
- results = context.execute
- expect(results.objects('projects')).to match_array [found_project]
+ expect(show_snippets).to be_truthy
end
+ end
- context 'nested group' do
- let!(:nested_group) { create(:group, :nested) }
- let!(:project) { create(:empty_project, namespace: nested_group) }
+ context 'when :snippets is not \'true\'' do
+ it 'returns false' do
+ show_snippets = SearchService.new(user, snippets: 'tru').show_snippets?
+
+ expect(show_snippets).to be_falsey
+ end
+ end
- before { project.add_master(user) }
+ context 'when :snippets is missing' do
+ it 'returns false' do
+ show_snippets = SearchService.new(user).show_snippets?
- it 'returns result from nested group' do
- context = Search::GlobalService.new(user, search: project.path)
- results = context.execute
- expect(results.objects('projects')).to match_array [project]
+ expect(show_snippets).to be_falsey
+ end
+ end
+ end
+
+ describe '#scope' do
+ context 'with accessible project_id' do
+ context 'and allowed scope' do
+ it 'returns the specified scope' do
+ scope = SearchService.new(user, project_id: accessible_project.id, scope: 'notes').scope
+
+ expect(scope).to eq 'notes'
end
+ end
+
+ context 'and disallowed scope' do
+ it 'returns the default scope' do
+ scope = SearchService.new(user, project_id: accessible_project.id, scope: 'projects').scope
- it 'returns result from descendants when search inside group' do
- context = Search::GlobalService.new(user, search: project.path, group_id: nested_group.parent)
- results = context.execute
- expect(results.objects('projects')).to match_array [project]
+ expect(scope).to eq 'blobs'
end
end
+
+ context 'and no scope' do
+ it 'returns the default scope' do
+ scope = SearchService.new(user, project_id: accessible_project.id).scope
+
+ expect(scope).to eq 'blobs'
+ end
+ end
+ end
+
+ context 'with \'true\' snippets' do
+ context 'and allowed scope' do
+ it 'returns the specified scope' do
+ scope = SearchService.new(user, snippets: 'true', scope: 'snippet_titles').scope
+
+ expect(scope).to eq 'snippet_titles'
+ end
+ end
+
+ context 'and disallowed scope' do
+ it 'returns the default scope' do
+ scope = SearchService.new(user, snippets: 'true', scope: 'projects').scope
+
+ expect(scope).to eq 'snippet_blobs'
+ end
+ end
+
+ context 'and no scope' do
+ it 'returns the default scope' do
+ scope = SearchService.new(user, snippets: 'true').scope
+
+ expect(scope).to eq 'snippet_blobs'
+ end
+ end
+ end
+
+ context 'with no project_id, no snippets' do
+ context 'and allowed scope' do
+ it 'returns the specified scope' do
+ scope = SearchService.new(user, scope: 'issues').scope
+
+ expect(scope).to eq 'issues'
+ end
+ end
+
+ context 'and disallowed scope' do
+ it 'returns the default scope' do
+ scope = SearchService.new(user, scope: 'blobs').scope
+
+ expect(scope).to eq 'projects'
+ end
+ end
+
+ context 'and no scope' do
+ it 'returns the default scope' do
+ scope = SearchService.new(user).scope
+
+ expect(scope).to eq 'projects'
+ end
+ end
+ end
+ end
+
+ describe '#search_results' do
+ context 'with accessible project_id' do
+ it 'returns an instance of Gitlab::ProjectSearchResults' do
+ search_results = SearchService.new(
+ user,
+ project_id: accessible_project.id,
+ scope: 'notes',
+ search: note.note).search_results
+
+ expect(search_results).to be_a Gitlab::ProjectSearchResults
+ end
+ end
+
+ context 'with accessible project_id and \'true\' snippets' do
+ it 'returns an instance of Gitlab::ProjectSearchResults' do
+ search_results = SearchService.new(
+ user,
+ project_id: accessible_project.id,
+ snippets: 'true',
+ scope: 'notes',
+ search: note.note).search_results
+
+ expect(search_results).to be_a Gitlab::ProjectSearchResults
+ end
+ end
+
+ context 'with \'true\' snippets' do
+ it 'returns an instance of Gitlab::SnippetSearchResults' do
+ search_results = SearchService.new(
+ user,
+ snippets: 'true',
+ search: snippet.content).search_results
+
+ expect(search_results).to be_a Gitlab::SnippetSearchResults
+ end
+ end
+
+ context 'with no project_id and no snippets' do
+ it 'returns an instance of Gitlab::SearchResults' do
+ search_results = SearchService.new(
+ user,
+ search: public_project.name).search_results
+
+ expect(search_results).to be_a Gitlab::SearchResults
+ end
+ end
+ end
+
+ describe '#search_objects' do
+ context 'with accessible project_id' do
+ it 'returns objects in the project' do
+ search_objects = SearchService.new(
+ user,
+ project_id: accessible_project.id,
+ scope: 'notes',
+ search: note.note).search_objects
+
+ expect(search_objects.first).to eq note
+ end
+ end
+
+ context 'with accessible project_id and \'true\' snippets' do
+ it 'returns objects in the project' do
+ search_objects = SearchService.new(
+ user,
+ project_id: accessible_project.id,
+ snippets: 'true',
+ scope: 'notes',
+ search: note.note).search_objects
+
+ expect(search_objects.first).to eq note
+ end
+ end
+
+ context 'with \'true\' snippets' do
+ it 'returns objects in snippets' do
+ search_objects = SearchService.new(
+ user,
+ snippets: 'true',
+ search: snippet.content).search_objects
+
+ expect(search_objects.first).to eq snippet
+ end
+ end
+
+ context 'with accessible group_id' do
+ it 'returns objects in the group' do
+ search_objects = SearchService.new(
+ user,
+ group_id: accessible_group.id,
+ search: group_project.name).search_objects
+
+ expect(search_objects.first).to eq group_project
+ end
+ end
+
+ context 'with no project_id, group_id or snippets' do
+ it 'returns objects in global' do
+ search_objects = SearchService.new(
+ user,
+ search: public_project.name).search_objects
+
+ expect(search_objects.first).to eq public_project
+ end
end
end
end
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index d7bf4c39cc0..90cde705b85 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -8,12 +8,15 @@ describe SystemNoteService, services: true do
let(:noteable) { create(:issue, project: project) }
shared_examples_for 'a system note' do
+ let(:expected_noteable) { noteable }
+ let(:commit_count) { nil }
+
it 'is valid' do
expect(subject).to be_valid
end
it 'sets the noteable model' do
- expect(subject.noteable).to eq noteable
+ expect(subject.noteable).to eq expected_noteable
end
it 'sets the project' do
@@ -27,6 +30,19 @@ describe SystemNoteService, services: true do
it 'is a system note' do
expect(subject).to be_system
end
+
+ context 'metadata' do
+ it 'creates a new system note metadata record' do
+ expect { subject }.to change{ SystemNoteMetadata.count }.from(0).to(1)
+ end
+
+ it 'creates a record correctly' do
+ metadata = subject.system_note_metadata
+
+ expect(metadata.commit_count).to eq(commit_count)
+ expect(metadata.action).to eq(action)
+ end
+ end
end
describe '.add_commits' do
@@ -38,7 +54,10 @@ describe SystemNoteService, services: true do
let(:old_commits) { [] }
let(:oldrev) { nil }
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:commit_count) { new_commits.size }
+ let(:action) { 'commit' }
+ end
describe 'note body' do
let(:note_lines) { subject.note.split("\n").reject(&:blank?) }
@@ -117,7 +136,9 @@ describe SystemNoteService, services: true do
let(:assignee) { create(:user) }
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'assignee' }
+ end
context 'when assignee added' do
it 'sets the note text' do
@@ -141,7 +162,9 @@ describe SystemNoteService, services: true do
let(:added) { [] }
let(:removed) { [] }
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'label' }
+ end
context 'with added labels' do
let(:added) { labels }
@@ -176,7 +199,9 @@ describe SystemNoteService, services: true do
let(:milestone) { create(:milestone, project: project) }
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'milestone' }
+ end
context 'when milestone added' do
it 'sets the note text' do
@@ -199,7 +224,9 @@ describe SystemNoteService, services: true do
let(:status) { 'new_status' }
let(:source) { nil }
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'status' }
+ end
context 'with a source' do
let(:source) { double('commit', gfm_reference: 'commit 123456') }
@@ -225,7 +252,9 @@ describe SystemNoteService, services: true do
subject { described_class.merge_when_pipeline_succeeds(noteable, project, author, noteable.diff_head_commit) }
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'merge' }
+ end
it "posts the 'merge when pipeline succeeds' system note" do
expect(subject.note).to match(/enabled an automatic merge when the pipeline for (\w+\/\w+@)?\h{40} succeeds/)
@@ -240,7 +269,9 @@ describe SystemNoteService, services: true do
subject { described_class.cancel_merge_when_pipeline_succeeds(noteable, project, author) }
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'merge' }
+ end
it "posts the 'merge when pipeline succeeds' system note" do
expect(subject.note).to eq "canceled the automatic merge"
@@ -253,7 +284,9 @@ describe SystemNoteService, services: true do
subject { described_class.change_title(noteable, project, author, 'Old title') }
context 'when noteable responds to `title`' do
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'title' }
+ end
it 'sets the note text' do
expect(subject.note).
@@ -266,7 +299,9 @@ describe SystemNoteService, services: true do
subject { described_class.change_issue_confidentiality(noteable, project, author) }
context 'when noteable responds to `confidential`' do
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'confidentiality' }
+ end
it 'sets the note text' do
expect(subject.note).to eq 'made the issue visible to everyone'
@@ -281,7 +316,9 @@ describe SystemNoteService, services: true do
let(:old_branch) { 'old_branch'}
let(:new_branch) { 'new_branch'}
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'branch' }
+ end
context 'when target branch name changed' do
it 'sets the note text' do
@@ -295,7 +332,9 @@ describe SystemNoteService, services: true do
let(:project) { create(:project, :repository) }
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'branch' }
+ end
context 'when source branch deleted' do
it 'sets the note text' do
@@ -309,7 +348,9 @@ describe SystemNoteService, services: true do
let(:project) { create(:project, :repository) }
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'branch' }
+ end
context 'when a branch is created from the new branch button' do
it 'sets the note text' do
@@ -323,7 +364,9 @@ describe SystemNoteService, services: true do
let(:mentioner) { create(:issue, project: project) }
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'cross_reference' }
+ end
context 'when cross-reference disallowed' do
before do
@@ -333,6 +376,10 @@ describe SystemNoteService, services: true do
it 'returns nil' do
expect(subject).to be_nil
end
+
+ it 'does not create a system note metadata record' do
+ expect { subject }.not_to change{ SystemNoteMetadata.count }
+ end
end
context 'when cross-reference allowed' do
@@ -340,6 +387,10 @@ describe SystemNoteService, services: true do
expect(described_class).to receive(:cross_reference_disallowed?).and_return(false)
end
+ it_behaves_like 'a system note' do
+ let(:action) { 'cross_reference' }
+ end
+
describe 'note_body' do
context 'cross-project' do
let(:project2) { create(:project, :repository) }
@@ -552,6 +603,9 @@ describe SystemNoteService, services: true do
let(:direction) { :to }
it_behaves_like 'cross project mentionable'
+ it_behaves_like 'a system note' do
+ let(:action) { 'moved' }
+ end
it 'notifies about noteable being moved to' do
expect(subject.note).to match('moved to')
@@ -562,6 +616,9 @@ describe SystemNoteService, services: true do
let(:direction) { :from }
it_behaves_like 'cross project mentionable'
+ it_behaves_like 'a system note' do
+ let(:action) { 'moved' }
+ end
it 'notifies about noteable being moved from' do
expect(subject.note).to match('moved from')
@@ -732,33 +789,34 @@ describe SystemNoteService, services: true do
let(:merge_request) { discussion.noteable }
let(:project) { merge_request.source_project }
let(:issue) { create(:issue, project: project) }
- let(:user) { create(:user) }
def reloaded_merge_request
MergeRequest.find(merge_request.id)
end
- before do
- project.team << [user, :developer]
+ subject { described_class.discussion_continued_in_issue(discussion, project, author, issue) }
+
+ it_behaves_like 'a system note' do
+ let(:expected_noteable) { discussion.first_note.noteable }
+ let(:action) { 'discussion' }
end
it 'creates a new note in the discussion' do
# we need to completely rebuild the merge request object, or the `@discussions` on the merge request are not reloaded.
- expect { SystemNoteService.discussion_continued_in_issue(discussion, project, user, issue) }.
- to change { reloaded_merge_request.discussions.first.notes.size }.by(1)
+ expect { subject }.to change { reloaded_merge_request.discussions.first.notes.size }.by(1)
end
it 'mentions the created issue in the system note' do
- note = SystemNoteService.discussion_continued_in_issue(discussion, project, user, issue)
-
- expect(note.note).to include(issue.to_reference)
+ expect(subject.note).to include(issue.to_reference)
end
end
describe '.change_time_estimate' do
subject { described_class.change_time_estimate(noteable, project, author) }
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'time_tracking' }
+ end
context 'with a time estimate' do
it 'sets the note text' do
@@ -788,7 +846,9 @@ describe SystemNoteService, services: true do
described_class.change_time_spent(noteable, project, author)
end
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'time_tracking' }
+ end
context 'when time was added' do
it 'sets the note text' do
@@ -820,6 +880,34 @@ describe SystemNoteService, services: true do
end
end
+ describe '.remove_merge_request_wip' do
+ let(:noteable) { create(:issue, project: project, title: 'WIP: Lorem ipsum') }
+
+ subject { described_class.remove_merge_request_wip(noteable, project, author) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'title' }
+ end
+
+ it 'sets the note text' do
+ expect(subject.note).to eq 'unmarked as a **Work In Progress**'
+ end
+ end
+
+ describe '.add_merge_request_wip' do
+ let(:noteable) { create(:issue, project: project, title: 'Lorem ipsum') }
+
+ subject { described_class.add_merge_request_wip(noteable, project, author) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'title' }
+ end
+
+ it 'sets the note text' do
+ expect(subject.note).to eq 'marked as a **Work In Progress**'
+ end
+ end
+
describe '.add_merge_request_wip_from_commit' do
let(:project) { create(:project, :repository) }
let(:noteable) do
@@ -835,7 +923,9 @@ describe SystemNoteService, services: true do
)
end
- it_behaves_like 'a system note'
+ it_behaves_like 'a system note' do
+ let(:action) { 'title' }
+ end
it "posts the 'marked as a Work In Progress from commit' system note" do
expect(subject.note).to match(
@@ -843,4 +933,33 @@ describe SystemNoteService, services: true do
)
end
end
+
+ describe '.change_task_status' do
+ let(:noteable) { create(:issue, project: project) }
+ let(:task) { double(:task, complete?: true, source: 'task') }
+
+ subject { described_class.change_task_status(noteable, project, author, task) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'task' }
+ end
+
+ it "posts the 'marked as a Work In Progress from commit' system note" do
+ expect(subject.note).to eq("marked the task **task** as completed")
+ end
+ end
+
+ describe '.resolve_all_discussions' do
+ let(:noteable) { create(:merge_request, source_project: project, target_project: project) }
+
+ subject { described_class.resolve_all_discussions(noteable, project, author) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'discussion' }
+ end
+
+ it 'sets the note text' do
+ expect(subject.note).to eq 'resolved all discussions'
+ end
+ end
end
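Note on the repeated `it_behaves_like 'a system note'` blocks above: they now pass an `action` (and occasionally an `expected_noteable`) into a shared example that also checks the new system note metadata. A minimal sketch of what such a shared example could look like, assuming Note exposes a `system_note_metadata` association (the real shared example in this spec may verify more):

# Sketch only: assumes a system_note_metadata association carrying the
# action string asserted by the examples above.
shared_examples 'a system note' do
  let(:expected_noteable) { noteable }

  it 'has the expected attributes and metadata action' do
    expect(subject).to be_valid
    expect(subject).to be_system
    expect(subject.noteable).to eq expected_noteable
    expect(subject.project).to eq project
    expect(subject.author).to eq author
    expect(subject.system_note_metadata.action).to eq action
  end
end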
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index f9e432bb216..89b3b6aad10 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -8,10 +8,12 @@ describe TodoService, services: true do
let(:guest) { create(:user) }
let(:admin) { create(:admin) }
let(:john_doe) { create(:user) }
+ let(:skipped) { create(:user) }
+ let(:skip_users) { [skipped] }
let(:project) { create(:empty_project) }
- let(:mentions) { 'FYI: ' + [author, assignee, john_doe, member, guest, non_member, admin].map(&:to_reference).join(' ') }
- let(:directly_addressed) { [author, assignee, john_doe, member, guest, non_member, admin].map(&:to_reference).join(' ') }
- let(:directly_addressed_and_mentioned) { member.to_reference + ", what do you think? cc: " + [guest, admin].map(&:to_reference).join(' ') }
+ let(:mentions) { 'FYI: ' + [author, assignee, john_doe, member, guest, non_member, admin, skipped].map(&:to_reference).join(' ') }
+ let(:directly_addressed) { [author, assignee, john_doe, member, guest, non_member, admin, skipped].map(&:to_reference).join(' ') }
+ let(:directly_addressed_and_mentioned) { member.to_reference + ", what do you think? cc: " + [guest, admin, skipped].map(&:to_reference).join(' ') }
let(:service) { described_class.new }
before do
@@ -19,6 +21,7 @@ describe TodoService, services: true do
project.team << [author, :developer]
project.team << [member, :developer]
project.team << [john_doe, :developer]
+ project.team << [skipped, :developer]
end
describe 'Issues' do
@@ -119,46 +122,61 @@ describe TodoService, services: true do
end
describe '#update_issue' do
- it 'creates a todo for each valid mentioned user' do
- service.update_issue(issue, author)
+ it 'creates a todo for each valid mentioned user not included in skip_users' do
+ service.update_issue(issue, author, skip_users)
should_create_todo(user: member, target: issue, action: Todo::MENTIONED)
should_create_todo(user: guest, target: issue, action: Todo::MENTIONED)
should_create_todo(user: john_doe, target: issue, action: Todo::MENTIONED)
should_create_todo(user: author, target: issue, action: Todo::MENTIONED)
should_not_create_todo(user: non_member, target: issue, action: Todo::MENTIONED)
+ should_not_create_todo(user: skipped, target: issue, action: Todo::MENTIONED)
end
- it 'creates a todo for each valid user based on the type of mention' do
+ it 'creates a todo for each valid user not included in skip_users based on the type of mention' do
issue.update(description: directly_addressed_and_mentioned)
- service.update_issue(issue, author)
+ service.update_issue(issue, author, skip_users)
should_create_todo(user: member, target: issue, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: guest, target: issue, action: Todo::MENTIONED)
should_create_todo(user: admin, target: issue, action: Todo::MENTIONED)
+ should_not_create_todo(user: skipped, target: issue)
end
- it 'creates a directly addressed todo for each valid addressed user' do
- service.update_issue(addressed_issue, author)
+ it 'creates a directly addressed todo for each valid addressed user not included in skip_users' do
+ service.update_issue(addressed_issue, author, skip_users)
should_create_todo(user: member, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: guest, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: john_doe, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: author, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: non_member, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: skipped, target: addressed_issue, action: Todo::DIRECTLY_ADDRESSED)
end
- it 'does not create a todo if user was already mentioned' do
+ it 'does not create a todo if user was already mentioned and todo is pending' do
create(:todo, :mentioned, user: member, project: project, target: issue, author: author)
- expect { service.update_issue(issue, author) }.not_to change(member.todos, :count)
+ expect { service.update_issue(issue, author, skip_users) }.not_to change(member.todos, :count)
+ end
+
+ it 'does not create a todo if user was already mentioned and todo is done' do
+ create(:todo, :mentioned, :done, user: skipped, project: project, target: issue, author: author)
+
+ expect { service.update_issue(issue, author, skip_users) }.not_to change(skipped.todos, :count)
end
- it 'does not create a directly addressed todo if user was already mentioned or addressed' do
+ it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is pending' do
create(:todo, :directly_addressed, user: member, project: project, target: addressed_issue, author: author)
- expect { service.update_issue(addressed_issue, author) }.not_to change(member.todos, :count)
+ expect { service.update_issue(addressed_issue, author, skip_users) }.not_to change(member.todos, :count)
+ end
+
+ it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is done' do
+ create(:todo, :directly_addressed, :done, user: skipped, project: project, target: addressed_issue, author: author)
+
+ expect { service.update_issue(addressed_issue, author, skip_users) }.not_to change(skipped.todos, :count)
end
it 'does not create todo if user can not see the issue when issue is confidential' do
@@ -521,47 +539,62 @@ describe TodoService, services: true do
end
describe '#update_merge_request' do
- it 'creates a todo for each valid mentioned user' do
- service.update_merge_request(mr_assigned, author)
+ it 'creates a todo for each valid mentioned user not included in skip_users' do
+ service.update_merge_request(mr_assigned, author, skip_users)
should_create_todo(user: member, target: mr_assigned, action: Todo::MENTIONED)
should_not_create_todo(user: guest, target: mr_assigned, action: Todo::MENTIONED)
should_create_todo(user: john_doe, target: mr_assigned, action: Todo::MENTIONED)
should_create_todo(user: author, target: mr_assigned, action: Todo::MENTIONED)
should_not_create_todo(user: non_member, target: mr_assigned, action: Todo::MENTIONED)
+ should_not_create_todo(user: skipped, target: mr_assigned, action: Todo::MENTIONED)
end
- it 'creates a todo for each valid user based on the type of mention' do
+ it 'creates a todo for each valid user not included in skip_users based on the type of mention' do
mr_assigned.update(description: directly_addressed_and_mentioned)
- service.update_merge_request(mr_assigned, author)
+ service.update_merge_request(mr_assigned, author, skip_users)
should_create_todo(user: member, target: mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: admin, target: mr_assigned, action: Todo::MENTIONED)
+ should_not_create_todo(user: skipped, target: mr_assigned)
end
- it 'creates a directly addressed todo for each valid addressed user' do
- service.update_merge_request(addressed_mr_assigned, author)
+ it 'creates a directly addressed todo for each valid addressed user not included in skip_users' do
+ service.update_merge_request(addressed_mr_assigned, author, skip_users)
should_create_todo(user: member, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: guest, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: john_doe, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
should_create_todo(user: author, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
should_not_create_todo(user: non_member, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: skipped, target: addressed_mr_assigned, action: Todo::DIRECTLY_ADDRESSED)
end
- it 'does not create a todo if user was already mentioned' do
+ it 'does not create a todo if user was already mentioned and todo is pending' do
create(:todo, :mentioned, user: member, project: project, target: mr_assigned, author: author)
expect { service.update_merge_request(mr_assigned, author) }.not_to change(member.todos, :count)
end
- it 'does not create a directly addressed todo if user was already mentioned or addressed' do
+ it 'does not create a todo if user was already mentioned and todo is done' do
+ create(:todo, :mentioned, :done, user: skipped, project: project, target: mr_assigned, author: author)
+
+ expect { service.update_merge_request(mr_assigned, author, skip_users) }.not_to change(skipped.todos, :count)
+ end
+
+ it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is pending' do
create(:todo, :directly_addressed, user: member, project: project, target: addressed_mr_assigned, author: author)
expect{ service.update_merge_request(addressed_mr_assigned, author) }.not_to change(member.todos, :count)
end
+ it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is done' do
+ create(:todo, :directly_addressed, :done, user: skipped, project: project, target: addressed_mr_assigned, author: author)
+
+ expect{ service.update_merge_request(addressed_mr_assigned, author, skip_users) }.not_to change(skipped.todos, :count)
+ end
+
context 'with a task list' do
it 'does not create todo when tasks are marked as completed' do
mr_assigned.update(description: "- [x] Task 1\n- [X] Task 2 #{mentions}")
@@ -757,6 +790,69 @@ describe TodoService, services: true do
end
end
+ describe '#update_note' do
+ let(:noteable) { create(:issue, project: project) }
+ let(:note) { create(:note, project: project, note: mentions, noteable: noteable) }
+ let(:addressed_note) { create(:note, project: project, note: directly_addressed, noteable: noteable) }
+
+ it 'creates a todo for each valid mentioned user not included in skip_users' do
+ service.update_note(note, author, skip_users)
+
+ should_create_todo(user: member, target: noteable, action: Todo::MENTIONED)
+ should_create_todo(user: guest, target: noteable, action: Todo::MENTIONED)
+ should_create_todo(user: john_doe, target: noteable, action: Todo::MENTIONED)
+ should_create_todo(user: author, target: noteable, action: Todo::MENTIONED)
+ should_not_create_todo(user: non_member, target: noteable, action: Todo::MENTIONED)
+ should_not_create_todo(user: skipped, target: noteable, action: Todo::MENTIONED)
+ end
+
+ it 'creates a todo for each valid user not included in skip_users based on the type of mention' do
+ note.update(note: directly_addressed_and_mentioned)
+
+ service.update_note(note, author, skip_users)
+
+ should_create_todo(user: member, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
+ should_create_todo(user: guest, target: noteable, action: Todo::MENTIONED)
+ should_create_todo(user: admin, target: noteable, action: Todo::MENTIONED)
+ should_not_create_todo(user: skipped, target: noteable)
+ end
+
+ it 'creates a directly addressed todo for each valid addressed user not included in skip_users' do
+ service.update_note(addressed_note, author, skip_users)
+
+ should_create_todo(user: member, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
+ should_create_todo(user: guest, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
+ should_create_todo(user: john_doe, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
+ should_create_todo(user: author, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: non_member, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
+ should_not_create_todo(user: skipped, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
+ end
+
+ it 'does not create a todo if user was already mentioned and todo is pending' do
+ create(:todo, :mentioned, user: member, project: project, target: noteable, author: author)
+
+ expect { service.update_note(note, author, skip_users) }.not_to change(member.todos, :count)
+ end
+
+ it 'does not create a todo if user was already mentioned and todo is done' do
+ create(:todo, :mentioned, :done, user: skipped, project: project, target: noteable, author: author)
+
+ expect { service.update_note(note, author, skip_users) }.not_to change(skipped.todos, :count)
+ end
+
+ it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is pending' do
+ create(:todo, :directly_addressed, user: member, project: project, target: noteable, author: author)
+
+ expect { service.update_note(addressed_note, author, skip_users) }.not_to change(member.todos, :count)
+ end
+
+ it 'does not create a directly addressed todo if user was already mentioned or addressed and todo is done' do
+ create(:todo, :directly_addressed, :done, user: skipped, project: project, target: noteable, author: author)
+
+ expect { service.update_note(addressed_note, author, skip_users) }.not_to change(skipped.todos, :count)
+ end
+ end
+
it 'updates cached counts when a todo is created' do
issue = create(:issue, project: project, assignee: john_doe, author: author, description: mentions)
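The new `skip_users` argument asserted throughout `#update_issue`, `#update_merge_request` and `#update_note` suggests the service filters mentioned users before creating todos. A hypothetical sketch of that filtering (helper names here are invented for illustration and do not mirror the real TodoService internals):

# Hypothetical sketch of skip_users handling; todo_exists_for? and
# create_mention_todo are invented helpers, not GitLab's actual methods.
class TodoServiceSketch
  def update_issue(issue, current_user, skip_users = [])
    mentioned = issue.mentioned_users(current_user) - skip_users
    mentioned.each do |user|
      next if todo_exists_for?(user, issue) # pending or done todos both block re-creation
      create_mention_todo(user, issue, current_user)
    end
  end
end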
diff --git a/spec/services/users/create_service_spec.rb b/spec/services/users/create_service_spec.rb
index 5f79203701a..66f68650f81 100644
--- a/spec/services/users/create_service_spec.rb
+++ b/spec/services/users/create_service_spec.rb
@@ -61,6 +61,23 @@ describe Users::CreateService, services: true do
)
end
+ context 'when the current_user is not persisted' do
+ let(:admin_user) { build(:admin) }
+
+ it 'persists the given attributes and sets created_by_id to nil' do
+ user = service.execute
+ user.reload
+
+ expect(user).to have_attributes(
+ name: params[:name],
+ username: params[:username],
+ email: params[:email],
+ password: params[:password],
+ created_by_id: nil
+ )
+ end
+ end
+
it 'user is not confirmed if skip_confirmation param is not present' do
expect(service.execute).not_to be_confirmed
end
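The unpersisted `current_user` context above implies `created_by_id` is only taken from a persisted user. Illustrative only, one way a create service could derive it:

# Illustrative only: a persisted current_user contributes created_by_id,
# an unpersisted one (built but never saved) yields nil.
def build_user_attributes(params, current_user)
  created_by_id = current_user.id if current_user&.persisted?
  params.merge(created_by_id: created_by_id)
end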
diff --git a/spec/support/controllers/githubish_import_controller_shared_examples.rb b/spec/support/controllers/githubish_import_controller_shared_examples.rb
index d0fd2d52004..51f1015f43c 100644
--- a/spec/support/controllers/githubish_import_controller_shared_examples.rb
+++ b/spec/support/controllers/githubish_import_controller_shared_examples.rb
@@ -228,5 +228,19 @@ shared_examples 'a GitHub-ish import controller: POST create' do
post :create, { new_name: test_name, format: :js }
end
end
+
+ context 'user has chosen a nested namespace and name for the project' do
+ let(:parent_namespace) { create(:namespace, name: 'foo', owner: user) }
+ let(:nested_namespace) { create(:namespace, name: 'bar', parent: parent_namespace, owner: user) }
+ let(:test_name) { 'test_name' }
+
+ it 'takes the selected namespace and name' do
+ expect(Gitlab::GithubImport::ProjectCreator).
+ to receive(:new).with(provider_repo, test_name, nested_namespace, user, access_params, type: provider).
+ and_return(double(execute: true))
+
+ post :create, { target_namespace: nested_namespace.full_path, new_name: test_name, format: :js }
+ end
+ end
end
end
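The nested-namespace example above posts `nested_namespace.full_path` as `target_namespace`, so the controller presumably resolves the namespace by its full path rather than by plain name. A hedged sketch of that lookup (the exact controller code may differ):

# Sketch of resolving a possibly nested target namespace by full path;
# falls back to the current user's own namespace when nothing matches.
def find_target_namespace(params, current_user)
  Namespace.find_by_full_path(params[:target_namespace]) || current_user.namespace
end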
diff --git a/spec/support/notify_shared_examples.rb b/spec/support/notify_shared_examples.rb
index 16a425f2ca2..76411065265 100644
--- a/spec/support/notify_shared_examples.rb
+++ b/spec/support/notify_shared_examples.rb
@@ -3,7 +3,7 @@ shared_context 'gitlab email notification' do
let(:gitlab_sender) { Gitlab.config.gitlab.email_from }
let(:gitlab_sender_reply_to) { Gitlab.config.gitlab.email_reply_to }
let(:recipient) { create(:user, email: 'recipient@example.com') }
- let(:project) { create(:project) }
+ let(:project) { create(:empty_project) }
let(:new_user_address) { 'newguy@example.com' }
before do
diff --git a/spec/support/select2_helper.rb b/spec/support/select2_helper.rb
index 0d526045012..6b1853c2364 100644
--- a/spec/support/select2_helper.rb
+++ b/spec/support/select2_helper.rb
@@ -22,4 +22,12 @@ module Select2Helper
execute_script("$('#{selector}').select2('val', '#{value}').trigger('change');")
end
end
+
+ def open_select2(selector)
+ execute_script("$('#{selector}').select2('open');")
+ end
+
+ def scroll_select2_to_bottom(selector)
+ evaluate_script "$('#{selector}').scrollTop($('#{selector}')[0].scrollHeight); $('#{selector}');"
+ end
end
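A feature spec could exercise the two new helpers roughly like this (illustrative usage; the selectors are assumptions, not taken from this diff):

# Illustrative usage of the new Select2Helper methods; selectors are assumed.
it 'loads additional results when the dropdown is scrolled', js: true do
  open_select2('#link_group_id')
  scroll_select2_to_bottom('.select2-drop .select2-results:visible')

  expect(page).to have_css('.select2-result')
end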
diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb
index 54e222e44bf..78be23bd853 100644
--- a/spec/support/test_env.rb
+++ b/spec/support/test_env.rb
@@ -40,7 +40,7 @@ module TestEnv
'csv' => '3dd0896',
'v1.1.0' => 'b83d6e3'
}.freeze
-
+
# gitlab-test-fork is a fork of gitlab-fork, but we don't necessarily
# need to keep all the branches in sync.
# We currently only need a subset of the branches
@@ -92,7 +92,7 @@ module TestEnv
tmp_test_path = Rails.root.join('tmp', 'tests', '**')
Dir[tmp_test_path].each do |entry|
- unless File.basename(entry) =~ /\Agitlab-(shell|test|test_bare|test-fork)\z/
+ unless File.basename(entry) =~ /\Agitlab-(shell|test|test_bare|test-fork|test-fork_bare)\z/
FileUtils.rm_rf(entry)
end
end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index 665c8fe556a..daea0c6bb37 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -116,7 +116,7 @@ describe 'gitlab:app namespace rake task' do
end
describe 'backup creation and deletion using custom_hooks' do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:user_backup_path) { "repositories/#{project.path_with_namespace}" }
before(:each) do
@@ -224,8 +224,8 @@ describe 'gitlab:app namespace rake task' do
end
context 'multiple repository storages' do
- let(:project_a) { create(:project, repository_storage: 'default') }
- let(:project_b) { create(:project, repository_storage: 'custom') }
+ let(:project_a) { create(:project, :repository, repository_storage: 'default') }
+ let(:project_b) { create(:project, :repository, repository_storage: 'custom') }
before do
FileUtils.mkdir('tmp/tests/default_storage')
diff --git a/spec/views/projects/builds/_build.html.haml_spec.rb b/spec/views/projects/builds/_build.html.haml_spec.rb
index e141a117731..751482cac42 100644
--- a/spec/views/projects/builds/_build.html.haml_spec.rb
+++ b/spec/views/projects/builds/_build.html.haml_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe 'projects/ci/builds/_build' do
include Devise::Test::ControllerHelpers
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_empty_pipeline, id: 1337, project: project, sha: project.commit.id) }
let(:build) { create(:ci_build, pipeline: pipeline, stage: 'test', stage_idx: 1, name: 'rspec 0:2', status: :pending) }
diff --git a/spec/views/projects/builds/_generic_commit_status.html.haml_spec.rb b/spec/views/projects/builds/_generic_commit_status.html.haml_spec.rb
index 49b20e5b36b..dc2ffc9dc47 100644
--- a/spec/views/projects/builds/_generic_commit_status.html.haml_spec.rb
+++ b/spec/views/projects/builds/_generic_commit_status.html.haml_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe 'projects/generic_commit_statuses/_generic_commit_status.html.haml' do
include Devise::Test::ControllerHelpers
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_empty_pipeline, id: 1337, project: project, sha: project.commit.id) }
let(:generic_commit_status) { create(:generic_commit_status, pipeline: pipeline, stage: 'external', name: 'jenkins', stage_idx: 3) }
diff --git a/spec/views/projects/builds/show.html.haml_spec.rb b/spec/views/projects/builds/show.html.haml_spec.rb
index ec78ac30593..55b64808fb3 100644
--- a/spec/views/projects/builds/show.html.haml_spec.rb
+++ b/spec/views/projects/builds/show.html.haml_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe 'projects/builds/show', :view do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:pipeline) do
diff --git a/spec/views/projects/commit/_commit_box.html.haml_spec.rb b/spec/views/projects/commit/_commit_box.html.haml_spec.rb
index 8bc344bfbf6..cec87dcecc8 100644
--- a/spec/views/projects/commit/_commit_box.html.haml_spec.rb
+++ b/spec/views/projects/commit/_commit_box.html.haml_spec.rb
@@ -4,7 +4,7 @@ describe 'projects/commit/_commit_box.html.haml' do
include Devise::Test::ControllerHelpers
let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
before do
assign(:project, project)
diff --git a/spec/views/projects/issues/_related_branches.html.haml_spec.rb b/spec/views/projects/issues/_related_branches.html.haml_spec.rb
index 889d9a38887..8c845251765 100644
--- a/spec/views/projects/issues/_related_branches.html.haml_spec.rb
+++ b/spec/views/projects/issues/_related_branches.html.haml_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe 'projects/issues/_related_branches' do
include Devise::Test::ControllerHelpers
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:branch) { project.repository.find_branch('feature') }
let!(:pipeline) { create(:ci_pipeline, project: project, sha: branch.dereferenced_target.id, ref: 'feature') }
diff --git a/spec/views/projects/merge_requests/_commits.html.haml_spec.rb b/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
index 6f70b3daf8e..4052dbf8df3 100644
--- a/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
+++ b/spec/views/projects/merge_requests/_commits.html.haml_spec.rb
@@ -4,11 +4,8 @@ describe 'projects/merge_requests/show/_commits.html.haml' do
include Devise::Test::ControllerHelpers
let(:user) { create(:user) }
- let(:target_project) { create(:project) }
-
- let(:source_project) do
- create(:project, forked_from_project: target_project)
- end
+ let(:target_project) { create(:project, :repository) }
+ let(:source_project) { create(:project, :repository, forked_from_project: target_project) }
let(:merge_request) do
create(:merge_request, :simple,
diff --git a/spec/views/projects/merge_requests/edit.html.haml_spec.rb b/spec/views/projects/merge_requests/edit.html.haml_spec.rb
index 3650b22c389..69c7d0cbf28 100644
--- a/spec/views/projects/merge_requests/edit.html.haml_spec.rb
+++ b/spec/views/projects/merge_requests/edit.html.haml_spec.rb
@@ -4,8 +4,8 @@ describe 'projects/merge_requests/edit.html.haml' do
include Devise::Test::ControllerHelpers
let(:user) { create(:user) }
- let(:project) { create(:project) }
- let(:fork_project) { create(:project, forked_from_project: project) }
+ let(:project) { create(:project, :repository) }
+ let(:fork_project) { create(:project, :repository, forked_from_project: project) }
let(:unlink_project) { Projects::UnlinkForkService.new(fork_project, user) }
let(:milestone) { create(:milestone, project: project) }
diff --git a/spec/views/projects/merge_requests/show.html.haml_spec.rb b/spec/views/projects/merge_requests/show.html.haml_spec.rb
index 7f123b15194..dc2fcc3e715 100644
--- a/spec/views/projects/merge_requests/show.html.haml_spec.rb
+++ b/spec/views/projects/merge_requests/show.html.haml_spec.rb
@@ -4,8 +4,8 @@ describe 'projects/merge_requests/show.html.haml' do
include Devise::Test::ControllerHelpers
let(:user) { create(:user) }
- let(:project) { create(:project) }
- let(:fork_project) { create(:project, forked_from_project: project) }
+ let(:project) { create(:project, :repository) }
+ let(:fork_project) { create(:project, :repository, forked_from_project: project) }
let(:unlink_project) { Projects::UnlinkForkService.new(fork_project, user) }
let(:note) { create(:note_on_merge_request, project: project, noteable: closed_merge_request) }
diff --git a/spec/views/projects/pipelines/_stage.html.haml_spec.rb b/spec/views/projects/pipelines/_stage.html.haml_spec.rb
index 65f9d0125e6..10095ad7694 100644
--- a/spec/views/projects/pipelines/_stage.html.haml_spec.rb
+++ b/spec/views/projects/pipelines/_stage.html.haml_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe 'projects/pipelines/_stage', :view do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:stage) { build(:ci_stage, pipeline: pipeline) }
diff --git a/spec/views/projects/pipelines/show.html.haml_spec.rb b/spec/views/projects/pipelines/show.html.haml_spec.rb
index e4aeaeca508..dca78dec6df 100644
--- a/spec/views/projects/pipelines/show.html.haml_spec.rb
+++ b/spec/views/projects/pipelines/show.html.haml_spec.rb
@@ -4,7 +4,7 @@ describe 'projects/pipelines/show' do
include Devise::Test::ControllerHelpers
let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: project.commit.id, user: user) }
before do
diff --git a/spec/views/projects/tree/show.html.haml_spec.rb b/spec/views/projects/tree/show.html.haml_spec.rb
index c381b1a86df..900f8d4732f 100644
--- a/spec/views/projects/tree/show.html.haml_spec.rb
+++ b/spec/views/projects/tree/show.html.haml_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe 'projects/tree/show' do
include Devise::Test::ControllerHelpers
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
before do
diff --git a/spec/workers/delete_merged_branches_worker_spec.rb b/spec/workers/delete_merged_branches_worker_spec.rb
index d9497bd486c..39009d9e4b2 100644
--- a/spec/workers/delete_merged_branches_worker_spec.rb
+++ b/spec/workers/delete_merged_branches_worker_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe DeleteMergedBranchesWorker do
subject(:worker) { described_class.new }
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
describe "#perform" do
it "calls DeleteMergedBranchesService" do
diff --git a/spec/workers/emails_on_push_worker_spec.rb b/spec/workers/emails_on_push_worker_spec.rb
index f27e413f7b8..8cf2b888f9a 100644
--- a/spec/workers/emails_on_push_worker_spec.rb
+++ b/spec/workers/emails_on_push_worker_spec.rb
@@ -5,7 +5,7 @@ describe EmailsOnPushWorker do
include EmailHelpers
include EmailSpec::Matchers
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:data) { Gitlab::DataBuilder::Push.build_sample(project, user) }
let(:recipients) { user.email }
diff --git a/spec/workers/git_garbage_collect_worker_spec.rb b/spec/workers/git_garbage_collect_worker_spec.rb
index a60af574a08..029f35512e0 100644
--- a/spec/workers/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/git_garbage_collect_worker_spec.rb
@@ -3,7 +3,7 @@ require 'fileutils'
require 'spec_helper'
describe GitGarbageCollectWorker do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:shell) { Gitlab::Shell.new }
subject { GitGarbageCollectWorker.new }
diff --git a/spec/workers/group_destroy_worker_spec.rb b/spec/workers/group_destroy_worker_spec.rb
index 4e4eaf9b2f7..1ff5a3b9034 100644
--- a/spec/workers/group_destroy_worker_spec.rb
+++ b/spec/workers/group_destroy_worker_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe GroupDestroyWorker do
let(:group) { create(:group) }
let(:user) { create(:admin) }
- let!(:project) { create(:project, namespace: group) }
+ let!(:project) { create(:empty_project, namespace: group) }
subject { GroupDestroyWorker.new }
diff --git a/spec/workers/pipeline_metrics_worker_spec.rb b/spec/workers/pipeline_metrics_worker_spec.rb
index 2d47d93acec..5dbc0da95c2 100644
--- a/spec/workers/pipeline_metrics_worker_spec.rb
+++ b/spec/workers/pipeline_metrics_worker_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe PipelineMetricsWorker do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let!(:merge_request) { create(:merge_request, source_project: project, source_branch: pipeline.ref) }
let(:pipeline) do
diff --git a/spec/workers/pipeline_notification_worker_spec.rb b/spec/workers/pipeline_notification_worker_spec.rb
index 603ae52ed1e..5a7ce2e08c4 100644
--- a/spec/workers/pipeline_notification_worker_spec.rb
+++ b/spec/workers/pipeline_notification_worker_spec.rb
@@ -11,7 +11,7 @@ describe PipelineNotificationWorker do
status: status)
end
- let(:project) { create(:project, public_builds: false) }
+ let(:project) { create(:project, :repository, public_builds: false) }
let(:user) { create(:user) }
let(:pusher) { user }
let(:watcher) { pusher }
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index 7bcb5521202..a2a559a2369 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -4,7 +4,7 @@ describe PostReceive do
let(:changes) { "123456 789012 refs/heads/tést\n654321 210987 refs/tags/tag" }
let(:wrongly_encoded_changes) { changes.encode("ISO-8859-1").force_encoding("UTF-8") }
let(:base64_changes) { Base64.encode64(wrongly_encoded_changes) }
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:key) { create(:key, user: project.owner) }
let(:key_id) { key.shell_id }
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index 75c7fc1efd2..1c383d0514d 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe ProcessCommitWorker do
let(:worker) { described_class.new }
let(:user) { create(:user) }
- let(:project) { create(:project, :public) }
+ let(:project) { create(:project, :public, :repository) }
let(:issue) { create(:issue, project: project, author: user) }
let(:commit) { project.commit }
diff --git a/spec/workers/project_cache_worker_spec.rb b/spec/workers/project_cache_worker_spec.rb
index f4f63b57a5f..c23ffdf99c0 100644
--- a/spec/workers/project_cache_worker_spec.rb
+++ b/spec/workers/project_cache_worker_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe ProjectCacheWorker do
let(:worker) { described_class.new }
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:statistics) { project.statistics }
describe '#perform' do
diff --git a/spec/workers/project_destroy_worker_spec.rb b/spec/workers/project_destroy_worker_spec.rb
index 1f4c39eb64a..0ab42f99510 100644
--- a/spec/workers/project_destroy_worker_spec.rb
+++ b/spec/workers/project_destroy_worker_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe ProjectDestroyWorker do
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:path) { project.repository.path_to_repo }
subject { ProjectDestroyWorker.new }
diff --git a/spec/workers/repository_check/batch_worker_spec.rb b/spec/workers/repository_check/batch_worker_spec.rb
index 27727d6abf9..bcd97a4f6ef 100644
--- a/spec/workers/repository_check/batch_worker_spec.rb
+++ b/spec/workers/repository_check/batch_worker_spec.rb
@@ -4,7 +4,7 @@ describe RepositoryCheck::BatchWorker do
subject { described_class.new }
it 'prefers projects that have never been checked' do
- projects = create_list(:project, 3, created_at: 1.week.ago)
+ projects = create_list(:empty_project, 3, created_at: 1.week.ago)
projects[0].update_column(:last_repository_check_at, 4.months.ago)
projects[2].update_column(:last_repository_check_at, 3.months.ago)
@@ -12,7 +12,7 @@ describe RepositoryCheck::BatchWorker do
end
it 'sorts projects by last_repository_check_at' do
- projects = create_list(:project, 3, created_at: 1.week.ago)
+ projects = create_list(:empty_project, 3, created_at: 1.week.ago)
projects[0].update_column(:last_repository_check_at, 2.months.ago)
projects[1].update_column(:last_repository_check_at, 4.months.ago)
projects[2].update_column(:last_repository_check_at, 3.months.ago)
@@ -21,7 +21,7 @@ describe RepositoryCheck::BatchWorker do
end
it 'excludes projects that were checked recently' do
- projects = create_list(:project, 3, created_at: 1.week.ago)
+ projects = create_list(:empty_project, 3, created_at: 1.week.ago)
projects[0].update_column(:last_repository_check_at, 2.days.ago)
projects[1].update_column(:last_repository_check_at, 2.months.ago)
projects[2].update_column(:last_repository_check_at, 3.days.ago)
@@ -40,7 +40,7 @@ describe RepositoryCheck::BatchWorker do
it 'skips projects created less than 24 hours ago' do
project = create(:empty_project)
project.update_column(:created_at, 23.hours.ago)
-
+
expect(subject.perform).to eq([])
end
end
diff --git a/spec/workers/repository_fork_worker_spec.rb b/spec/workers/repository_fork_worker_spec.rb
index 87521ae408e..7d6a2db2972 100644
--- a/spec/workers/repository_fork_worker_spec.rb
+++ b/spec/workers/repository_fork_worker_spec.rb
@@ -1,8 +1,8 @@
require 'spec_helper'
describe RepositoryForkWorker do
- let(:project) { create(:project) }
- let(:fork_project) { create(:project, forked_from_project: project) }
+ let(:project) { create(:project, :repository) }
+ let(:fork_project) { create(:project, :repository, forked_from_project: project) }
let(:shell) { Gitlab::Shell.new }
subject { RepositoryForkWorker.new }
diff --git a/spec/workers/repository_import_worker_spec.rb b/spec/workers/repository_import_worker_spec.rb
index c42f3147b7a..fbb22439f33 100644
--- a/spec/workers/repository_import_worker_spec.rb
+++ b/spec/workers/repository_import_worker_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe RepositoryImportWorker do
- let(:project) { create(:project) }
+ let(:project) { create(:empty_project) }
subject { described_class.new }
diff --git a/spec/workers/update_merge_requests_worker_spec.rb b/spec/workers/update_merge_requests_worker_spec.rb
index 262d6e5a9ab..558ff9109ec 100644
--- a/spec/workers/update_merge_requests_worker_spec.rb
+++ b/spec/workers/update_merge_requests_worker_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
describe UpdateMergeRequestsWorker do
include RepoHelpers
- let(:project) { create(:project) }
+ let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
subject { described_class.new }
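The recurring switch from `create(:project)` to `create(:project, :repository)` (or `:empty_project`) in the worker and view specs above points at factories where the bare project no longer provisions a Git repository. A rough sketch of that factory shape, assuming FactoryGirl as used at the time:

# Rough sketch only; the real factories define many more traits.
FactoryGirl.define do
  factory :empty_project, class: 'Project'

  factory :project, parent: :empty_project do
    trait :repository do
      # Populate an actual Git repository only when a spec opts in.
      after(:create) { |project| project.create_repository }
    end
  end
end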
diff --git a/vendor/assets/javascripts/notebooklab.js b/vendor/assets/javascripts/notebooklab.js
index 35397019173..296271205d1 100644
--- a/vendor/assets/javascripts/notebooklab.js
+++ b/vendor/assets/javascripts/notebooklab.js
@@ -73,7 +73,7 @@ return /******/ (function(modules) { // webpackBootstrap
/******/ __webpack_require__.p = "";
/******/
/******/ // Load entry module and return exports
-/******/ return __webpack_require__(__webpack_require__.s = 32);
+/******/ return __webpack_require__(__webpack_require__.s = 47);
/******/ })
/************************************************************************/
/******/ ([
@@ -137,15 +137,97 @@ module.exports = function normalizeComponent (
/* 1 */
/***/ (function(module, exports, __webpack_require__) {
+/* WEBPACK VAR INJECTION */(function(Buffer) {/*
+ MIT License http://www.opensource.org/licenses/mit-license.php
+ Author Tobias Koppers @sokra
+*/
+// css base code, injected by the css-loader
+module.exports = function(useSourceMap) {
+ var list = [];
+
+ // return the list of modules as css string
+ list.toString = function toString() {
+ return this.map(function (item) {
+ var content = cssWithMappingToString(item, useSourceMap);
+ if(item[2]) {
+ return "@media " + item[2] + "{" + content + "}";
+ } else {
+ return content;
+ }
+ }).join("");
+ };
+
+ // import a list of modules into the list
+ list.i = function(modules, mediaQuery) {
+ if(typeof modules === "string")
+ modules = [[null, modules, ""]];
+ var alreadyImportedModules = {};
+ for(var i = 0; i < this.length; i++) {
+ var id = this[i][0];
+ if(typeof id === "number")
+ alreadyImportedModules[id] = true;
+ }
+ for(i = 0; i < modules.length; i++) {
+ var item = modules[i];
+ // skip already imported module
+ // this implementation is not 100% perfect for weird media query combinations
+ // when a module is imported multiple times with different media queries.
+ // I hope this will never occur (Hey this way we have smaller bundles)
+ if(typeof item[0] !== "number" || !alreadyImportedModules[item[0]]) {
+ if(mediaQuery && !item[2]) {
+ item[2] = mediaQuery;
+ } else if(mediaQuery) {
+ item[2] = "(" + item[2] + ") and (" + mediaQuery + ")";
+ }
+ list.push(item);
+ }
+ }
+ };
+ return list;
+};
+
+function cssWithMappingToString(item, useSourceMap) {
+ var content = item[1] || '';
+ var cssMapping = item[3];
+ if (!cssMapping) {
+ return content;
+ }
+
+ if (useSourceMap) {
+ var sourceMapping = toComment(cssMapping);
+ var sourceURLs = cssMapping.sources.map(function (source) {
+ return '/*# sourceURL=' + cssMapping.sourceRoot + source + ' */'
+ });
+
+ return [content].concat(sourceURLs).concat([sourceMapping]).join('\n');
+ }
+
+ return [content].join('\n');
+}
+
+// Adapted from convert-source-map (MIT)
+function toComment(sourceMap) {
+ var base64 = new Buffer(JSON.stringify(sourceMap)).toString('base64');
+ var data = 'sourceMappingURL=data:application/json;charset=utf-8;base64,' + base64;
+
+ return '/*# ' + data + ' */';
+}
+
+/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(18).Buffer))
+
+/***/ }),
+/* 2 */
+/***/ (function(module, exports, __webpack_require__) {
+
/* styles */
-__webpack_require__(43)
+__webpack_require__(44)
var Component = __webpack_require__(0)(
/* script */
- __webpack_require__(11),
+ __webpack_require__(13),
/* template */
- __webpack_require__(29),
+ __webpack_require__(39),
/* scopeId */
"data-v-4f6bf458",
/* cssModules */
@@ -172,7 +254,228 @@ module.exports = Component.exports
/***/ }),
-/* 2 */
+/* 3 */
+/***/ (function(module, exports, __webpack_require__) {
+
+/*
+ MIT License http://www.opensource.org/licenses/mit-license.php
+ Author Tobias Koppers @sokra
+ Modified by Evan You @yyx990803
+*/
+
+var hasDocument = typeof document !== 'undefined'
+
+if (typeof DEBUG !== 'undefined' && DEBUG) {
+ if (!hasDocument) {
+ throw new Error(
+ 'vue-style-loader cannot be used in a non-browser environment. ' +
+ "Use { target: 'node' } in your Webpack config to indicate a server-rendering environment."
+ ) }
+}
+
+var listToStyles = __webpack_require__(46)
+
+/*
+type StyleObject = {
+ id: number;
+ parts: Array<StyleObjectPart>
+}
+
+type StyleObjectPart = {
+ css: string;
+ media: string;
+ sourceMap: ?string
+}
+*/
+
+var stylesInDom = {/*
+ [id: number]: {
+ id: number,
+ refs: number,
+ parts: Array<(obj?: StyleObjectPart) => void>
+ }
+*/}
+
+var head = hasDocument && (document.head || document.getElementsByTagName('head')[0])
+var singletonElement = null
+var singletonCounter = 0
+var isProduction = false
+var noop = function () {}
+
+// Force single-tag solution on IE6-9, which has a hard limit on the # of <style>
+// tags it will allow on a page
+var isOldIE = typeof navigator !== 'undefined' && /msie [6-9]\b/.test(navigator.userAgent.toLowerCase())
+
+module.exports = function (parentId, list, _isProduction) {
+ isProduction = _isProduction
+
+ var styles = listToStyles(parentId, list)
+ addStylesToDom(styles)
+
+ return function update (newList) {
+ var mayRemove = []
+ for (var i = 0; i < styles.length; i++) {
+ var item = styles[i]
+ var domStyle = stylesInDom[item.id]
+ domStyle.refs--
+ mayRemove.push(domStyle)
+ }
+ if (newList) {
+ styles = listToStyles(parentId, newList)
+ addStylesToDom(styles)
+ } else {
+ styles = []
+ }
+ for (var i = 0; i < mayRemove.length; i++) {
+ var domStyle = mayRemove[i]
+ if (domStyle.refs === 0) {
+ for (var j = 0; j < domStyle.parts.length; j++) {
+ domStyle.parts[j]()
+ }
+ delete stylesInDom[domStyle.id]
+ }
+ }
+ }
+}
+
+function addStylesToDom (styles /* Array<StyleObject> */) {
+ for (var i = 0; i < styles.length; i++) {
+ var item = styles[i]
+ var domStyle = stylesInDom[item.id]
+ if (domStyle) {
+ domStyle.refs++
+ for (var j = 0; j < domStyle.parts.length; j++) {
+ domStyle.parts[j](item.parts[j])
+ }
+ for (; j < item.parts.length; j++) {
+ domStyle.parts.push(addStyle(item.parts[j]))
+ }
+ if (domStyle.parts.length > item.parts.length) {
+ domStyle.parts.length = item.parts.length
+ }
+ } else {
+ var parts = []
+ for (var j = 0; j < item.parts.length; j++) {
+ parts.push(addStyle(item.parts[j]))
+ }
+ stylesInDom[item.id] = { id: item.id, refs: 1, parts: parts }
+ }
+ }
+}
+
+function createStyleElement () {
+ var styleElement = document.createElement('style')
+ styleElement.type = 'text/css'
+ head.appendChild(styleElement)
+ return styleElement
+}
+
+function addStyle (obj /* StyleObjectPart */) {
+ var update, remove
+ var styleElement = document.querySelector('style[data-vue-ssr-id~="' + obj.id + '"]')
+
+ if (styleElement) {
+ if (isProduction) {
+ // has SSR styles and in production mode.
+ // simply do nothing.
+ return noop
+ } else {
+ // has SSR styles but in dev mode.
+ // for some reason Chrome can't handle source map in server-rendered
+ // style tags - source maps in <style> only works if the style tag is
+ // created and inserted dynamically. So we remove the server rendered
+ // styles and inject new ones.
+ styleElement.parentNode.removeChild(styleElement)
+ }
+ }
+
+ if (isOldIE) {
+ // use singleton mode for IE9.
+ var styleIndex = singletonCounter++
+ styleElement = singletonElement || (singletonElement = createStyleElement())
+ update = applyToSingletonTag.bind(null, styleElement, styleIndex, false)
+ remove = applyToSingletonTag.bind(null, styleElement, styleIndex, true)
+ } else {
+ // use multi-style-tag mode in all other cases
+ styleElement = createStyleElement()
+ update = applyToTag.bind(null, styleElement)
+ remove = function () {
+ styleElement.parentNode.removeChild(styleElement)
+ }
+ }
+
+ update(obj)
+
+ return function updateStyle (newObj /* StyleObjectPart */) {
+ if (newObj) {
+ if (newObj.css === obj.css &&
+ newObj.media === obj.media &&
+ newObj.sourceMap === obj.sourceMap) {
+ return
+ }
+ update(obj = newObj)
+ } else {
+ remove()
+ }
+ }
+}
+
+var replaceText = (function () {
+ var textStore = []
+
+ return function (index, replacement) {
+ textStore[index] = replacement
+ return textStore.filter(Boolean).join('\n')
+ }
+})()
+
+function applyToSingletonTag (styleElement, index, remove, obj) {
+ var css = remove ? '' : obj.css
+
+ if (styleElement.styleSheet) {
+ styleElement.styleSheet.cssText = replaceText(index, css)
+ } else {
+ var cssNode = document.createTextNode(css)
+ var childNodes = styleElement.childNodes
+ if (childNodes[index]) styleElement.removeChild(childNodes[index])
+ if (childNodes.length) {
+ styleElement.insertBefore(cssNode, childNodes[index])
+ } else {
+ styleElement.appendChild(cssNode)
+ }
+ }
+}
+
+function applyToTag (styleElement, obj) {
+ var css = obj.css
+ var media = obj.media
+ var sourceMap = obj.sourceMap
+
+ if (media) {
+ styleElement.setAttribute('media', media)
+ }
+
+ if (sourceMap) {
+ // https://developer.chrome.com/devtools/docs/javascript-debugging
+ // this makes source maps inside style tags work properly in Chrome
+ css += '\n/*# sourceURL=' + sourceMap.sources[0] + ' */'
+ // http://stackoverflow.com/a/26603875
+ css += '\n/*# sourceMappingURL=data:application/json;base64,' + btoa(unescape(encodeURIComponent(JSON.stringify(sourceMap)))) + ' */'
+ }
+
+ if (styleElement.styleSheet) {
+ styleElement.styleSheet.cssText = css
+ } else {
+ while (styleElement.firstChild) {
+ styleElement.removeChild(styleElement.firstChild)
+ }
+ styleElement.appendChild(document.createTextNode(css))
+ }
+}
+
+
+/***/ }),
+/* 4 */
/***/ (function(module, exports) {
var g;
@@ -199,14 +502,14 @@ module.exports = g;
/***/ }),
-/* 3 */
+/* 5 */
/***/ (function(module, exports, __webpack_require__) {
var Component = __webpack_require__(0)(
/* script */
- __webpack_require__(6),
+ __webpack_require__(8),
/* template */
- __webpack_require__(31),
+ __webpack_require__(41),
/* scopeId */
null,
/* cssModules */
@@ -233,18 +536,18 @@ module.exports = Component.exports
/***/ }),
-/* 4 */
+/* 6 */
/***/ (function(module, exports, __webpack_require__) {
/* styles */
-__webpack_require__(39)
+__webpack_require__(43)
var Component = __webpack_require__(0)(
/* script */
- __webpack_require__(12),
+ __webpack_require__(14),
/* template */
- __webpack_require__(28),
+ __webpack_require__(38),
/* scopeId */
null,
/* cssModules */
@@ -271,7 +574,7 @@ module.exports = Component.exports
/***/ }),
-/* 5 */
+/* 7 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
@@ -281,11 +584,11 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
-var _index = __webpack_require__(3);
+var _index = __webpack_require__(5);
var _index2 = _interopRequireDefault(_index);
-var _index3 = __webpack_require__(23);
+var _index3 = __webpack_require__(33);
var _index4 = _interopRequireDefault(_index3);
@@ -325,7 +628,11 @@ exports.default = {
},
computed: {
rawInputCode: function rawInputCode() {
- return this.cell.source.join('');
+ if (this.cell.source) {
+ return this.cell.source.join('');
+ } else {
+ return '';
+ }
},
hasOutput: function hasOutput() {
return this.cell.outputs.length;
@@ -337,7 +644,7 @@ exports.default = {
};
/***/ }),
-/* 6 */
+/* 8 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
@@ -347,11 +654,11 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
-var _highlight = __webpack_require__(14);
+var _highlight = __webpack_require__(16);
var _highlight2 = _interopRequireDefault(_highlight);
-var _prompt = __webpack_require__(1);
+var _prompt = __webpack_require__(2);
var _prompt2 = _interopRequireDefault(_prompt);
@@ -379,7 +686,8 @@ exports.default = {
props: {
count: {
type: Number,
- required: false
+ required: false,
+ default: 0
},
codeCssClass: {
type: String,
@@ -411,7 +719,7 @@ exports.default = {
};
/***/ }),
-/* 7 */
+/* 9 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
@@ -421,11 +729,11 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
-var _marked = __webpack_require__(15);
+var _marked = __webpack_require__(25);
var _marked2 = _interopRequireDefault(_marked);
-var _prompt = __webpack_require__(1);
+var _prompt = __webpack_require__(2);
var _prompt2 = _interopRequireDefault(_prompt);
@@ -470,7 +778,7 @@ exports.default = {
};
/***/ }),
-/* 8 */
+/* 10 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
@@ -480,7 +788,7 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
-var _prompt = __webpack_require__(1);
+var _prompt = __webpack_require__(2);
var _prompt2 = _interopRequireDefault(_prompt);
@@ -505,7 +813,7 @@ exports.default = {
//
/***/ }),
-/* 9 */
+/* 11 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
@@ -515,7 +823,7 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
-var _prompt = __webpack_require__(1);
+var _prompt = __webpack_require__(2);
var _prompt2 = _interopRequireDefault(_prompt);
@@ -545,7 +853,7 @@ exports.default = {
//
/***/ }),
-/* 10 */
+/* 12 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
@@ -565,15 +873,15 @@ var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol
//
//
-var _index = __webpack_require__(3);
+var _index = __webpack_require__(5);
var _index2 = _interopRequireDefault(_index);
-var _html = __webpack_require__(21);
+var _html = __webpack_require__(31);
var _html2 = _interopRequireDefault(_html);
-var _image = __webpack_require__(22);
+var _image = __webpack_require__(32);
var _image2 = _interopRequireDefault(_image);
@@ -589,7 +897,7 @@ exports.default = {
count: {
type: Number,
required: false,
- default: false
+ default: 0
},
output: {
type: Object,
@@ -650,7 +958,7 @@ exports.default = {
};
/***/ }),
-/* 11 */
+/* 13 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
@@ -682,7 +990,7 @@ exports.default = {
};
/***/ }),
-/* 12 */
+/* 14 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
@@ -692,7 +1000,7 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
-var _cells = __webpack_require__(13);
+var _cells = __webpack_require__(15);
exports.default = {
components: {
@@ -716,6 +1024,20 @@ exports.default = {
}
},
computed: {
+ cells: function cells() {
+ if (this.notebook.worksheets) {
+ var data = {
+ cells: []
+ };
+
+ return this.notebook.worksheets.reduce(function (data, sheet) {
+ data.cells = data.cells.concat(sheet.cells);
+ return data;
+ }, data).cells;
+ } else {
+ return this.notebook.cells;
+ }
+ },
hasNotebook: function hasNotebook() {
return Object.keys(this.notebook).length;
}
@@ -733,7 +1055,7 @@ exports.default = {
//
/***/ }),
-/* 13 */
+/* 15 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
@@ -743,7 +1065,7 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
-var _markdown = __webpack_require__(20);
+var _markdown = __webpack_require__(30);
Object.defineProperty(exports, 'MarkdownCell', {
enumerable: true,
@@ -752,7 +1074,7 @@ Object.defineProperty(exports, 'MarkdownCell', {
}
});
-var _code = __webpack_require__(19);
+var _code = __webpack_require__(29);
Object.defineProperty(exports, 'CodeCell', {
enumerable: true,
@@ -764,7 +1086,7 @@ Object.defineProperty(exports, 'CodeCell', {
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/***/ }),
-/* 14 */
+/* 16 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
@@ -774,13 +1096,13 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
-var _prismjs = __webpack_require__(18);
+var _prismjs = __webpack_require__(28);
var _prismjs2 = _interopRequireDefault(_prismjs);
-__webpack_require__(16);
+__webpack_require__(26);
-__webpack_require__(17);
+__webpack_require__(27);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -804,7 +1126,2082 @@ _prismjs2.default.plugins.customClass.map({
exports.default = _prismjs2.default;
/***/ }),
-/* 15 */
+/* 17 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+exports.byteLength = byteLength
+exports.toByteArray = toByteArray
+exports.fromByteArray = fromByteArray
+
+var lookup = []
+var revLookup = []
+var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array
+
+var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
+for (var i = 0, len = code.length; i < len; ++i) {
+ lookup[i] = code[i]
+ revLookup[code.charCodeAt(i)] = i
+}
+
+revLookup['-'.charCodeAt(0)] = 62
+revLookup['_'.charCodeAt(0)] = 63
+
+function placeHoldersCount (b64) {
+ var len = b64.length
+ if (len % 4 > 0) {
+ throw new Error('Invalid string. Length must be a multiple of 4')
+ }
+
+ // the number of equal signs (place holders)
+ // if there are two placeholders, than the two characters before it
+ // represent one byte
+ // if there is only one, then the three characters before it represent 2 bytes
+ // this is just a cheap hack to not do indexOf twice
+ return b64[len - 2] === '=' ? 2 : b64[len - 1] === '=' ? 1 : 0
+}
+
+function byteLength (b64) {
+ // base64 is 4/3 + up to two characters of the original data
+ return b64.length * 3 / 4 - placeHoldersCount(b64)
+}
+
+function toByteArray (b64) {
+ var i, j, l, tmp, placeHolders, arr
+ var len = b64.length
+ placeHolders = placeHoldersCount(b64)
+
+ arr = new Arr(len * 3 / 4 - placeHolders)
+
+ // if there are placeholders, only get up to the last complete 4 chars
+ l = placeHolders > 0 ? len - 4 : len
+
+ var L = 0
+
+ for (i = 0, j = 0; i < l; i += 4, j += 3) {
+ tmp = (revLookup[b64.charCodeAt(i)] << 18) | (revLookup[b64.charCodeAt(i + 1)] << 12) | (revLookup[b64.charCodeAt(i + 2)] << 6) | revLookup[b64.charCodeAt(i + 3)]
+ arr[L++] = (tmp >> 16) & 0xFF
+ arr[L++] = (tmp >> 8) & 0xFF
+ arr[L++] = tmp & 0xFF
+ }
+
+ if (placeHolders === 2) {
+ tmp = (revLookup[b64.charCodeAt(i)] << 2) | (revLookup[b64.charCodeAt(i + 1)] >> 4)
+ arr[L++] = tmp & 0xFF
+ } else if (placeHolders === 1) {
+ tmp = (revLookup[b64.charCodeAt(i)] << 10) | (revLookup[b64.charCodeAt(i + 1)] << 4) | (revLookup[b64.charCodeAt(i + 2)] >> 2)
+ arr[L++] = (tmp >> 8) & 0xFF
+ arr[L++] = tmp & 0xFF
+ }
+
+ return arr
+}
+
+function tripletToBase64 (num) {
+ return lookup[num >> 18 & 0x3F] + lookup[num >> 12 & 0x3F] + lookup[num >> 6 & 0x3F] + lookup[num & 0x3F]
+}
+
+function encodeChunk (uint8, start, end) {
+ var tmp
+ var output = []
+ for (var i = start; i < end; i += 3) {
+ tmp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2])
+ output.push(tripletToBase64(tmp))
+ }
+ return output.join('')
+}
+
+function fromByteArray (uint8) {
+ var tmp
+ var len = uint8.length
+ var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes
+ var output = ''
+ var parts = []
+ var maxChunkLength = 16383 // must be multiple of 3
+
+ // go through the array every three bytes, we'll deal with trailing stuff later
+ for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
+ parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))
+ }
+
+ // pad the end with zeros, but make sure to not forget the extra bytes
+ if (extraBytes === 1) {
+ tmp = uint8[len - 1]
+ output += lookup[tmp >> 2]
+ output += lookup[(tmp << 4) & 0x3F]
+ output += '=='
+ } else if (extraBytes === 2) {
+ tmp = (uint8[len - 2] << 8) + (uint8[len - 1])
+ output += lookup[tmp >> 10]
+ output += lookup[(tmp >> 4) & 0x3F]
+ output += lookup[(tmp << 2) & 0x3F]
+ output += '='
+ }
+
+ parts.push(output)
+
+ return parts.join('')
+}
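+
+// Illustrative round trip for this module's exports (a hand-checked sketch,
+// not executed by the bundle):
+//
+//   var bytes = toByteArray('aGVsbG8=')   // Uint8Array [104, 101, 108, 108, 111] ("hello")
+//   fromByteArray(bytes)                  // 'aGVsbG8='
+//   byteLength('aGVsbG8=')                // 5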
+
+
+/***/ }),
+/* 18 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+/* WEBPACK VAR INJECTION */(function(global) {/*!
+ * The buffer module from node.js, for the browser.
+ *
+ * @author Feross Aboukhadijeh <feross@feross.org> <http://feross.org>
+ * @license MIT
+ */
+/* eslint-disable no-proto */
+
+
+
+var base64 = __webpack_require__(17)
+var ieee754 = __webpack_require__(23)
+var isArray = __webpack_require__(24)
+
+exports.Buffer = Buffer
+exports.SlowBuffer = SlowBuffer
+exports.INSPECT_MAX_BYTES = 50
+
+/**
+ * If `Buffer.TYPED_ARRAY_SUPPORT`:
+ * === true Use Uint8Array implementation (fastest)
+ * === false Use Object implementation (most compatible, even IE6)
+ *
+ * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+,
+ * Opera 11.6+, iOS 4.2+.
+ *
+ * Due to various browser bugs, sometimes the Object implementation will be used even
+ * when the browser supports typed arrays.
+ *
+ * Note:
+ *
+ * - Firefox 4-29 lacks support for adding new properties to `Uint8Array` instances,
+ * See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438.
+ *
+ * - Chrome 9-10 is missing the `TypedArray.prototype.subarray` function.
+ *
+ * - IE10 has a broken `TypedArray.prototype.subarray` function which returns arrays of
+ * incorrect length in some situations.
+ *
+ * We detect these buggy browsers and set `Buffer.TYPED_ARRAY_SUPPORT` to `false` so they
+ * get the Object implementation, which is slower but behaves correctly.
+ */
+Buffer.TYPED_ARRAY_SUPPORT = global.TYPED_ARRAY_SUPPORT !== undefined
+ ? global.TYPED_ARRAY_SUPPORT
+ : typedArraySupport()
+
+/*
+ * Export kMaxLength after typed array support is determined.
+ */
+exports.kMaxLength = kMaxLength()
+
+function typedArraySupport () {
+ try {
+ var arr = new Uint8Array(1)
+ arr.__proto__ = {__proto__: Uint8Array.prototype, foo: function () { return 42 }}
+ return arr.foo() === 42 && // typed array instances can be augmented
+ typeof arr.subarray === 'function' && // chrome 9-10 lack `subarray`
+ arr.subarray(1, 1).byteLength === 0 // ie10 has broken `subarray`
+ } catch (e) {
+ return false
+ }
+}
+
+function kMaxLength () {
+ return Buffer.TYPED_ARRAY_SUPPORT
+ ? 0x7fffffff
+ : 0x3fffffff
+}
+
+function createBuffer (that, length) {
+ if (kMaxLength() < length) {
+ throw new RangeError('Invalid typed array length')
+ }
+ if (Buffer.TYPED_ARRAY_SUPPORT) {
+ // Return an augmented `Uint8Array` instance, for best performance
+ that = new Uint8Array(length)
+ that.__proto__ = Buffer.prototype
+ } else {
+ // Fallback: Return an object instance of the Buffer class
+ if (that === null) {
+ that = new Buffer(length)
+ }
+ that.length = length
+ }
+
+ return that
+}
+
+/**
+ * The Buffer constructor returns instances of `Uint8Array` that have their
+ * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of
+ * `Uint8Array`, so the returned instances will have all the node `Buffer` methods
+ * and the `Uint8Array` methods. Square bracket notation works as expected -- it
+ * returns a single octet.
+ *
+ * The `Uint8Array` prototype remains unmodified.
+ */
+
+function Buffer (arg, encodingOrOffset, length) {
+ if (!Buffer.TYPED_ARRAY_SUPPORT && !(this instanceof Buffer)) {
+ return new Buffer(arg, encodingOrOffset, length)
+ }
+
+ // Common case.
+ if (typeof arg === 'number') {
+ if (typeof encodingOrOffset === 'string') {
+ throw new Error(
+ 'If encoding is specified then the first argument must be a string'
+ )
+ }
+ return allocUnsafe(this, arg)
+ }
+ return from(this, arg, encodingOrOffset, length)
+}
+
+Buffer.poolSize = 8192 // not used by this implementation
+
+// TODO: Legacy, not needed anymore. Remove in next major version.
+Buffer._augment = function (arr) {
+ arr.__proto__ = Buffer.prototype
+ return arr
+}
+
+function from (that, value, encodingOrOffset, length) {
+ if (typeof value === 'number') {
+ throw new TypeError('"value" argument must not be a number')
+ }
+
+ if (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) {
+ return fromArrayBuffer(that, value, encodingOrOffset, length)
+ }
+
+ if (typeof value === 'string') {
+ return fromString(that, value, encodingOrOffset)
+ }
+
+ return fromObject(that, value)
+}
+
+/**
+ * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError
+ * if value is a number.
+ * Buffer.from(str[, encoding])
+ * Buffer.from(array)
+ * Buffer.from(buffer)
+ * Buffer.from(arrayBuffer[, byteOffset[, length]])
+ **/
+Buffer.from = function (value, encodingOrOffset, length) {
+ return from(null, value, encodingOrOffset, length)
+}
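+
+// Sketch of the call forms listed in the doc comment above (illustrative;
+// `someBuffer` and `someArrayBuffer` are hypothetical variables):
+//
+//   Buffer.from('abc', 'utf8')            // <Buffer 61 62 63>
+//   Buffer.from([1, 2, 3])                // <Buffer 01 02 03>
+//   Buffer.from(someBuffer)               // copies the source buffer's bytes
+//   Buffer.from(someArrayBuffer, 0, 2)    // view over the first two bytes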
+
+if (Buffer.TYPED_ARRAY_SUPPORT) {
+ Buffer.prototype.__proto__ = Uint8Array.prototype
+ Buffer.__proto__ = Uint8Array
+ if (typeof Symbol !== 'undefined' && Symbol.species &&
+ Buffer[Symbol.species] === Buffer) {
+ // Fix subarray() in ES2016. See: https://github.com/feross/buffer/pull/97
+ Object.defineProperty(Buffer, Symbol.species, {
+ value: null,
+ configurable: true
+ })
+ }
+}
+
+function assertSize (size) {
+ if (typeof size !== 'number') {
+ throw new TypeError('"size" argument must be a number')
+ } else if (size < 0) {
+ throw new RangeError('"size" argument must not be negative')
+ }
+}
+
+function alloc (that, size, fill, encoding) {
+ assertSize(size)
+ if (size <= 0) {
+ return createBuffer(that, size)
+ }
+ if (fill !== undefined) {
+ // Only pay attention to encoding if it's a string. This
+ // prevents accidentally sending in a number that would
+ // be interpreted as a start offset.
+ return typeof encoding === 'string'
+ ? createBuffer(that, size).fill(fill, encoding)
+ : createBuffer(that, size).fill(fill)
+ }
+ return createBuffer(that, size)
+}
+
+/**
+ * Creates a new filled Buffer instance.
+ * alloc(size[, fill[, encoding]])
+ **/
+Buffer.alloc = function (size, fill, encoding) {
+ return alloc(null, size, fill, encoding)
+}
+
+function allocUnsafe (that, size) {
+ assertSize(size)
+ that = createBuffer(that, size < 0 ? 0 : checked(size) | 0)
+ if (!Buffer.TYPED_ARRAY_SUPPORT) {
+ for (var i = 0; i < size; ++i) {
+ that[i] = 0
+ }
+ }
+ return that
+}
+
+/**
+ * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance.
+ * */
+Buffer.allocUnsafe = function (size) {
+ return allocUnsafe(null, size)
+}
+/**
+ * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
+ */
+Buffer.allocUnsafeSlow = function (size) {
+ return allocUnsafe(null, size)
+}
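+
+// Illustrative contrast between the allocation helpers (hand-written sketch):
+//
+//   Buffer.alloc(4)            // <Buffer 00 00 00 00>, always zero-filled
+//   Buffer.alloc(4, 0xff)      // <Buffer ff ff ff ff>, filled with the given byte
+//   Buffer.allocUnsafe(4)      // contents are not guaranteed to be zeroed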
+
+function fromString (that, string, encoding) {
+ if (typeof encoding !== 'string' || encoding === '') {
+ encoding = 'utf8'
+ }
+
+ if (!Buffer.isEncoding(encoding)) {
+ throw new TypeError('"encoding" must be a valid string encoding')
+ }
+
+ var length = byteLength(string, encoding) | 0
+ that = createBuffer(that, length)
+
+ var actual = that.write(string, encoding)
+
+ if (actual !== length) {
+ // Writing a hex string, for example, that contains invalid characters will
+ // cause everything after the first invalid character to be ignored. (e.g.
+ // 'abxxcd' will be treated as 'ab')
+ that = that.slice(0, actual)
+ }
+
+ return that
+}
+
+function fromArrayLike (that, array) {
+ var length = array.length < 0 ? 0 : checked(array.length) | 0
+ that = createBuffer(that, length)
+ for (var i = 0; i < length; i += 1) {
+ that[i] = array[i] & 255
+ }
+ return that
+}
+
+function fromArrayBuffer (that, array, byteOffset, length) {
+ array.byteLength // this throws if `array` is not a valid ArrayBuffer
+
+ if (byteOffset < 0 || array.byteLength < byteOffset) {
+ throw new RangeError('\'offset\' is out of bounds')
+ }
+
+ if (array.byteLength < byteOffset + (length || 0)) {
+ throw new RangeError('\'length\' is out of bounds')
+ }
+
+ if (byteOffset === undefined && length === undefined) {
+ array = new Uint8Array(array)
+ } else if (length === undefined) {
+ array = new Uint8Array(array, byteOffset)
+ } else {
+ array = new Uint8Array(array, byteOffset, length)
+ }
+
+ if (Buffer.TYPED_ARRAY_SUPPORT) {
+ // Return an augmented `Uint8Array` instance, for best performance
+ that = array
+ that.__proto__ = Buffer.prototype
+ } else {
+ // Fallback: Return an object instance of the Buffer class
+ that = fromArrayLike(that, array)
+ }
+ return that
+}
+
+function fromObject (that, obj) {
+ if (Buffer.isBuffer(obj)) {
+ var len = checked(obj.length) | 0
+ that = createBuffer(that, len)
+
+ if (that.length === 0) {
+ return that
+ }
+
+ obj.copy(that, 0, 0, len)
+ return that
+ }
+
+ if (obj) {
+ if ((typeof ArrayBuffer !== 'undefined' &&
+ obj.buffer instanceof ArrayBuffer) || 'length' in obj) {
+ if (typeof obj.length !== 'number' || isnan(obj.length)) {
+ return createBuffer(that, 0)
+ }
+ return fromArrayLike(that, obj)
+ }
+
+ if (obj.type === 'Buffer' && isArray(obj.data)) {
+ return fromArrayLike(that, obj.data)
+ }
+ }
+
+ throw new TypeError('First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.')
+}
+
+function checked (length) {
+ // Note: cannot use `length < kMaxLength()` here because that fails when
+ // length is NaN (which is otherwise coerced to zero.)
+ if (length >= kMaxLength()) {
+ throw new RangeError('Attempt to allocate Buffer larger than maximum ' +
+ 'size: 0x' + kMaxLength().toString(16) + ' bytes')
+ }
+ return length | 0
+}
+
+function SlowBuffer (length) {
+ if (+length != length) { // eslint-disable-line eqeqeq
+ length = 0
+ }
+ return Buffer.alloc(+length)
+}
+
+Buffer.isBuffer = function isBuffer (b) {
+ return !!(b != null && b._isBuffer)
+}
+
+Buffer.compare = function compare (a, b) {
+ if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {
+ throw new TypeError('Arguments must be Buffers')
+ }
+
+ if (a === b) return 0
+
+ var x = a.length
+ var y = b.length
+
+ for (var i = 0, len = Math.min(x, y); i < len; ++i) {
+ if (a[i] !== b[i]) {
+ x = a[i]
+ y = b[i]
+ break
+ }
+ }
+
+ if (x < y) return -1
+ if (y < x) return 1
+ return 0
+}
+
+Buffer.isEncoding = function isEncoding (encoding) {
+ switch (String(encoding).toLowerCase()) {
+ case 'hex':
+ case 'utf8':
+ case 'utf-8':
+ case 'ascii':
+ case 'latin1':
+ case 'binary':
+ case 'base64':
+ case 'ucs2':
+ case 'ucs-2':
+ case 'utf16le':
+ case 'utf-16le':
+ return true
+ default:
+ return false
+ }
+}
+
+Buffer.concat = function concat (list, length) {
+ if (!isArray(list)) {
+ throw new TypeError('"list" argument must be an Array of Buffers')
+ }
+
+ if (list.length === 0) {
+ return Buffer.alloc(0)
+ }
+
+ var i
+ if (length === undefined) {
+ length = 0
+ for (i = 0; i < list.length; ++i) {
+ length += list[i].length
+ }
+ }
+
+ var buffer = Buffer.allocUnsafe(length)
+ var pos = 0
+ for (i = 0; i < list.length; ++i) {
+ var buf = list[i]
+ if (!Buffer.isBuffer(buf)) {
+ throw new TypeError('"list" argument must be an Array of Buffers')
+ }
+ buf.copy(buffer, pos)
+ pos += buf.length
+ }
+ return buffer
+}
+
+function byteLength (string, encoding) {
+ if (Buffer.isBuffer(string)) {
+ return string.length
+ }
+ if (typeof ArrayBuffer !== 'undefined' && typeof ArrayBuffer.isView === 'function' &&
+ (ArrayBuffer.isView(string) || string instanceof ArrayBuffer)) {
+ return string.byteLength
+ }
+ if (typeof string !== 'string') {
+ string = '' + string
+ }
+
+ var len = string.length
+ if (len === 0) return 0
+
+ // Use a for loop to avoid recursion
+ var loweredCase = false
+ for (;;) {
+ switch (encoding) {
+ case 'ascii':
+ case 'latin1':
+ case 'binary':
+ return len
+ case 'utf8':
+ case 'utf-8':
+ case undefined:
+ return utf8ToBytes(string).length
+ case 'ucs2':
+ case 'ucs-2':
+ case 'utf16le':
+ case 'utf-16le':
+ return len * 2
+ case 'hex':
+ return len >>> 1
+ case 'base64':
+ return base64ToBytes(string).length
+ default:
+ if (loweredCase) return utf8ToBytes(string).length // assume utf8
+ encoding = ('' + encoding).toLowerCase()
+ loweredCase = true
+ }
+ }
+}
+Buffer.byteLength = byteLength
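+
+// Worked examples of the per-encoding sizes computed above (illustrative,
+// hand-checked):
+//
+//   Buffer.byteLength('abcd', 'hex')       // 2  (two hex digits per byte)
+//   Buffer.byteLength('ab', 'utf16le')     // 4  (two bytes per code unit)
+//   Buffer.byteLength('€', 'utf8')         // 3  (multi-byte UTF-8 sequence)
+//   Buffer.byteLength('TQ==', 'base64')    // 1  (padding subtracted)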
+
+function slowToString (encoding, start, end) {
+ var loweredCase = false
+
+ // No need to verify that "this.length <= MAX_UINT32" since it's a read-only
+ // property of a typed array.
+
+ // This behaves neither like String nor Uint8Array in that we set start/end
+ // to their upper/lower bounds if the value passed is out of range.
+ // undefined is handled specially as per ECMA-262 6th Edition,
+ // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.
+ if (start === undefined || start < 0) {
+ start = 0
+ }
+ // Return early if start > this.length. Done here to prevent potential uint32
+ // coercion fail below.
+ if (start > this.length) {
+ return ''
+ }
+
+ if (end === undefined || end > this.length) {
+ end = this.length
+ }
+
+ if (end <= 0) {
+ return ''
+ }
+
+ // Force coercion to uint32. This will also coerce falsey/NaN values to 0.
+ end >>>= 0
+ start >>>= 0
+
+ if (end <= start) {
+ return ''
+ }
+
+ if (!encoding) encoding = 'utf8'
+
+ while (true) {
+ switch (encoding) {
+ case 'hex':
+ return hexSlice(this, start, end)
+
+ case 'utf8':
+ case 'utf-8':
+ return utf8Slice(this, start, end)
+
+ case 'ascii':
+ return asciiSlice(this, start, end)
+
+ case 'latin1':
+ case 'binary':
+ return latin1Slice(this, start, end)
+
+ case 'base64':
+ return base64Slice(this, start, end)
+
+ case 'ucs2':
+ case 'ucs-2':
+ case 'utf16le':
+ case 'utf-16le':
+ return utf16leSlice(this, start, end)
+
+ default:
+ if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
+ encoding = (encoding + '').toLowerCase()
+ loweredCase = true
+ }
+ }
+}
+
+// The property is used by `Buffer.isBuffer` and `is-buffer` (in Safari 5-7) to detect
+// Buffer instances.
+Buffer.prototype._isBuffer = true
+
+function swap (b, n, m) {
+ var i = b[n]
+ b[n] = b[m]
+ b[m] = i
+}
+
+Buffer.prototype.swap16 = function swap16 () {
+ var len = this.length
+ if (len % 2 !== 0) {
+ throw new RangeError('Buffer size must be a multiple of 16-bits')
+ }
+ for (var i = 0; i < len; i += 2) {
+ swap(this, i, i + 1)
+ }
+ return this
+}
+
+Buffer.prototype.swap32 = function swap32 () {
+ var len = this.length
+ if (len % 4 !== 0) {
+ throw new RangeError('Buffer size must be a multiple of 32-bits')
+ }
+ for (var i = 0; i < len; i += 4) {
+ swap(this, i, i + 3)
+ swap(this, i + 1, i + 2)
+ }
+ return this
+}
+
+Buffer.prototype.swap64 = function swap64 () {
+ var len = this.length
+ if (len % 8 !== 0) {
+ throw new RangeError('Buffer size must be a multiple of 64-bits')
+ }
+ for (var i = 0; i < len; i += 8) {
+ swap(this, i, i + 7)
+ swap(this, i + 1, i + 6)
+ swap(this, i + 2, i + 5)
+ swap(this, i + 3, i + 4)
+ }
+ return this
+}
+
+Buffer.prototype.toString = function toString () {
+ var length = this.length | 0
+ if (length === 0) return ''
+ if (arguments.length === 0) return utf8Slice(this, 0, length)
+ return slowToString.apply(this, arguments)
+}
+
+Buffer.prototype.equals = function equals (b) {
+ if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')
+ if (this === b) return true
+ return Buffer.compare(this, b) === 0
+}
+
+Buffer.prototype.inspect = function inspect () {
+ var str = ''
+ var max = exports.INSPECT_MAX_BYTES
+ if (this.length > 0) {
+ str = this.toString('hex', 0, max).match(/.{2}/g).join(' ')
+ if (this.length > max) str += ' ... '
+ }
+ return '<Buffer ' + str + '>'
+}
+
+Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {
+ if (!Buffer.isBuffer(target)) {
+ throw new TypeError('Argument must be a Buffer')
+ }
+
+ if (start === undefined) {
+ start = 0
+ }
+ if (end === undefined) {
+ end = target ? target.length : 0
+ }
+ if (thisStart === undefined) {
+ thisStart = 0
+ }
+ if (thisEnd === undefined) {
+ thisEnd = this.length
+ }
+
+ if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) {
+ throw new RangeError('out of range index')
+ }
+
+ if (thisStart >= thisEnd && start >= end) {
+ return 0
+ }
+ if (thisStart >= thisEnd) {
+ return -1
+ }
+ if (start >= end) {
+ return 1
+ }
+
+ start >>>= 0
+ end >>>= 0
+ thisStart >>>= 0
+ thisEnd >>>= 0
+
+ if (this === target) return 0
+
+ var x = thisEnd - thisStart
+ var y = end - start
+ var len = Math.min(x, y)
+
+ var thisCopy = this.slice(thisStart, thisEnd)
+ var targetCopy = target.slice(start, end)
+
+ for (var i = 0; i < len; ++i) {
+ if (thisCopy[i] !== targetCopy[i]) {
+ x = thisCopy[i]
+ y = targetCopy[i]
+ break
+ }
+ }
+
+ if (x < y) return -1
+ if (y < x) return 1
+ return 0
+}
+
+// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
+// OR the last index of `val` in `buffer` at offset <= `byteOffset`.
+//
+// Arguments:
+// - buffer - a Buffer to search
+// - val - a string, Buffer, or number
+// - byteOffset - an index into `buffer`; will be clamped to an int32
+ // - encoding - an optional encoding, relevant if val is a string
+// - dir - true for indexOf, false for lastIndexOf
+function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) {
+ // Empty buffer means no match
+ if (buffer.length === 0) return -1
+
+ // Normalize byteOffset
+ if (typeof byteOffset === 'string') {
+ encoding = byteOffset
+ byteOffset = 0
+ } else if (byteOffset > 0x7fffffff) {
+ byteOffset = 0x7fffffff
+ } else if (byteOffset < -0x80000000) {
+ byteOffset = -0x80000000
+ }
+ byteOffset = +byteOffset // Coerce to Number.
+ if (isNaN(byteOffset)) {
+ // byteOffset: if it's undefined, null, NaN, "foo", etc., search the whole buffer
+ byteOffset = dir ? 0 : (buffer.length - 1)
+ }
+
+ // Normalize byteOffset: negative offsets start from the end of the buffer
+ if (byteOffset < 0) byteOffset = buffer.length + byteOffset
+ if (byteOffset >= buffer.length) {
+ if (dir) return -1
+ else byteOffset = buffer.length - 1
+ } else if (byteOffset < 0) {
+ if (dir) byteOffset = 0
+ else return -1
+ }
+
+ // Normalize val
+ if (typeof val === 'string') {
+ val = Buffer.from(val, encoding)
+ }
+
+ // Finally, search either indexOf (if dir is true) or lastIndexOf
+ if (Buffer.isBuffer(val)) {
+ // Special case: looking for empty string/buffer always fails
+ if (val.length === 0) {
+ return -1
+ }
+ return arrayIndexOf(buffer, val, byteOffset, encoding, dir)
+ } else if (typeof val === 'number') {
+ val = val & 0xFF // Search for a byte value [0-255]
+ if (Buffer.TYPED_ARRAY_SUPPORT &&
+ typeof Uint8Array.prototype.indexOf === 'function') {
+ if (dir) {
+ return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset)
+ } else {
+ return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset)
+ }
+ }
+ return arrayIndexOf(buffer, [ val ], byteOffset, encoding, dir)
+ }
+
+ throw new TypeError('val must be string, number or Buffer')
+}
+
+function arrayIndexOf (arr, val, byteOffset, encoding, dir) {
+ var indexSize = 1
+ var arrLength = arr.length
+ var valLength = val.length
+
+ if (encoding !== undefined) {
+ encoding = String(encoding).toLowerCase()
+ if (encoding === 'ucs2' || encoding === 'ucs-2' ||
+ encoding === 'utf16le' || encoding === 'utf-16le') {
+ if (arr.length < 2 || val.length < 2) {
+ return -1
+ }
+ indexSize = 2
+ arrLength /= 2
+ valLength /= 2
+ byteOffset /= 2
+ }
+ }
+
+ function read (buf, i) {
+ if (indexSize === 1) {
+ return buf[i]
+ } else {
+ return buf.readUInt16BE(i * indexSize)
+ }
+ }
+
+ var i
+ if (dir) {
+ var foundIndex = -1
+ for (i = byteOffset; i < arrLength; i++) {
+ if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
+ if (foundIndex === -1) foundIndex = i
+ if (i - foundIndex + 1 === valLength) return foundIndex * indexSize
+ } else {
+ if (foundIndex !== -1) i -= i - foundIndex
+ foundIndex = -1
+ }
+ }
+ } else {
+ if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength
+ for (i = byteOffset; i >= 0; i--) {
+ var found = true
+ for (var j = 0; j < valLength; j++) {
+ if (read(arr, i + j) !== read(val, j)) {
+ found = false
+ break
+ }
+ }
+ if (found) return i
+ }
+ }
+
+ return -1
+}
+
+Buffer.prototype.includes = function includes (val, byteOffset, encoding) {
+ return this.indexOf(val, byteOffset, encoding) !== -1
+}
+
+Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) {
+ return bidirectionalIndexOf(this, val, byteOffset, encoding, true)
+}
+
+Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) {
+ return bidirectionalIndexOf(this, val, byteOffset, encoding, false)
+}
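+
+// Usage sketch for the search helpers above (illustrative, hand-checked):
+//
+//   var b = Buffer.from('abcabc')
+//   b.indexOf('b')        // 1
+//   b.indexOf(0x63)       // 2  (a number searches for a single byte value)
+//   b.lastIndexOf('b')    // 4
+//   b.includes('z')       // false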
+
+function hexWrite (buf, string, offset, length) {
+ offset = Number(offset) || 0
+ var remaining = buf.length - offset
+ if (!length) {
+ length = remaining
+ } else {
+ length = Number(length)
+ if (length > remaining) {
+ length = remaining
+ }
+ }
+
+ // must be an even number of digits
+ var strLen = string.length
+ if (strLen % 2 !== 0) throw new TypeError('Invalid hex string')
+
+ if (length > strLen / 2) {
+ length = strLen / 2
+ }
+ for (var i = 0; i < length; ++i) {
+ var parsed = parseInt(string.substr(i * 2, 2), 16)
+ if (isNaN(parsed)) return i
+ buf[offset + i] = parsed
+ }
+ return i
+}
+
+function utf8Write (buf, string, offset, length) {
+ return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length)
+}
+
+function asciiWrite (buf, string, offset, length) {
+ return blitBuffer(asciiToBytes(string), buf, offset, length)
+}
+
+function latin1Write (buf, string, offset, length) {
+ return asciiWrite(buf, string, offset, length)
+}
+
+function base64Write (buf, string, offset, length) {
+ return blitBuffer(base64ToBytes(string), buf, offset, length)
+}
+
+function ucs2Write (buf, string, offset, length) {
+ return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length)
+}
+
+Buffer.prototype.write = function write (string, offset, length, encoding) {
+ // Buffer#write(string)
+ if (offset === undefined) {
+ encoding = 'utf8'
+ length = this.length
+ offset = 0
+ // Buffer#write(string, encoding)
+ } else if (length === undefined && typeof offset === 'string') {
+ encoding = offset
+ length = this.length
+ offset = 0
+ // Buffer#write(string, offset[, length][, encoding])
+ } else if (isFinite(offset)) {
+ offset = offset | 0
+ if (isFinite(length)) {
+ length = length | 0
+ if (encoding === undefined) encoding = 'utf8'
+ } else {
+ encoding = length
+ length = undefined
+ }
+ // legacy write(string, encoding, offset, length) - remove in v0.13
+ } else {
+ throw new Error(
+ 'Buffer.write(string, encoding, offset[, length]) is no longer supported'
+ )
+ }
+
+ var remaining = this.length - offset
+ if (length === undefined || length > remaining) length = remaining
+
+ if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) {
+ throw new RangeError('Attempt to write outside buffer bounds')
+ }
+
+ if (!encoding) encoding = 'utf8'
+
+ var loweredCase = false
+ for (;;) {
+ switch (encoding) {
+ case 'hex':
+ return hexWrite(this, string, offset, length)
+
+ case 'utf8':
+ case 'utf-8':
+ return utf8Write(this, string, offset, length)
+
+ case 'ascii':
+ return asciiWrite(this, string, offset, length)
+
+ case 'latin1':
+ case 'binary':
+ return latin1Write(this, string, offset, length)
+
+ case 'base64':
+ // Warning: maxLength not taken into account in base64Write
+ return base64Write(this, string, offset, length)
+
+ case 'ucs2':
+ case 'ucs-2':
+ case 'utf16le':
+ case 'utf-16le':
+ return ucs2Write(this, string, offset, length)
+
+ default:
+ if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
+ encoding = ('' + encoding).toLowerCase()
+ loweredCase = true
+ }
+ }
+}
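+
+// Sketch of the overloads handled above (illustrative, hand-checked):
+//
+//   var buf = Buffer.alloc(8)
+//   buf.write('abc')              // write(string): 3 bytes written at offset 0
+//   buf.write('abc', 4)           // write(string, offset): written at offset 4
+//   buf.write('6162', 0, 'hex')   // write(string, offset, encoding): writes 0x61 0x62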
+
+Buffer.prototype.toJSON = function toJSON () {
+ return {
+ type: 'Buffer',
+ data: Array.prototype.slice.call(this._arr || this, 0)
+ }
+}
+
+function base64Slice (buf, start, end) {
+ if (start === 0 && end === buf.length) {
+ return base64.fromByteArray(buf)
+ } else {
+ return base64.fromByteArray(buf.slice(start, end))
+ }
+}
+
+function utf8Slice (buf, start, end) {
+ end = Math.min(buf.length, end)
+ var res = []
+
+ var i = start
+ while (i < end) {
+ var firstByte = buf[i]
+ var codePoint = null
+ var bytesPerSequence = (firstByte > 0xEF) ? 4
+ : (firstByte > 0xDF) ? 3
+ : (firstByte > 0xBF) ? 2
+ : 1
+
+ if (i + bytesPerSequence <= end) {
+ var secondByte, thirdByte, fourthByte, tempCodePoint
+
+ switch (bytesPerSequence) {
+ case 1:
+ if (firstByte < 0x80) {
+ codePoint = firstByte
+ }
+ break
+ case 2:
+ secondByte = buf[i + 1]
+ if ((secondByte & 0xC0) === 0x80) {
+ tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F)
+ if (tempCodePoint > 0x7F) {
+ codePoint = tempCodePoint
+ }
+ }
+ break
+ case 3:
+ secondByte = buf[i + 1]
+ thirdByte = buf[i + 2]
+ if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) {
+ tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F)
+ if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) {
+ codePoint = tempCodePoint
+ }
+ }
+ break
+ case 4:
+ secondByte = buf[i + 1]
+ thirdByte = buf[i + 2]
+ fourthByte = buf[i + 3]
+ if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) {
+ tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F)
+ if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) {
+ codePoint = tempCodePoint
+ }
+ }
+ }
+ }
+
+ if (codePoint === null) {
+ // we did not generate a valid codePoint so insert a
+ // replacement char (U+FFFD) and advance only 1 byte
+ codePoint = 0xFFFD
+ bytesPerSequence = 1
+ } else if (codePoint > 0xFFFF) {
+ // encode to utf16 (surrogate pair dance)
+ codePoint -= 0x10000
+ res.push(codePoint >>> 10 & 0x3FF | 0xD800)
+ codePoint = 0xDC00 | codePoint & 0x3FF
+ }
+
+ res.push(codePoint)
+ i += bytesPerSequence
+ }
+
+ return decodeCodePointsArray(res)
+}
+
+// Based on http://stackoverflow.com/a/22747272/680742, the browser with
+// the lowest limit is Chrome, with 0x10000 args.
+// We go 1 magnitude less, for safety
+var MAX_ARGUMENTS_LENGTH = 0x1000
+
+function decodeCodePointsArray (codePoints) {
+ var len = codePoints.length
+ if (len <= MAX_ARGUMENTS_LENGTH) {
+ return String.fromCharCode.apply(String, codePoints) // avoid extra slice()
+ }
+
+ // Decode in chunks to avoid "call stack size exceeded".
+ var res = ''
+ var i = 0
+ while (i < len) {
+ res += String.fromCharCode.apply(
+ String,
+ codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH)
+ )
+ }
+ return res
+}
+
+function asciiSlice (buf, start, end) {
+ var ret = ''
+ end = Math.min(buf.length, end)
+
+ for (var i = start; i < end; ++i) {
+ ret += String.fromCharCode(buf[i] & 0x7F)
+ }
+ return ret
+}
+
+function latin1Slice (buf, start, end) {
+ var ret = ''
+ end = Math.min(buf.length, end)
+
+ for (var i = start; i < end; ++i) {
+ ret += String.fromCharCode(buf[i])
+ }
+ return ret
+}
+
+function hexSlice (buf, start, end) {
+ var len = buf.length
+
+ if (!start || start < 0) start = 0
+ if (!end || end < 0 || end > len) end = len
+
+ var out = ''
+ for (var i = start; i < end; ++i) {
+ out += toHex(buf[i])
+ }
+ return out
+}
+
+function utf16leSlice (buf, start, end) {
+ var bytes = buf.slice(start, end)
+ var res = ''
+ for (var i = 0; i < bytes.length; i += 2) {
+ res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256)
+ }
+ return res
+}
+
+Buffer.prototype.slice = function slice (start, end) {
+ var len = this.length
+ start = ~~start
+ end = end === undefined ? len : ~~end
+
+ if (start < 0) {
+ start += len
+ if (start < 0) start = 0
+ } else if (start > len) {
+ start = len
+ }
+
+ if (end < 0) {
+ end += len
+ if (end < 0) end = 0
+ } else if (end > len) {
+ end = len
+ }
+
+ if (end < start) end = start
+
+ var newBuf
+ if (Buffer.TYPED_ARRAY_SUPPORT) {
+ newBuf = this.subarray(start, end)
+ newBuf.__proto__ = Buffer.prototype
+ } else {
+ var sliceLen = end - start
+ newBuf = new Buffer(sliceLen, undefined)
+ for (var i = 0; i < sliceLen; ++i) {
+ newBuf[i] = this[i + start]
+ }
+ }
+
+ return newBuf
+}
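+
+// Illustrative slice behaviour (hand-checked): negative indices count from the
+// end, and on the typed-array path the result shares memory with the source.
+//
+//   var b = Buffer.from('buffer')
+//   b.slice(0, 3).toString()    // 'buf'
+//   b.slice(-3).toString()      // 'fer'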
+
+/*
+ * Need to make sure that buffer isn't trying to write out of bounds.
+ */
+function checkOffset (offset, ext, length) {
+ if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint')
+ if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')
+}
+
+Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) {
+ offset = offset | 0
+ byteLength = byteLength | 0
+ if (!noAssert) checkOffset(offset, byteLength, this.length)
+
+ var val = this[offset]
+ var mul = 1
+ var i = 0
+ while (++i < byteLength && (mul *= 0x100)) {
+ val += this[offset + i] * mul
+ }
+
+ return val
+}
+
+Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) {
+ offset = offset | 0
+ byteLength = byteLength | 0
+ if (!noAssert) {
+ checkOffset(offset, byteLength, this.length)
+ }
+
+ var val = this[offset + --byteLength]
+ var mul = 1
+ while (byteLength > 0 && (mul *= 0x100)) {
+ val += this[offset + --byteLength] * mul
+ }
+
+ return val
+}
+
+Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 1, this.length)
+ return this[offset]
+}
+
+Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 2, this.length)
+ return this[offset] | (this[offset + 1] << 8)
+}
+
+Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 2, this.length)
+ return (this[offset] << 8) | this[offset + 1]
+}
+
+Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 4, this.length)
+
+ return ((this[offset]) |
+ (this[offset + 1] << 8) |
+ (this[offset + 2] << 16)) +
+ (this[offset + 3] * 0x1000000)
+}
+
+Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 4, this.length)
+
+ return (this[offset] * 0x1000000) +
+ ((this[offset + 1] << 16) |
+ (this[offset + 2] << 8) |
+ this[offset + 3])
+}
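+
+// Worked example of the endianness helpers above (illustrative, hand-checked):
+//
+//   var b = Buffer.from([0x12, 0x34, 0x56, 0x78])
+//   b.readUInt16LE(0)    // 0x3412
+//   b.readUInt16BE(0)    // 0x1234
+//   b.readUInt32LE(0)    // 0x78563412
+//   b.readUInt32BE(0)    // 0x12345678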
+
+Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) {
+ offset = offset | 0
+ byteLength = byteLength | 0
+ if (!noAssert) checkOffset(offset, byteLength, this.length)
+
+ var val = this[offset]
+ var mul = 1
+ var i = 0
+ while (++i < byteLength && (mul *= 0x100)) {
+ val += this[offset + i] * mul
+ }
+ mul *= 0x80
+
+ if (val >= mul) val -= Math.pow(2, 8 * byteLength)
+
+ return val
+}
+
+Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) {
+ offset = offset | 0
+ byteLength = byteLength | 0
+ if (!noAssert) checkOffset(offset, byteLength, this.length)
+
+ var i = byteLength
+ var mul = 1
+ var val = this[offset + --i]
+ while (i > 0 && (mul *= 0x100)) {
+ val += this[offset + --i] * mul
+ }
+ mul *= 0x80
+
+ if (val >= mul) val -= Math.pow(2, 8 * byteLength)
+
+ return val
+}
+
+Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 1, this.length)
+ if (!(this[offset] & 0x80)) return (this[offset])
+ return ((0xff - this[offset] + 1) * -1)
+}
+
+Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 2, this.length)
+ var val = this[offset] | (this[offset + 1] << 8)
+ return (val & 0x8000) ? val | 0xFFFF0000 : val
+}
+
+Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 2, this.length)
+ var val = this[offset + 1] | (this[offset] << 8)
+ return (val & 0x8000) ? val | 0xFFFF0000 : val
+}
+
+Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 4, this.length)
+
+ return (this[offset]) |
+ (this[offset + 1] << 8) |
+ (this[offset + 2] << 16) |
+ (this[offset + 3] << 24)
+}
+
+Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 4, this.length)
+
+ return (this[offset] << 24) |
+ (this[offset + 1] << 16) |
+ (this[offset + 2] << 8) |
+ (this[offset + 3])
+}
+
+Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 4, this.length)
+ return ieee754.read(this, offset, true, 23, 4)
+}
+
+Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 4, this.length)
+ return ieee754.read(this, offset, false, 23, 4)
+}
+
+Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 8, this.length)
+ return ieee754.read(this, offset, true, 52, 8)
+}
+
+Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) {
+ if (!noAssert) checkOffset(offset, 8, this.length)
+ return ieee754.read(this, offset, false, 52, 8)
+}
+
+function checkInt (buf, value, offset, ext, max, min) {
+ if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance')
+ if (value > max || value < min) throw new RangeError('"value" argument is out of bounds')
+ if (offset + ext > buf.length) throw new RangeError('Index out of range')
+}
+
+Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) {
+ value = +value
+ offset = offset | 0
+ byteLength = byteLength | 0
+ if (!noAssert) {
+ var maxBytes = Math.pow(2, 8 * byteLength) - 1
+ checkInt(this, value, offset, byteLength, maxBytes, 0)
+ }
+
+ var mul = 1
+ var i = 0
+ this[offset] = value & 0xFF
+ while (++i < byteLength && (mul *= 0x100)) {
+ this[offset + i] = (value / mul) & 0xFF
+ }
+
+ return offset + byteLength
+}
+
+Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) {
+ value = +value
+ offset = offset | 0
+ byteLength = byteLength | 0
+ if (!noAssert) {
+ var maxBytes = Math.pow(2, 8 * byteLength) - 1
+ checkInt(this, value, offset, byteLength, maxBytes, 0)
+ }
+
+ var i = byteLength - 1
+ var mul = 1
+ this[offset + i] = value & 0xFF
+ while (--i >= 0 && (mul *= 0x100)) {
+ this[offset + i] = (value / mul) & 0xFF
+ }
+
+ return offset + byteLength
+}
+
+Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) {
+ value = +value
+ offset = offset | 0
+ if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0)
+ if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value)
+ this[offset] = (value & 0xff)
+ return offset + 1
+}
+
+function objectWriteUInt16 (buf, value, offset, littleEndian) {
+ if (value < 0) value = 0xffff + value + 1
+ for (var i = 0, j = Math.min(buf.length - offset, 2); i < j; ++i) {
+ buf[offset + i] = (value & (0xff << (8 * (littleEndian ? i : 1 - i)))) >>>
+ (littleEndian ? i : 1 - i) * 8
+ }
+}
+
+Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) {
+ value = +value
+ offset = offset | 0
+ if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
+ if (Buffer.TYPED_ARRAY_SUPPORT) {
+ this[offset] = (value & 0xff)
+ this[offset + 1] = (value >>> 8)
+ } else {
+ objectWriteUInt16(this, value, offset, true)
+ }
+ return offset + 2
+}
+
+Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) {
+ value = +value
+ offset = offset | 0
+ if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
+ if (Buffer.TYPED_ARRAY_SUPPORT) {
+ this[offset] = (value >>> 8)
+ this[offset + 1] = (value & 0xff)
+ } else {
+ objectWriteUInt16(this, value, offset, false)
+ }
+ return offset + 2
+}
+
+function objectWriteUInt32 (buf, value, offset, littleEndian) {
+ if (value < 0) value = 0xffffffff + value + 1
+ for (var i = 0, j = Math.min(buf.length - offset, 4); i < j; ++i) {
+ buf[offset + i] = (value >>> (littleEndian ? i : 3 - i) * 8) & 0xff
+ }
+}
+
+Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) {
+ value = +value
+ offset = offset | 0
+ if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
+ if (Buffer.TYPED_ARRAY_SUPPORT) {
+ this[offset + 3] = (value >>> 24)
+ this[offset + 2] = (value >>> 16)
+ this[offset + 1] = (value >>> 8)
+ this[offset] = (value & 0xff)
+ } else {
+ objectWriteUInt32(this, value, offset, true)
+ }
+ return offset + 4
+}
+
+Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) {
+ value = +value
+ offset = offset | 0
+ if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
+ if (Buffer.TYPED_ARRAY_SUPPORT) {
+ this[offset] = (value >>> 24)
+ this[offset + 1] = (value >>> 16)
+ this[offset + 2] = (value >>> 8)
+ this[offset + 3] = (value & 0xff)
+ } else {
+ objectWriteUInt32(this, value, offset, false)
+ }
+ return offset + 4
+}
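+
+// Illustrative counterpart on the write side (hand-checked):
+//
+//   var be = Buffer.alloc(4); be.writeUInt32BE(0x12345678, 0)   // be: <Buffer 12 34 56 78>
+//   var le = Buffer.alloc(4); le.writeUInt32LE(0x12345678, 0)   // le: <Buffer 78 56 34 12>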
+
+Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) {
+ value = +value
+ offset = offset | 0
+ if (!noAssert) {
+ var limit = Math.pow(2, 8 * byteLength - 1)
+
+ checkInt(this, value, offset, byteLength, limit - 1, -limit)
+ }
+
+ var i = 0
+ var mul = 1
+ var sub = 0
+ this[offset] = value & 0xFF
+ while (++i < byteLength && (mul *= 0x100)) {
+ if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
+ sub = 1
+ }
+ this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
+ }
+
+ return offset + byteLength
+}
+
+Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) {
+ value = +value
+ offset = offset | 0
+ if (!noAssert) {
+ var limit = Math.pow(2, 8 * byteLength - 1)
+
+ checkInt(this, value, offset, byteLength, limit - 1, -limit)
+ }
+
+ var i = byteLength - 1
+ var mul = 1
+ var sub = 0
+ this[offset + i] = value & 0xFF
+ while (--i >= 0 && (mul *= 0x100)) {
+ if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
+ sub = 1
+ }
+ this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
+ }
+
+ return offset + byteLength
+}
+
+Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) {
+ value = +value
+ offset = offset | 0
+ if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80)
+ if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value)
+ if (value < 0) value = 0xff + value + 1
+ this[offset] = (value & 0xff)
+ return offset + 1
+}
+
+Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) {
+ value = +value
+ offset = offset | 0
+ if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
+ if (Buffer.TYPED_ARRAY_SUPPORT) {
+ this[offset] = (value & 0xff)
+ this[offset + 1] = (value >>> 8)
+ } else {
+ objectWriteUInt16(this, value, offset, true)
+ }
+ return offset + 2
+}
+
+Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) {
+ value = +value
+ offset = offset | 0
+ if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
+ if (Buffer.TYPED_ARRAY_SUPPORT) {
+ this[offset] = (value >>> 8)
+ this[offset + 1] = (value & 0xff)
+ } else {
+ objectWriteUInt16(this, value, offset, false)
+ }
+ return offset + 2
+}
+
+Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) {
+ value = +value
+ offset = offset | 0
+ if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
+ if (Buffer.TYPED_ARRAY_SUPPORT) {
+ this[offset] = (value & 0xff)
+ this[offset + 1] = (value >>> 8)
+ this[offset + 2] = (value >>> 16)
+ this[offset + 3] = (value >>> 24)
+ } else {
+ objectWriteUInt32(this, value, offset, true)
+ }
+ return offset + 4
+}
+
+Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) {
+ value = +value
+ offset = offset | 0
+ if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
+ if (value < 0) value = 0xffffffff + value + 1
+ if (Buffer.TYPED_ARRAY_SUPPORT) {
+ this[offset] = (value >>> 24)
+ this[offset + 1] = (value >>> 16)
+ this[offset + 2] = (value >>> 8)
+ this[offset + 3] = (value & 0xff)
+ } else {
+ objectWriteUInt32(this, value, offset, false)
+ }
+ return offset + 4
+}
+
+function checkIEEE754 (buf, value, offset, ext, max, min) {
+ if (offset + ext > buf.length) throw new RangeError('Index out of range')
+ if (offset < 0) throw new RangeError('Index out of range')
+}
+
+function writeFloat (buf, value, offset, littleEndian, noAssert) {
+ if (!noAssert) {
+ checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38)
+ }
+ ieee754.write(buf, value, offset, littleEndian, 23, 4)
+ return offset + 4
+}
+
+Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) {
+ return writeFloat(this, value, offset, true, noAssert)
+}
+
+Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) {
+ return writeFloat(this, value, offset, false, noAssert)
+}
+
+function writeDouble (buf, value, offset, littleEndian, noAssert) {
+ if (!noAssert) {
+ checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308)
+ }
+ ieee754.write(buf, value, offset, littleEndian, 52, 8)
+ return offset + 8
+}
+
+Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) {
+ return writeDouble(this, value, offset, true, noAssert)
+}
+
+Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) {
+ return writeDouble(this, value, offset, false, noAssert)
+}
+
+// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
+Buffer.prototype.copy = function copy (target, targetStart, start, end) {
+ if (!start) start = 0
+ if (!end && end !== 0) end = this.length
+ if (targetStart >= target.length) targetStart = target.length
+ if (!targetStart) targetStart = 0
+ if (end > 0 && end < start) end = start
+
+ // Copy 0 bytes; we're done
+ if (end === start) return 0
+ if (target.length === 0 || this.length === 0) return 0
+
+ // Fatal error conditions
+ if (targetStart < 0) {
+ throw new RangeError('targetStart out of bounds')
+ }
+ if (start < 0 || start >= this.length) throw new RangeError('sourceStart out of bounds')
+ if (end < 0) throw new RangeError('sourceEnd out of bounds')
+
+ // Are we oob?
+ if (end > this.length) end = this.length
+ if (target.length - targetStart < end - start) {
+ end = target.length - targetStart + start
+ }
+
+ var len = end - start
+ var i
+
+ if (this === target && start < targetStart && targetStart < end) {
+ // descending copy from end
+ for (i = len - 1; i >= 0; --i) {
+ target[i + targetStart] = this[i + start]
+ }
+ } else if (len < 1000 || !Buffer.TYPED_ARRAY_SUPPORT) {
+ // ascending copy from start
+ for (i = 0; i < len; ++i) {
+ target[i + targetStart] = this[i + start]
+ }
+ } else {
+ Uint8Array.prototype.set.call(
+ target,
+ this.subarray(start, start + len),
+ targetStart
+ )
+ }
+
+ return len
+}
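+
+// Usage sketch for copy (illustrative, hand-checked):
+//
+//   var src = Buffer.from('abcdef')
+//   var dst = Buffer.alloc(6, 0x2e)   // '......'
+//   src.copy(dst, 2, 0, 3)            // returns 3; dst is now '..abc.'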
+
+// Usage:
+// buffer.fill(number[, offset[, end]])
+// buffer.fill(buffer[, offset[, end]])
+// buffer.fill(string[, offset[, end]][, encoding])
+Buffer.prototype.fill = function fill (val, start, end, encoding) {
+ // Handle string cases:
+ if (typeof val === 'string') {
+ if (typeof start === 'string') {
+ encoding = start
+ start = 0
+ end = this.length
+ } else if (typeof end === 'string') {
+ encoding = end
+ end = this.length
+ }
+ if (val.length === 1) {
+ var code = val.charCodeAt(0)
+ if (code < 256) {
+ val = code
+ }
+ }
+ if (encoding !== undefined && typeof encoding !== 'string') {
+ throw new TypeError('encoding must be a string')
+ }
+ if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {
+ throw new TypeError('Unknown encoding: ' + encoding)
+ }
+ } else if (typeof val === 'number') {
+ val = val & 255
+ }
+
+ // Invalid ranges are not set to a default, so we can range-check early.
+ if (start < 0 || this.length < start || this.length < end) {
+ throw new RangeError('Out of range index')
+ }
+
+ if (end <= start) {
+ return this
+ }
+
+ start = start >>> 0
+ end = end === undefined ? this.length : end >>> 0
+
+ if (!val) val = 0
+
+ var i
+ if (typeof val === 'number') {
+ for (i = start; i < end; ++i) {
+ this[i] = val
+ }
+ } else {
+ var bytes = Buffer.isBuffer(val)
+ ? val
+ : utf8ToBytes(new Buffer(val, encoding).toString())
+ var len = bytes.length
+ for (i = 0; i < end - start; ++i) {
+ this[i + start] = bytes[i % len]
+ }
+ }
+
+ return this
+}
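+
+// Usage sketch for the fill overloads listed above (illustrative, hand-checked):
+//
+//   Buffer.alloc(5).fill('a')        // <Buffer 61 61 61 61 61>
+//   Buffer.alloc(6).fill('abc')      // <Buffer 61 62 63 61 62 63>
+//   Buffer.alloc(4).fill(0xff, 2)    // <Buffer 00 00 ff ff>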
+
+// HELPER FUNCTIONS
+// ================
+
+var INVALID_BASE64_RE = /[^+\/0-9A-Za-z-_]/g
+
+function base64clean (str) {
+ // Node strips out invalid characters like \n and \t from the string, base64-js does not
+ str = stringtrim(str).replace(INVALID_BASE64_RE, '')
+ // Node converts strings with length < 2 to ''
+ if (str.length < 2) return ''
+ // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not
+ while (str.length % 4 !== 0) {
+ str = str + '='
+ }
+ return str
+}
+
+function stringtrim (str) {
+ if (str.trim) return str.trim()
+ return str.replace(/^\s+|\s+$/g, '')
+}
+
+function toHex (n) {
+ if (n < 16) return '0' + n.toString(16)
+ return n.toString(16)
+}
+
+function utf8ToBytes (string, units) {
+ units = units || Infinity
+ var codePoint
+ var length = string.length
+ var leadSurrogate = null
+ var bytes = []
+
+ for (var i = 0; i < length; ++i) {
+ codePoint = string.charCodeAt(i)
+
+ // is surrogate component
+ if (codePoint > 0xD7FF && codePoint < 0xE000) {
+ // last char was a lead
+ if (!leadSurrogate) {
+ // no lead yet
+ if (codePoint > 0xDBFF) {
+ // unexpected trail
+ if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
+ continue
+ } else if (i + 1 === length) {
+ // unpaired lead
+ if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
+ continue
+ }
+
+ // valid lead
+ leadSurrogate = codePoint
+
+ continue
+ }
+
+ // 2 leads in a row
+ if (codePoint < 0xDC00) {
+ if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
+ leadSurrogate = codePoint
+ continue
+ }
+
+ // valid surrogate pair
+ codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000
+ } else if (leadSurrogate) {
+ // valid bmp char, but last char was a lead
+ if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
+ }
+
+ leadSurrogate = null
+
+ // encode utf8
+ if (codePoint < 0x80) {
+ if ((units -= 1) < 0) break
+ bytes.push(codePoint)
+ } else if (codePoint < 0x800) {
+ if ((units -= 2) < 0) break
+ bytes.push(
+ codePoint >> 0x6 | 0xC0,
+ codePoint & 0x3F | 0x80
+ )
+ } else if (codePoint < 0x10000) {
+ if ((units -= 3) < 0) break
+ bytes.push(
+ codePoint >> 0xC | 0xE0,
+ codePoint >> 0x6 & 0x3F | 0x80,
+ codePoint & 0x3F | 0x80
+ )
+ } else if (codePoint < 0x110000) {
+ if ((units -= 4) < 0) break
+ bytes.push(
+ codePoint >> 0x12 | 0xF0,
+ codePoint >> 0xC & 0x3F | 0x80,
+ codePoint >> 0x6 & 0x3F | 0x80,
+ codePoint & 0x3F | 0x80
+ )
+ } else {
+ throw new Error('Invalid code point')
+ }
+ }
+
+ return bytes
+}
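+
+// Worked example of the UTF-8 encoder above (illustrative, hand-checked):
+// '€' (U+20AC) becomes the 3-byte sequence [0xE2, 0x82, 0xAC]; an astral code
+// point such as U+1F600 (stored as a surrogate pair in JS strings) becomes the
+// 4-byte sequence [0xF0, 0x9F, 0x98, 0x80]; a lone surrogate is replaced by
+// U+FFFD, i.e. [0xEF, 0xBF, 0xBD].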
+
+function asciiToBytes (str) {
+ var byteArray = []
+ for (var i = 0; i < str.length; ++i) {
+ // Node's code seems to be doing this and not & 0x7F..
+ byteArray.push(str.charCodeAt(i) & 0xFF)
+ }
+ return byteArray
+}
+
+function utf16leToBytes (str, units) {
+ var c, hi, lo
+ var byteArray = []
+ for (var i = 0; i < str.length; ++i) {
+ if ((units -= 2) < 0) break
+
+ c = str.charCodeAt(i)
+ hi = c >> 8
+ lo = c % 256
+ byteArray.push(lo)
+ byteArray.push(hi)
+ }
+
+ return byteArray
+}
+
+function base64ToBytes (str) {
+ return base64.toByteArray(base64clean(str))
+}
+
+function blitBuffer (src, dst, offset, length) {
+ for (var i = 0; i < length; ++i) {
+ if ((i + offset >= dst.length) || (i >= src.length)) break
+ dst[i + offset] = src[i]
+ }
+ return i
+}
+
+function isnan (val) {
+ return val !== val // eslint-disable-line no-self-compare
+}
+
+/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(4)))
+
+/***/ }),
+/* 19 */
+/***/ (function(module, exports, __webpack_require__) {
+
+exports = module.exports = __webpack_require__(1)(undefined);
+// imports
+
+
+// module
+exports.push([module.i, "\n.cell[data-v-3ac4c361] {\n flex-direction: column;\n}\n", ""]);
+
+// exports
+
+
+/***/ }),
+/* 20 */
+/***/ (function(module, exports, __webpack_require__) {
+
+exports = module.exports = __webpack_require__(1)(undefined);
+// imports
+
+
+// module
+exports.push([module.i, "\n.cell,\n.input,\n.output {\n display: flex;\n width: 100%;\n margin-bottom: 10px;\n}\n.cell pre {\n margin: 0;\n width: 100%;\n}\n", ""]);
+
+// exports
+
+
+/***/ }),
+/* 21 */
+/***/ (function(module, exports, __webpack_require__) {
+
+exports = module.exports = __webpack_require__(1)(undefined);
+// imports
+
+
+// module
+exports.push([module.i, "\n.prompt[data-v-4f6bf458] {\n padding: 0 10px;\n min-width: 7em;\n font-family: monospace;\n}\n", ""]);
+
+// exports
+
+
+/***/ }),
+/* 22 */
+/***/ (function(module, exports, __webpack_require__) {
+
+exports = module.exports = __webpack_require__(1)(undefined);
+// imports
+
+
+// module
+exports.push([module.i, "\n.markdown .katex {\n display: block;\n text-align: center;\n}\n", ""]);
+
+// exports
+
+
+/***/ }),
+/* 23 */
+/***/ (function(module, exports) {
+
+exports.read = function (buffer, offset, isLE, mLen, nBytes) {
+ var e, m
+ var eLen = nBytes * 8 - mLen - 1
+ var eMax = (1 << eLen) - 1
+ var eBias = eMax >> 1
+ var nBits = -7
+ var i = isLE ? (nBytes - 1) : 0
+ var d = isLE ? -1 : 1
+ var s = buffer[offset + i]
+
+ i += d
+
+ e = s & ((1 << (-nBits)) - 1)
+ s >>= (-nBits)
+ nBits += eLen
+ for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8) {}
+
+ m = e & ((1 << (-nBits)) - 1)
+ e >>= (-nBits)
+ nBits += mLen
+ for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8) {}
+
+ if (e === 0) {
+ e = 1 - eBias
+ } else if (e === eMax) {
+ return m ? NaN : ((s ? -1 : 1) * Infinity)
+ } else {
+ m = m + Math.pow(2, mLen)
+ e = e - eBias
+ }
+ return (s ? -1 : 1) * m * Math.pow(2, e - mLen)
+}
+
+exports.write = function (buffer, value, offset, isLE, mLen, nBytes) {
+ var e, m, c
+ var eLen = nBytes * 8 - mLen - 1
+ var eMax = (1 << eLen) - 1
+ var eBias = eMax >> 1
+ var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0)
+ var i = isLE ? 0 : (nBytes - 1)
+ var d = isLE ? 1 : -1
+ var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0
+
+ value = Math.abs(value)
+
+ if (isNaN(value) || value === Infinity) {
+ m = isNaN(value) ? 1 : 0
+ e = eMax
+ } else {
+ e = Math.floor(Math.log(value) / Math.LN2)
+ if (value * (c = Math.pow(2, -e)) < 1) {
+ e--
+ c *= 2
+ }
+ if (e + eBias >= 1) {
+ value += rt / c
+ } else {
+ value += rt * Math.pow(2, 1 - eBias)
+ }
+ if (value * c >= 2) {
+ e++
+ c /= 2
+ }
+
+ if (e + eBias >= eMax) {
+ m = 0
+ e = eMax
+ } else if (e + eBias >= 1) {
+ m = (value * c - 1) * Math.pow(2, mLen)
+ e = e + eBias
+ } else {
+ m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen)
+ e = 0
+ }
+ }
+
+ for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {}
+
+ e = (e << mLen) | m
+ eLen += mLen
+ for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {}
+
+ buffer[offset + i - d] |= s * 128
+}
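+
+// Illustrative round trip for this module (hand-checked against IEEE 754
+// single precision, i.e. mLen = 23, nBytes = 4):
+//
+//   var buf = [0, 0, 0, 0]
+//   exports.write(buf, 1.5, 0, true, 23, 4)   // buf -> [0x00, 0x00, 0xC0, 0x3F]
+//   exports.read(buf, 0, true, 23, 4)         // 1.5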
+
+
+/***/ }),
+/* 24 */
+/***/ (function(module, exports) {
+
+var toString = {}.toString;
+
+module.exports = Array.isArray || function (arr) {
+ return toString.call(arr) == '[object Array]';
+};
+
+
+/***/ }),
+/* 25 */
/***/ (function(module, exports, __webpack_require__) {
/* WEBPACK VAR INJECTION */(function(global) {/**
@@ -2094,10 +4491,10 @@ if (true) {
return this || (typeof window !== 'undefined' ? window : global);
}());
-/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(2)))
+/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(4)))
/***/ }),
-/* 16 */
+/* 26 */
/***/ (function(module, exports) {
Prism.languages.python= {
@@ -2130,7 +4527,7 @@ Prism.languages.python= {
/***/ }),
-/* 17 */
+/* 27 */
/***/ (function(module, exports, __webpack_require__) {
/* WEBPACK VAR INJECTION */(function(global) {(function(){
@@ -2163,10 +4560,10 @@ Prism.hooks.add('wrap', function (env) {
})();
-/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(2)))
+/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(4)))
/***/ }),
-/* 18 */
+/* 28 */
/***/ (function(module, exports, __webpack_require__) {
/* WEBPACK VAR INJECTION */(function(global) {
@@ -2965,21 +5362,21 @@ Prism.languages.js = Prism.languages.javascript;
})();
-/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(2)))
+/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(4)))
/***/ }),
-/* 19 */
+/* 29 */
/***/ (function(module, exports, __webpack_require__) {
/* styles */
-__webpack_require__(49)
+__webpack_require__(42)
var Component = __webpack_require__(0)(
/* script */
- __webpack_require__(5),
+ __webpack_require__(7),
/* template */
- __webpack_require__(26),
+ __webpack_require__(36),
/* scopeId */
"data-v-3ac4c361",
/* cssModules */
@@ -3006,18 +5403,18 @@ module.exports = Component.exports
/***/ }),
-/* 20 */
+/* 30 */
/***/ (function(module, exports, __webpack_require__) {
/* styles */
-__webpack_require__(47)
+__webpack_require__(45)
var Component = __webpack_require__(0)(
/* script */
- __webpack_require__(7),
+ __webpack_require__(9),
/* template */
- __webpack_require__(30),
+ __webpack_require__(40),
/* scopeId */
null,
/* cssModules */
@@ -3044,14 +5441,14 @@ module.exports = Component.exports
/***/ }),
-/* 21 */
+/* 31 */
/***/ (function(module, exports, __webpack_require__) {
var Component = __webpack_require__(0)(
/* script */
- __webpack_require__(8),
+ __webpack_require__(10),
/* template */
- __webpack_require__(27),
+ __webpack_require__(37),
/* scopeId */
null,
/* cssModules */
@@ -3078,14 +5475,14 @@ module.exports = Component.exports
/***/ }),
-/* 22 */
+/* 32 */
/***/ (function(module, exports, __webpack_require__) {
var Component = __webpack_require__(0)(
/* script */
- __webpack_require__(9),
+ __webpack_require__(11),
/* template */
- __webpack_require__(24),
+ __webpack_require__(34),
/* scopeId */
null,
/* cssModules */
@@ -3112,14 +5509,14 @@ module.exports = Component.exports
/***/ }),
-/* 23 */
+/* 33 */
/***/ (function(module, exports, __webpack_require__) {
var Component = __webpack_require__(0)(
/* script */
- __webpack_require__(10),
+ __webpack_require__(12),
/* template */
- __webpack_require__(25),
+ __webpack_require__(35),
/* scopeId */
null,
/* cssModules */
@@ -3146,7 +5543,7 @@ module.exports = Component.exports
/***/ }),
-/* 24 */
+/* 34 */
/***/ (function(module, exports, __webpack_require__) {
module.exports={render:function (){var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h;
@@ -3167,7 +5564,7 @@ if (false) {
}
/***/ }),
-/* 25 */
+/* 35 */
/***/ (function(module, exports, __webpack_require__) {
module.exports={render:function (){var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h;
@@ -3191,7 +5588,7 @@ if (false) {
}
/***/ }),
-/* 26 */
+/* 36 */
/***/ (function(module, exports, __webpack_require__) {
module.exports={render:function (){var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h;
@@ -3221,7 +5618,7 @@ if (false) {
}
/***/ }),
-/* 27 */
+/* 37 */
/***/ (function(module, exports, __webpack_require__) {
module.exports={render:function (){var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h;
@@ -3242,11 +5639,11 @@ if (false) {
}
/***/ }),
-/* 28 */
+/* 38 */
/***/ (function(module, exports, __webpack_require__) {
module.exports={render:function (){var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h;
- return (_vm.hasNotebook) ? _c('div', _vm._l((_vm.notebook.cells), function(cell, index) {
+ return (_vm.hasNotebook) ? _c('div', _vm._l((_vm.cells), function(cell, index) {
return _c(_vm.cellType(cell.cell_type), {
key: index,
tag: "component",
@@ -3266,7 +5663,7 @@ if (false) {
}
/***/ }),
-/* 29 */
+/* 39 */
/***/ (function(module, exports, __webpack_require__) {
module.exports={render:function (){var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h;
@@ -3283,7 +5680,7 @@ if (false) {
}
/***/ }),
-/* 30 */
+/* 40 */
/***/ (function(module, exports, __webpack_require__) {
module.exports={render:function (){var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h;
@@ -3305,7 +5702,7 @@ if (false) {
}
/***/ }),
-/* 31 */
+/* 41 */
/***/ (function(module, exports, __webpack_require__) {
module.exports={render:function (){var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h;
@@ -3334,2153 +5731,23 @@ if (false) {
}
/***/ }),
-/* 32 */
-/***/ (function(module, exports, __webpack_require__) {
-
-"use strict";
-
-
-var Notebook = __webpack_require__(4);
-
-module.exports = {
- install: function install(_vue) {
- _vue.component('notebook-lab', Notebook);
- }
-};
-
-/***/ }),
-/* 33 */
-/***/ (function(module, exports, __webpack_require__) {
-
-"use strict";
-
-
-exports.byteLength = byteLength
-exports.toByteArray = toByteArray
-exports.fromByteArray = fromByteArray
-
-var lookup = []
-var revLookup = []
-var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array
-
-var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
-for (var i = 0, len = code.length; i < len; ++i) {
- lookup[i] = code[i]
- revLookup[code.charCodeAt(i)] = i
-}
-
-revLookup['-'.charCodeAt(0)] = 62
-revLookup['_'.charCodeAt(0)] = 63
-
-function placeHoldersCount (b64) {
- var len = b64.length
- if (len % 4 > 0) {
- throw new Error('Invalid string. Length must be a multiple of 4')
- }
-
- // the number of equal signs (place holders)
- // if there are two placeholders, then the two characters before it
- // represent one byte
- // if there is only one, then the three characters before it represent 2 bytes
- // this is just a cheap hack to not do indexOf twice
- return b64[len - 2] === '=' ? 2 : b64[len - 1] === '=' ? 1 : 0
-}
-
-function byteLength (b64) {
- // base64 is 4/3 + up to two characters of the original data
- return b64.length * 3 / 4 - placeHoldersCount(b64)
-}
-
-function toByteArray (b64) {
- var i, j, l, tmp, placeHolders, arr
- var len = b64.length
- placeHolders = placeHoldersCount(b64)
-
- arr = new Arr(len * 3 / 4 - placeHolders)
-
- // if there are placeholders, only get up to the last complete 4 chars
- l = placeHolders > 0 ? len - 4 : len
-
- var L = 0
-
- for (i = 0, j = 0; i < l; i += 4, j += 3) {
- tmp = (revLookup[b64.charCodeAt(i)] << 18) | (revLookup[b64.charCodeAt(i + 1)] << 12) | (revLookup[b64.charCodeAt(i + 2)] << 6) | revLookup[b64.charCodeAt(i + 3)]
- arr[L++] = (tmp >> 16) & 0xFF
- arr[L++] = (tmp >> 8) & 0xFF
- arr[L++] = tmp & 0xFF
- }
-
- if (placeHolders === 2) {
- tmp = (revLookup[b64.charCodeAt(i)] << 2) | (revLookup[b64.charCodeAt(i + 1)] >> 4)
- arr[L++] = tmp & 0xFF
- } else if (placeHolders === 1) {
- tmp = (revLookup[b64.charCodeAt(i)] << 10) | (revLookup[b64.charCodeAt(i + 1)] << 4) | (revLookup[b64.charCodeAt(i + 2)] >> 2)
- arr[L++] = (tmp >> 8) & 0xFF
- arr[L++] = tmp & 0xFF
- }
-
- return arr
-}
-
-function tripletToBase64 (num) {
- return lookup[num >> 18 & 0x3F] + lookup[num >> 12 & 0x3F] + lookup[num >> 6 & 0x3F] + lookup[num & 0x3F]
-}
-
-function encodeChunk (uint8, start, end) {
- var tmp
- var output = []
- for (var i = start; i < end; i += 3) {
- tmp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2])
- output.push(tripletToBase64(tmp))
- }
- return output.join('')
-}
-
-function fromByteArray (uint8) {
- var tmp
- var len = uint8.length
- var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes
- var output = ''
- var parts = []
- var maxChunkLength = 16383 // must be multiple of 3
-
- // go through the array every three bytes, we'll deal with trailing stuff later
- for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
- parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)))
- }
-
- // pad the end with zeros, but make sure to not forget the extra bytes
- if (extraBytes === 1) {
- tmp = uint8[len - 1]
- output += lookup[tmp >> 2]
- output += lookup[(tmp << 4) & 0x3F]
- output += '=='
- } else if (extraBytes === 2) {
- tmp = (uint8[len - 2] << 8) + (uint8[len - 1])
- output += lookup[tmp >> 10]
- output += lookup[(tmp >> 4) & 0x3F]
- output += lookup[(tmp << 2) & 0x3F]
- output += '='
- }
-
- parts.push(output)
-
- return parts.join('')
-}
-
-
-/***/ }),
-/* 34 */
-/***/ (function(module, exports, __webpack_require__) {
-
-"use strict";
-/* WEBPACK VAR INJECTION */(function(global) {/*!
- * The buffer module from node.js, for the browser.
- *
- * @author Feross Aboukhadijeh <feross@feross.org> <http://feross.org>
- * @license MIT
- */
-/* eslint-disable no-proto */
-
-
-
-var base64 = __webpack_require__(33)
-var ieee754 = __webpack_require__(37)
-var isArray = __webpack_require__(38)
-
-exports.Buffer = Buffer
-exports.SlowBuffer = SlowBuffer
-exports.INSPECT_MAX_BYTES = 50
-
-/**
- * If `Buffer.TYPED_ARRAY_SUPPORT`:
- * === true Use Uint8Array implementation (fastest)
- * === false Use Object implementation (most compatible, even IE6)
- *
- * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+,
- * Opera 11.6+, iOS 4.2+.
- *
- * Due to various browser bugs, sometimes the Object implementation will be used even
- * when the browser supports typed arrays.
- *
- * Note:
- *
- * - Firefox 4-29 lacks support for adding new properties to `Uint8Array` instances,
- * See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438.
- *
- * - Chrome 9-10 is missing the `TypedArray.prototype.subarray` function.
- *
- * - IE10 has a broken `TypedArray.prototype.subarray` function which returns arrays of
- * incorrect length in some situations.
- *
- * We detect these buggy browsers and set `Buffer.TYPED_ARRAY_SUPPORT` to `false` so they
- * get the Object implementation, which is slower but behaves correctly.
- */
-Buffer.TYPED_ARRAY_SUPPORT = global.TYPED_ARRAY_SUPPORT !== undefined
- ? global.TYPED_ARRAY_SUPPORT
- : typedArraySupport()
-
-/*
- * Export kMaxLength after typed array support is determined.
- */
-exports.kMaxLength = kMaxLength()
-
-function typedArraySupport () {
- try {
- var arr = new Uint8Array(1)
- arr.__proto__ = {__proto__: Uint8Array.prototype, foo: function () { return 42 }}
- return arr.foo() === 42 && // typed array instances can be augmented
- typeof arr.subarray === 'function' && // chrome 9-10 lack `subarray`
- arr.subarray(1, 1).byteLength === 0 // ie10 has broken `subarray`
- } catch (e) {
- return false
- }
-}
-
-function kMaxLength () {
- return Buffer.TYPED_ARRAY_SUPPORT
- ? 0x7fffffff
- : 0x3fffffff
-}
-
-function createBuffer (that, length) {
- if (kMaxLength() < length) {
- throw new RangeError('Invalid typed array length')
- }
- if (Buffer.TYPED_ARRAY_SUPPORT) {
- // Return an augmented `Uint8Array` instance, for best performance
- that = new Uint8Array(length)
- that.__proto__ = Buffer.prototype
- } else {
- // Fallback: Return an object instance of the Buffer class
- if (that === null) {
- that = new Buffer(length)
- }
- that.length = length
- }
-
- return that
-}
-
-/**
- * The Buffer constructor returns instances of `Uint8Array` that have their
- * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of
- * `Uint8Array`, so the returned instances will have all the node `Buffer` methods
- * and the `Uint8Array` methods. Square bracket notation works as expected -- it
- * returns a single octet.
- *
- * The `Uint8Array` prototype remains unmodified.
- */
-
-function Buffer (arg, encodingOrOffset, length) {
- if (!Buffer.TYPED_ARRAY_SUPPORT && !(this instanceof Buffer)) {
- return new Buffer(arg, encodingOrOffset, length)
- }
-
- // Common case.
- if (typeof arg === 'number') {
- if (typeof encodingOrOffset === 'string') {
- throw new Error(
- 'If encoding is specified then the first argument must be a string'
- )
- }
- return allocUnsafe(this, arg)
- }
- return from(this, arg, encodingOrOffset, length)
-}
-
-Buffer.poolSize = 8192 // not used by this implementation
-
-// TODO: Legacy, not needed anymore. Remove in next major version.
-Buffer._augment = function (arr) {
- arr.__proto__ = Buffer.prototype
- return arr
-}
-
-function from (that, value, encodingOrOffset, length) {
- if (typeof value === 'number') {
- throw new TypeError('"value" argument must not be a number')
- }
-
- if (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) {
- return fromArrayBuffer(that, value, encodingOrOffset, length)
- }
-
- if (typeof value === 'string') {
- return fromString(that, value, encodingOrOffset)
- }
-
- return fromObject(that, value)
-}
-
-/**
- * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError
- * if value is a number.
- * Buffer.from(str[, encoding])
- * Buffer.from(array)
- * Buffer.from(buffer)
- * Buffer.from(arrayBuffer[, byteOffset[, length]])
- **/
-Buffer.from = function (value, encodingOrOffset, length) {
- return from(null, value, encodingOrOffset, length)
-}
-
-if (Buffer.TYPED_ARRAY_SUPPORT) {
- Buffer.prototype.__proto__ = Uint8Array.prototype
- Buffer.__proto__ = Uint8Array
- if (typeof Symbol !== 'undefined' && Symbol.species &&
- Buffer[Symbol.species] === Buffer) {
- // Fix subarray() in ES2016. See: https://github.com/feross/buffer/pull/97
- Object.defineProperty(Buffer, Symbol.species, {
- value: null,
- configurable: true
- })
- }
-}
-
-function assertSize (size) {
- if (typeof size !== 'number') {
- throw new TypeError('"size" argument must be a number')
- } else if (size < 0) {
- throw new RangeError('"size" argument must not be negative')
- }
-}
-
-function alloc (that, size, fill, encoding) {
- assertSize(size)
- if (size <= 0) {
- return createBuffer(that, size)
- }
- if (fill !== undefined) {
- // Only pay attention to encoding if it's a string. This
- // prevents accidentally sending in a number that would
- // be interpreted as a start offset.
- return typeof encoding === 'string'
- ? createBuffer(that, size).fill(fill, encoding)
- : createBuffer(that, size).fill(fill)
- }
- return createBuffer(that, size)
-}
-
-/**
- * Creates a new filled Buffer instance.
- * alloc(size[, fill[, encoding]])
- **/
-Buffer.alloc = function (size, fill, encoding) {
- return alloc(null, size, fill, encoding)
-}
-
-function allocUnsafe (that, size) {
- assertSize(size)
- that = createBuffer(that, size < 0 ? 0 : checked(size) | 0)
- if (!Buffer.TYPED_ARRAY_SUPPORT) {
- for (var i = 0; i < size; ++i) {
- that[i] = 0
- }
- }
- return that
-}
-
-/**
- * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance.
- * */
-Buffer.allocUnsafe = function (size) {
- return allocUnsafe(null, size)
-}
-/**
- * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
- */
-Buffer.allocUnsafeSlow = function (size) {
- return allocUnsafe(null, size)
-}
-
-function fromString (that, string, encoding) {
- if (typeof encoding !== 'string' || encoding === '') {
- encoding = 'utf8'
- }
-
- if (!Buffer.isEncoding(encoding)) {
- throw new TypeError('"encoding" must be a valid string encoding')
- }
-
- var length = byteLength(string, encoding) | 0
- that = createBuffer(that, length)
-
- var actual = that.write(string, encoding)
-
- if (actual !== length) {
- // Writing a hex string, for example, that contains invalid characters will
- // cause everything after the first invalid character to be ignored. (e.g.
- // 'abxxcd' will be treated as 'ab')
- that = that.slice(0, actual)
- }
-
- return that
-}
-
-function fromArrayLike (that, array) {
- var length = array.length < 0 ? 0 : checked(array.length) | 0
- that = createBuffer(that, length)
- for (var i = 0; i < length; i += 1) {
- that[i] = array[i] & 255
- }
- return that
-}
-
-function fromArrayBuffer (that, array, byteOffset, length) {
- array.byteLength // this throws if `array` is not a valid ArrayBuffer
-
- if (byteOffset < 0 || array.byteLength < byteOffset) {
- throw new RangeError('\'offset\' is out of bounds')
- }
-
- if (array.byteLength < byteOffset + (length || 0)) {
- throw new RangeError('\'length\' is out of bounds')
- }
-
- if (byteOffset === undefined && length === undefined) {
- array = new Uint8Array(array)
- } else if (length === undefined) {
- array = new Uint8Array(array, byteOffset)
- } else {
- array = new Uint8Array(array, byteOffset, length)
- }
-
- if (Buffer.TYPED_ARRAY_SUPPORT) {
- // Return an augmented `Uint8Array` instance, for best performance
- that = array
- that.__proto__ = Buffer.prototype
- } else {
- // Fallback: Return an object instance of the Buffer class
- that = fromArrayLike(that, array)
- }
- return that
-}
-
-function fromObject (that, obj) {
- if (Buffer.isBuffer(obj)) {
- var len = checked(obj.length) | 0
- that = createBuffer(that, len)
-
- if (that.length === 0) {
- return that
- }
-
- obj.copy(that, 0, 0, len)
- return that
- }
-
- if (obj) {
- if ((typeof ArrayBuffer !== 'undefined' &&
- obj.buffer instanceof ArrayBuffer) || 'length' in obj) {
- if (typeof obj.length !== 'number' || isnan(obj.length)) {
- return createBuffer(that, 0)
- }
- return fromArrayLike(that, obj)
- }
-
- if (obj.type === 'Buffer' && isArray(obj.data)) {
- return fromArrayLike(that, obj.data)
- }
- }
-
- throw new TypeError('First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.')
-}
-
-function checked (length) {
- // Note: cannot use `length < kMaxLength()` here because that fails when
- // length is NaN (which is otherwise coerced to zero.)
- if (length >= kMaxLength()) {
- throw new RangeError('Attempt to allocate Buffer larger than maximum ' +
- 'size: 0x' + kMaxLength().toString(16) + ' bytes')
- }
- return length | 0
-}
-
-function SlowBuffer (length) {
- if (+length != length) { // eslint-disable-line eqeqeq
- length = 0
- }
- return Buffer.alloc(+length)
-}
-
-Buffer.isBuffer = function isBuffer (b) {
- return !!(b != null && b._isBuffer)
-}
-
-Buffer.compare = function compare (a, b) {
- if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {
- throw new TypeError('Arguments must be Buffers')
- }
-
- if (a === b) return 0
-
- var x = a.length
- var y = b.length
-
- for (var i = 0, len = Math.min(x, y); i < len; ++i) {
- if (a[i] !== b[i]) {
- x = a[i]
- y = b[i]
- break
- }
- }
-
- if (x < y) return -1
- if (y < x) return 1
- return 0
-}
-
-Buffer.isEncoding = function isEncoding (encoding) {
- switch (String(encoding).toLowerCase()) {
- case 'hex':
- case 'utf8':
- case 'utf-8':
- case 'ascii':
- case 'latin1':
- case 'binary':
- case 'base64':
- case 'ucs2':
- case 'ucs-2':
- case 'utf16le':
- case 'utf-16le':
- return true
- default:
- return false
- }
-}
-
-Buffer.concat = function concat (list, length) {
- if (!isArray(list)) {
- throw new TypeError('"list" argument must be an Array of Buffers')
- }
-
- if (list.length === 0) {
- return Buffer.alloc(0)
- }
-
- var i
- if (length === undefined) {
- length = 0
- for (i = 0; i < list.length; ++i) {
- length += list[i].length
- }
- }
-
- var buffer = Buffer.allocUnsafe(length)
- var pos = 0
- for (i = 0; i < list.length; ++i) {
- var buf = list[i]
- if (!Buffer.isBuffer(buf)) {
- throw new TypeError('"list" argument must be an Array of Buffers')
- }
- buf.copy(buffer, pos)
- pos += buf.length
- }
- return buffer
-}
-
-function byteLength (string, encoding) {
- if (Buffer.isBuffer(string)) {
- return string.length
- }
- if (typeof ArrayBuffer !== 'undefined' && typeof ArrayBuffer.isView === 'function' &&
- (ArrayBuffer.isView(string) || string instanceof ArrayBuffer)) {
- return string.byteLength
- }
- if (typeof string !== 'string') {
- string = '' + string
- }
-
- var len = string.length
- if (len === 0) return 0
-
- // Use a for loop to avoid recursion
- var loweredCase = false
- for (;;) {
- switch (encoding) {
- case 'ascii':
- case 'latin1':
- case 'binary':
- return len
- case 'utf8':
- case 'utf-8':
- case undefined:
- return utf8ToBytes(string).length
- case 'ucs2':
- case 'ucs-2':
- case 'utf16le':
- case 'utf-16le':
- return len * 2
- case 'hex':
- return len >>> 1
- case 'base64':
- return base64ToBytes(string).length
- default:
- if (loweredCase) return utf8ToBytes(string).length // assume utf8
- encoding = ('' + encoding).toLowerCase()
- loweredCase = true
- }
- }
-}
-Buffer.byteLength = byteLength
-
-function slowToString (encoding, start, end) {
- var loweredCase = false
-
- // No need to verify that "this.length <= MAX_UINT32" since it's a read-only
- // property of a typed array.
-
- // This behaves neither like String nor Uint8Array in that we set start/end
- // to their upper/lower bounds if the value passed is out of range.
- // undefined is handled specially as per ECMA-262 6th Edition,
- // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.
- if (start === undefined || start < 0) {
- start = 0
- }
- // Return early if start > this.length. Done here to prevent potential uint32
- // coercion fail below.
- if (start > this.length) {
- return ''
- }
-
- if (end === undefined || end > this.length) {
- end = this.length
- }
-
- if (end <= 0) {
- return ''
- }
-
- // Force coercion to uint32. This will also coerce falsy/NaN values to 0.
- end >>>= 0
- start >>>= 0
-
- if (end <= start) {
- return ''
- }
-
- if (!encoding) encoding = 'utf8'
-
- while (true) {
- switch (encoding) {
- case 'hex':
- return hexSlice(this, start, end)
-
- case 'utf8':
- case 'utf-8':
- return utf8Slice(this, start, end)
-
- case 'ascii':
- return asciiSlice(this, start, end)
-
- case 'latin1':
- case 'binary':
- return latin1Slice(this, start, end)
-
- case 'base64':
- return base64Slice(this, start, end)
-
- case 'ucs2':
- case 'ucs-2':
- case 'utf16le':
- case 'utf-16le':
- return utf16leSlice(this, start, end)
-
- default:
- if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
- encoding = (encoding + '').toLowerCase()
- loweredCase = true
- }
- }
-}
-
-// The property is used by `Buffer.isBuffer` and `is-buffer` (in Safari 5-7) to detect
-// Buffer instances.
-Buffer.prototype._isBuffer = true
-
-function swap (b, n, m) {
- var i = b[n]
- b[n] = b[m]
- b[m] = i
-}
-
-Buffer.prototype.swap16 = function swap16 () {
- var len = this.length
- if (len % 2 !== 0) {
- throw new RangeError('Buffer size must be a multiple of 16-bits')
- }
- for (var i = 0; i < len; i += 2) {
- swap(this, i, i + 1)
- }
- return this
-}
-
-Buffer.prototype.swap32 = function swap32 () {
- var len = this.length
- if (len % 4 !== 0) {
- throw new RangeError('Buffer size must be a multiple of 32-bits')
- }
- for (var i = 0; i < len; i += 4) {
- swap(this, i, i + 3)
- swap(this, i + 1, i + 2)
- }
- return this
-}
-
-Buffer.prototype.swap64 = function swap64 () {
- var len = this.length
- if (len % 8 !== 0) {
- throw new RangeError('Buffer size must be a multiple of 64-bits')
- }
- for (var i = 0; i < len; i += 8) {
- swap(this, i, i + 7)
- swap(this, i + 1, i + 6)
- swap(this, i + 2, i + 5)
- swap(this, i + 3, i + 4)
- }
- return this
-}
-
-Buffer.prototype.toString = function toString () {
- var length = this.length | 0
- if (length === 0) return ''
- if (arguments.length === 0) return utf8Slice(this, 0, length)
- return slowToString.apply(this, arguments)
-}
-
-Buffer.prototype.equals = function equals (b) {
- if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')
- if (this === b) return true
- return Buffer.compare(this, b) === 0
-}
-
-Buffer.prototype.inspect = function inspect () {
- var str = ''
- var max = exports.INSPECT_MAX_BYTES
- if (this.length > 0) {
- str = this.toString('hex', 0, max).match(/.{2}/g).join(' ')
- if (this.length > max) str += ' ... '
- }
- return '<Buffer ' + str + '>'
-}
-
-Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {
- if (!Buffer.isBuffer(target)) {
- throw new TypeError('Argument must be a Buffer')
- }
-
- if (start === undefined) {
- start = 0
- }
- if (end === undefined) {
- end = target ? target.length : 0
- }
- if (thisStart === undefined) {
- thisStart = 0
- }
- if (thisEnd === undefined) {
- thisEnd = this.length
- }
-
- if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) {
- throw new RangeError('out of range index')
- }
-
- if (thisStart >= thisEnd && start >= end) {
- return 0
- }
- if (thisStart >= thisEnd) {
- return -1
- }
- if (start >= end) {
- return 1
- }
-
- start >>>= 0
- end >>>= 0
- thisStart >>>= 0
- thisEnd >>>= 0
-
- if (this === target) return 0
-
- var x = thisEnd - thisStart
- var y = end - start
- var len = Math.min(x, y)
-
- var thisCopy = this.slice(thisStart, thisEnd)
- var targetCopy = target.slice(start, end)
-
- for (var i = 0; i < len; ++i) {
- if (thisCopy[i] !== targetCopy[i]) {
- x = thisCopy[i]
- y = targetCopy[i]
- break
- }
- }
-
- if (x < y) return -1
- if (y < x) return 1
- return 0
-}
-
-// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
-// OR the last index of `val` in `buffer` at offset <= `byteOffset`.
-//
-// Arguments:
-// - buffer - a Buffer to search
-// - val - a string, Buffer, or number
-// - byteOffset - an index into `buffer`; will be clamped to an int32
- // - encoding - an optional encoding, relevant if val is a string
-// - dir - true for indexOf, false for lastIndexOf
-function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) {
- // Empty buffer means no match
- if (buffer.length === 0) return -1
-
- // Normalize byteOffset
- if (typeof byteOffset === 'string') {
- encoding = byteOffset
- byteOffset = 0
- } else if (byteOffset > 0x7fffffff) {
- byteOffset = 0x7fffffff
- } else if (byteOffset < -0x80000000) {
- byteOffset = -0x80000000
- }
- byteOffset = +byteOffset // Coerce to Number.
- if (isNaN(byteOffset)) {
- // byteOffset: if it's undefined, null, NaN, "foo", etc, search whole buffer
- byteOffset = dir ? 0 : (buffer.length - 1)
- }
-
- // Normalize byteOffset: negative offsets start from the end of the buffer
- if (byteOffset < 0) byteOffset = buffer.length + byteOffset
- if (byteOffset >= buffer.length) {
- if (dir) return -1
- else byteOffset = buffer.length - 1
- } else if (byteOffset < 0) {
- if (dir) byteOffset = 0
- else return -1
- }
-
- // Normalize val
- if (typeof val === 'string') {
- val = Buffer.from(val, encoding)
- }
-
- // Finally, search either indexOf (if dir is true) or lastIndexOf
- if (Buffer.isBuffer(val)) {
- // Special case: looking for empty string/buffer always fails
- if (val.length === 0) {
- return -1
- }
- return arrayIndexOf(buffer, val, byteOffset, encoding, dir)
- } else if (typeof val === 'number') {
- val = val & 0xFF // Search for a byte value [0-255]
- if (Buffer.TYPED_ARRAY_SUPPORT &&
- typeof Uint8Array.prototype.indexOf === 'function') {
- if (dir) {
- return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset)
- } else {
- return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset)
- }
- }
- return arrayIndexOf(buffer, [ val ], byteOffset, encoding, dir)
- }
-
- throw new TypeError('val must be string, number or Buffer')
-}
-
-function arrayIndexOf (arr, val, byteOffset, encoding, dir) {
- var indexSize = 1
- var arrLength = arr.length
- var valLength = val.length
-
- if (encoding !== undefined) {
- encoding = String(encoding).toLowerCase()
- if (encoding === 'ucs2' || encoding === 'ucs-2' ||
- encoding === 'utf16le' || encoding === 'utf-16le') {
- if (arr.length < 2 || val.length < 2) {
- return -1
- }
- indexSize = 2
- arrLength /= 2
- valLength /= 2
- byteOffset /= 2
- }
- }
-
- function read (buf, i) {
- if (indexSize === 1) {
- return buf[i]
- } else {
- return buf.readUInt16BE(i * indexSize)
- }
- }
-
- var i
- if (dir) {
- var foundIndex = -1
- for (i = byteOffset; i < arrLength; i++) {
- if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
- if (foundIndex === -1) foundIndex = i
- if (i - foundIndex + 1 === valLength) return foundIndex * indexSize
- } else {
- if (foundIndex !== -1) i -= i - foundIndex
- foundIndex = -1
- }
- }
- } else {
- if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength
- for (i = byteOffset; i >= 0; i--) {
- var found = true
- for (var j = 0; j < valLength; j++) {
- if (read(arr, i + j) !== read(val, j)) {
- found = false
- break
- }
- }
- if (found) return i
- }
- }
-
- return -1
-}
-
-Buffer.prototype.includes = function includes (val, byteOffset, encoding) {
- return this.indexOf(val, byteOffset, encoding) !== -1
-}
-
-Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) {
- return bidirectionalIndexOf(this, val, byteOffset, encoding, true)
-}
-
-Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) {
- return bidirectionalIndexOf(this, val, byteOffset, encoding, false)
-}
-
-function hexWrite (buf, string, offset, length) {
- offset = Number(offset) || 0
- var remaining = buf.length - offset
- if (!length) {
- length = remaining
- } else {
- length = Number(length)
- if (length > remaining) {
- length = remaining
- }
- }
-
- // must be an even number of digits
- var strLen = string.length
- if (strLen % 2 !== 0) throw new TypeError('Invalid hex string')
-
- if (length > strLen / 2) {
- length = strLen / 2
- }
- for (var i = 0; i < length; ++i) {
- var parsed = parseInt(string.substr(i * 2, 2), 16)
- if (isNaN(parsed)) return i
- buf[offset + i] = parsed
- }
- return i
-}
-
-function utf8Write (buf, string, offset, length) {
- return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length)
-}
-
-function asciiWrite (buf, string, offset, length) {
- return blitBuffer(asciiToBytes(string), buf, offset, length)
-}
-
-function latin1Write (buf, string, offset, length) {
- return asciiWrite(buf, string, offset, length)
-}
-
-function base64Write (buf, string, offset, length) {
- return blitBuffer(base64ToBytes(string), buf, offset, length)
-}
-
-function ucs2Write (buf, string, offset, length) {
- return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length)
-}
-
-Buffer.prototype.write = function write (string, offset, length, encoding) {
- // Buffer#write(string)
- if (offset === undefined) {
- encoding = 'utf8'
- length = this.length
- offset = 0
- // Buffer#write(string, encoding)
- } else if (length === undefined && typeof offset === 'string') {
- encoding = offset
- length = this.length
- offset = 0
- // Buffer#write(string, offset[, length][, encoding])
- } else if (isFinite(offset)) {
- offset = offset | 0
- if (isFinite(length)) {
- length = length | 0
- if (encoding === undefined) encoding = 'utf8'
- } else {
- encoding = length
- length = undefined
- }
- // legacy write(string, encoding, offset, length) - remove in v0.13
- } else {
- throw new Error(
- 'Buffer.write(string, encoding, offset[, length]) is no longer supported'
- )
- }
-
- var remaining = this.length - offset
- if (length === undefined || length > remaining) length = remaining
-
- if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) {
- throw new RangeError('Attempt to write outside buffer bounds')
- }
-
- if (!encoding) encoding = 'utf8'
-
- var loweredCase = false
- for (;;) {
- switch (encoding) {
- case 'hex':
- return hexWrite(this, string, offset, length)
-
- case 'utf8':
- case 'utf-8':
- return utf8Write(this, string, offset, length)
-
- case 'ascii':
- return asciiWrite(this, string, offset, length)
-
- case 'latin1':
- case 'binary':
- return latin1Write(this, string, offset, length)
-
- case 'base64':
- // Warning: maxLength not taken into account in base64Write
- return base64Write(this, string, offset, length)
-
- case 'ucs2':
- case 'ucs-2':
- case 'utf16le':
- case 'utf-16le':
- return ucs2Write(this, string, offset, length)
-
- default:
- if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
- encoding = ('' + encoding).toLowerCase()
- loweredCase = true
- }
- }
-}
-
-Buffer.prototype.toJSON = function toJSON () {
- return {
- type: 'Buffer',
- data: Array.prototype.slice.call(this._arr || this, 0)
- }
-}
-
-function base64Slice (buf, start, end) {
- if (start === 0 && end === buf.length) {
- return base64.fromByteArray(buf)
- } else {
- return base64.fromByteArray(buf.slice(start, end))
- }
-}
-
-function utf8Slice (buf, start, end) {
- end = Math.min(buf.length, end)
- var res = []
-
- var i = start
- while (i < end) {
- var firstByte = buf[i]
- var codePoint = null
- var bytesPerSequence = (firstByte > 0xEF) ? 4
- : (firstByte > 0xDF) ? 3
- : (firstByte > 0xBF) ? 2
- : 1
-
- if (i + bytesPerSequence <= end) {
- var secondByte, thirdByte, fourthByte, tempCodePoint
-
- switch (bytesPerSequence) {
- case 1:
- if (firstByte < 0x80) {
- codePoint = firstByte
- }
- break
- case 2:
- secondByte = buf[i + 1]
- if ((secondByte & 0xC0) === 0x80) {
- tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F)
- if (tempCodePoint > 0x7F) {
- codePoint = tempCodePoint
- }
- }
- break
- case 3:
- secondByte = buf[i + 1]
- thirdByte = buf[i + 2]
- if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) {
- tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F)
- if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) {
- codePoint = tempCodePoint
- }
- }
- break
- case 4:
- secondByte = buf[i + 1]
- thirdByte = buf[i + 2]
- fourthByte = buf[i + 3]
- if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) {
- tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F)
- if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) {
- codePoint = tempCodePoint
- }
- }
- }
- }
-
- if (codePoint === null) {
- // we did not generate a valid codePoint so insert a
- // replacement char (U+FFFD) and advance only 1 byte
- codePoint = 0xFFFD
- bytesPerSequence = 1
- } else if (codePoint > 0xFFFF) {
- // encode to utf16 (surrogate pair dance)
- codePoint -= 0x10000
- res.push(codePoint >>> 10 & 0x3FF | 0xD800)
- codePoint = 0xDC00 | codePoint & 0x3FF
- }
-
- res.push(codePoint)
- i += bytesPerSequence
- }
-
- return decodeCodePointsArray(res)
-}
-
-// Based on http://stackoverflow.com/a/22747272/680742, the browser with
-// the lowest limit is Chrome, with 0x10000 args.
-// We go 1 magnitude less, for safety
-var MAX_ARGUMENTS_LENGTH = 0x1000
-
-function decodeCodePointsArray (codePoints) {
- var len = codePoints.length
- if (len <= MAX_ARGUMENTS_LENGTH) {
- return String.fromCharCode.apply(String, codePoints) // avoid extra slice()
- }
-
- // Decode in chunks to avoid "call stack size exceeded".
- var res = ''
- var i = 0
- while (i < len) {
- res += String.fromCharCode.apply(
- String,
- codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH)
- )
- }
- return res
-}
-
-function asciiSlice (buf, start, end) {
- var ret = ''
- end = Math.min(buf.length, end)
-
- for (var i = start; i < end; ++i) {
- ret += String.fromCharCode(buf[i] & 0x7F)
- }
- return ret
-}
-
-function latin1Slice (buf, start, end) {
- var ret = ''
- end = Math.min(buf.length, end)
-
- for (var i = start; i < end; ++i) {
- ret += String.fromCharCode(buf[i])
- }
- return ret
-}
-
-function hexSlice (buf, start, end) {
- var len = buf.length
-
- if (!start || start < 0) start = 0
- if (!end || end < 0 || end > len) end = len
-
- var out = ''
- for (var i = start; i < end; ++i) {
- out += toHex(buf[i])
- }
- return out
-}
-
-function utf16leSlice (buf, start, end) {
- var bytes = buf.slice(start, end)
- var res = ''
- for (var i = 0; i < bytes.length; i += 2) {
- res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256)
- }
- return res
-}
-
-Buffer.prototype.slice = function slice (start, end) {
- var len = this.length
- start = ~~start
- end = end === undefined ? len : ~~end
-
- if (start < 0) {
- start += len
- if (start < 0) start = 0
- } else if (start > len) {
- start = len
- }
-
- if (end < 0) {
- end += len
- if (end < 0) end = 0
- } else if (end > len) {
- end = len
- }
-
- if (end < start) end = start
-
- var newBuf
- if (Buffer.TYPED_ARRAY_SUPPORT) {
- newBuf = this.subarray(start, end)
- newBuf.__proto__ = Buffer.prototype
- } else {
- var sliceLen = end - start
- newBuf = new Buffer(sliceLen, undefined)
- for (var i = 0; i < sliceLen; ++i) {
- newBuf[i] = this[i + start]
- }
- }
-
- return newBuf
-}
-
-/*
- * Need to make sure that buffer isn't trying to write out of bounds.
- */
-function checkOffset (offset, ext, length) {
- if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint')
- if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')
-}
-
-Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) {
- offset = offset | 0
- byteLength = byteLength | 0
- if (!noAssert) checkOffset(offset, byteLength, this.length)
-
- var val = this[offset]
- var mul = 1
- var i = 0
- while (++i < byteLength && (mul *= 0x100)) {
- val += this[offset + i] * mul
- }
-
- return val
-}
-
-Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) {
- offset = offset | 0
- byteLength = byteLength | 0
- if (!noAssert) {
- checkOffset(offset, byteLength, this.length)
- }
-
- var val = this[offset + --byteLength]
- var mul = 1
- while (byteLength > 0 && (mul *= 0x100)) {
- val += this[offset + --byteLength] * mul
- }
-
- return val
-}
-
-Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 1, this.length)
- return this[offset]
-}
-
-Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 2, this.length)
- return this[offset] | (this[offset + 1] << 8)
-}
-
-Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 2, this.length)
- return (this[offset] << 8) | this[offset + 1]
-}
-
-Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 4, this.length)
-
- return ((this[offset]) |
- (this[offset + 1] << 8) |
- (this[offset + 2] << 16)) +
- (this[offset + 3] * 0x1000000)
-}
-
-Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 4, this.length)
-
- return (this[offset] * 0x1000000) +
- ((this[offset + 1] << 16) |
- (this[offset + 2] << 8) |
- this[offset + 3])
-}
-
-Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) {
- offset = offset | 0
- byteLength = byteLength | 0
- if (!noAssert) checkOffset(offset, byteLength, this.length)
-
- var val = this[offset]
- var mul = 1
- var i = 0
- while (++i < byteLength && (mul *= 0x100)) {
- val += this[offset + i] * mul
- }
- mul *= 0x80
-
- if (val >= mul) val -= Math.pow(2, 8 * byteLength)
-
- return val
-}
-
-Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) {
- offset = offset | 0
- byteLength = byteLength | 0
- if (!noAssert) checkOffset(offset, byteLength, this.length)
-
- var i = byteLength
- var mul = 1
- var val = this[offset + --i]
- while (i > 0 && (mul *= 0x100)) {
- val += this[offset + --i] * mul
- }
- mul *= 0x80
-
- if (val >= mul) val -= Math.pow(2, 8 * byteLength)
-
- return val
-}
-
-Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 1, this.length)
- if (!(this[offset] & 0x80)) return (this[offset])
- return ((0xff - this[offset] + 1) * -1)
-}
-
-Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 2, this.length)
- var val = this[offset] | (this[offset + 1] << 8)
- return (val & 0x8000) ? val | 0xFFFF0000 : val
-}
-
-Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 2, this.length)
- var val = this[offset + 1] | (this[offset] << 8)
- return (val & 0x8000) ? val | 0xFFFF0000 : val
-}
-
-Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 4, this.length)
-
- return (this[offset]) |
- (this[offset + 1] << 8) |
- (this[offset + 2] << 16) |
- (this[offset + 3] << 24)
-}
-
-Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 4, this.length)
-
- return (this[offset] << 24) |
- (this[offset + 1] << 16) |
- (this[offset + 2] << 8) |
- (this[offset + 3])
-}
-
-Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 4, this.length)
- return ieee754.read(this, offset, true, 23, 4)
-}
-
-Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 4, this.length)
- return ieee754.read(this, offset, false, 23, 4)
-}
-
-Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 8, this.length)
- return ieee754.read(this, offset, true, 52, 8)
-}
-
-Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) {
- if (!noAssert) checkOffset(offset, 8, this.length)
- return ieee754.read(this, offset, false, 52, 8)
-}
-
-function checkInt (buf, value, offset, ext, max, min) {
- if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance')
- if (value > max || value < min) throw new RangeError('"value" argument is out of bounds')
- if (offset + ext > buf.length) throw new RangeError('Index out of range')
-}
-
-Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) {
- value = +value
- offset = offset | 0
- byteLength = byteLength | 0
- if (!noAssert) {
- var maxBytes = Math.pow(2, 8 * byteLength) - 1
- checkInt(this, value, offset, byteLength, maxBytes, 0)
- }
-
- var mul = 1
- var i = 0
- this[offset] = value & 0xFF
- while (++i < byteLength && (mul *= 0x100)) {
- this[offset + i] = (value / mul) & 0xFF
- }
-
- return offset + byteLength
-}
-
-Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) {
- value = +value
- offset = offset | 0
- byteLength = byteLength | 0
- if (!noAssert) {
- var maxBytes = Math.pow(2, 8 * byteLength) - 1
- checkInt(this, value, offset, byteLength, maxBytes, 0)
- }
-
- var i = byteLength - 1
- var mul = 1
- this[offset + i] = value & 0xFF
- while (--i >= 0 && (mul *= 0x100)) {
- this[offset + i] = (value / mul) & 0xFF
- }
-
- return offset + byteLength
-}
-
-Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) {
- value = +value
- offset = offset | 0
- if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0)
- if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value)
- this[offset] = (value & 0xff)
- return offset + 1
-}
-
-function objectWriteUInt16 (buf, value, offset, littleEndian) {
- if (value < 0) value = 0xffff + value + 1
- for (var i = 0, j = Math.min(buf.length - offset, 2); i < j; ++i) {
- buf[offset + i] = (value & (0xff << (8 * (littleEndian ? i : 1 - i)))) >>>
- (littleEndian ? i : 1 - i) * 8
- }
-}
-
-Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) {
- value = +value
- offset = offset | 0
- if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
- if (Buffer.TYPED_ARRAY_SUPPORT) {
- this[offset] = (value & 0xff)
- this[offset + 1] = (value >>> 8)
- } else {
- objectWriteUInt16(this, value, offset, true)
- }
- return offset + 2
-}
-
-Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) {
- value = +value
- offset = offset | 0
- if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0)
- if (Buffer.TYPED_ARRAY_SUPPORT) {
- this[offset] = (value >>> 8)
- this[offset + 1] = (value & 0xff)
- } else {
- objectWriteUInt16(this, value, offset, false)
- }
- return offset + 2
-}
-
-function objectWriteUInt32 (buf, value, offset, littleEndian) {
- if (value < 0) value = 0xffffffff + value + 1
- for (var i = 0, j = Math.min(buf.length - offset, 4); i < j; ++i) {
- buf[offset + i] = (value >>> (littleEndian ? i : 3 - i) * 8) & 0xff
- }
-}
-
-Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) {
- value = +value
- offset = offset | 0
- if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
- if (Buffer.TYPED_ARRAY_SUPPORT) {
- this[offset + 3] = (value >>> 24)
- this[offset + 2] = (value >>> 16)
- this[offset + 1] = (value >>> 8)
- this[offset] = (value & 0xff)
- } else {
- objectWriteUInt32(this, value, offset, true)
- }
- return offset + 4
-}
-
-Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) {
- value = +value
- offset = offset | 0
- if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0)
- if (Buffer.TYPED_ARRAY_SUPPORT) {
- this[offset] = (value >>> 24)
- this[offset + 1] = (value >>> 16)
- this[offset + 2] = (value >>> 8)
- this[offset + 3] = (value & 0xff)
- } else {
- objectWriteUInt32(this, value, offset, false)
- }
- return offset + 4
-}
-
-Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) {
- value = +value
- offset = offset | 0
- if (!noAssert) {
- var limit = Math.pow(2, 8 * byteLength - 1)
-
- checkInt(this, value, offset, byteLength, limit - 1, -limit)
- }
-
- var i = 0
- var mul = 1
- var sub = 0
- this[offset] = value & 0xFF
- while (++i < byteLength && (mul *= 0x100)) {
- if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
- sub = 1
- }
- this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
- }
-
- return offset + byteLength
-}
-
-Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) {
- value = +value
- offset = offset | 0
- if (!noAssert) {
- var limit = Math.pow(2, 8 * byteLength - 1)
-
- checkInt(this, value, offset, byteLength, limit - 1, -limit)
- }
-
- var i = byteLength - 1
- var mul = 1
- var sub = 0
- this[offset + i] = value & 0xFF
- while (--i >= 0 && (mul *= 0x100)) {
- if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
- sub = 1
- }
- this[offset + i] = ((value / mul) >> 0) - sub & 0xFF
- }
-
- return offset + byteLength
-}
-
-Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) {
- value = +value
- offset = offset | 0
- if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80)
- if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value)
- if (value < 0) value = 0xff + value + 1
- this[offset] = (value & 0xff)
- return offset + 1
-}
-
-Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) {
- value = +value
- offset = offset | 0
- if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
- if (Buffer.TYPED_ARRAY_SUPPORT) {
- this[offset] = (value & 0xff)
- this[offset + 1] = (value >>> 8)
- } else {
- objectWriteUInt16(this, value, offset, true)
- }
- return offset + 2
-}
-
-Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) {
- value = +value
- offset = offset | 0
- if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000)
- if (Buffer.TYPED_ARRAY_SUPPORT) {
- this[offset] = (value >>> 8)
- this[offset + 1] = (value & 0xff)
- } else {
- objectWriteUInt16(this, value, offset, false)
- }
- return offset + 2
-}
-
-Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) {
- value = +value
- offset = offset | 0
- if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
- if (Buffer.TYPED_ARRAY_SUPPORT) {
- this[offset] = (value & 0xff)
- this[offset + 1] = (value >>> 8)
- this[offset + 2] = (value >>> 16)
- this[offset + 3] = (value >>> 24)
- } else {
- objectWriteUInt32(this, value, offset, true)
- }
- return offset + 4
-}
-
-Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) {
- value = +value
- offset = offset | 0
- if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
- if (value < 0) value = 0xffffffff + value + 1
- if (Buffer.TYPED_ARRAY_SUPPORT) {
- this[offset] = (value >>> 24)
- this[offset + 1] = (value >>> 16)
- this[offset + 2] = (value >>> 8)
- this[offset + 3] = (value & 0xff)
- } else {
- objectWriteUInt32(this, value, offset, false)
- }
- return offset + 4
-}
-
-function checkIEEE754 (buf, value, offset, ext, max, min) {
- if (offset + ext > buf.length) throw new RangeError('Index out of range')
- if (offset < 0) throw new RangeError('Index out of range')
-}
-
-function writeFloat (buf, value, offset, littleEndian, noAssert) {
- if (!noAssert) {
- checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38)
- }
- ieee754.write(buf, value, offset, littleEndian, 23, 4)
- return offset + 4
-}
-
-Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) {
- return writeFloat(this, value, offset, true, noAssert)
-}
-
-Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) {
- return writeFloat(this, value, offset, false, noAssert)
-}
-
-function writeDouble (buf, value, offset, littleEndian, noAssert) {
- if (!noAssert) {
- checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308)
- }
- ieee754.write(buf, value, offset, littleEndian, 52, 8)
- return offset + 8
-}
-
-Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) {
- return writeDouble(this, value, offset, true, noAssert)
-}
-
-Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) {
- return writeDouble(this, value, offset, false, noAssert)
-}
-
-// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
-Buffer.prototype.copy = function copy (target, targetStart, start, end) {
- if (!start) start = 0
- if (!end && end !== 0) end = this.length
- if (targetStart >= target.length) targetStart = target.length
- if (!targetStart) targetStart = 0
- if (end > 0 && end < start) end = start
-
- // Copy 0 bytes; we're done
- if (end === start) return 0
- if (target.length === 0 || this.length === 0) return 0
-
- // Fatal error conditions
- if (targetStart < 0) {
- throw new RangeError('targetStart out of bounds')
- }
- if (start < 0 || start >= this.length) throw new RangeError('sourceStart out of bounds')
- if (end < 0) throw new RangeError('sourceEnd out of bounds')
-
- // Are we oob?
- if (end > this.length) end = this.length
- if (target.length - targetStart < end - start) {
- end = target.length - targetStart + start
- }
-
- var len = end - start
- var i
-
- if (this === target && start < targetStart && targetStart < end) {
- // descending copy from end
- for (i = len - 1; i >= 0; --i) {
- target[i + targetStart] = this[i + start]
- }
- } else if (len < 1000 || !Buffer.TYPED_ARRAY_SUPPORT) {
- // ascending copy from start
- for (i = 0; i < len; ++i) {
- target[i + targetStart] = this[i + start]
- }
- } else {
- Uint8Array.prototype.set.call(
- target,
- this.subarray(start, start + len),
- targetStart
- )
- }
-
- return len
-}
-
-// Usage:
-// buffer.fill(number[, offset[, end]])
-// buffer.fill(buffer[, offset[, end]])
-// buffer.fill(string[, offset[, end]][, encoding])
-Buffer.prototype.fill = function fill (val, start, end, encoding) {
- // Handle string cases:
- if (typeof val === 'string') {
- if (typeof start === 'string') {
- encoding = start
- start = 0
- end = this.length
- } else if (typeof end === 'string') {
- encoding = end
- end = this.length
- }
- if (val.length === 1) {
- var code = val.charCodeAt(0)
- if (code < 256) {
- val = code
- }
- }
- if (encoding !== undefined && typeof encoding !== 'string') {
- throw new TypeError('encoding must be a string')
- }
- if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {
- throw new TypeError('Unknown encoding: ' + encoding)
- }
- } else if (typeof val === 'number') {
- val = val & 255
- }
-
- // Invalid ranges are not set to a default, so we can range-check early.
- if (start < 0 || this.length < start || this.length < end) {
- throw new RangeError('Out of range index')
- }
-
- if (end <= start) {
- return this
- }
-
- start = start >>> 0
- end = end === undefined ? this.length : end >>> 0
-
- if (!val) val = 0
-
- var i
- if (typeof val === 'number') {
- for (i = start; i < end; ++i) {
- this[i] = val
- }
- } else {
- var bytes = Buffer.isBuffer(val)
- ? val
- : utf8ToBytes(new Buffer(val, encoding).toString())
- var len = bytes.length
- for (i = 0; i < end - start; ++i) {
- this[i + start] = bytes[i % len]
- }
- }
-
- return this
-}
-
-// HELPER FUNCTIONS
-// ================
-
-var INVALID_BASE64_RE = /[^+\/0-9A-Za-z-_]/g
-
-function base64clean (str) {
- // Node strips out invalid characters like \n and \t from the string, base64-js does not
- str = stringtrim(str).replace(INVALID_BASE64_RE, '')
- // Node converts strings with length < 2 to ''
- if (str.length < 2) return ''
- // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not
- while (str.length % 4 !== 0) {
- str = str + '='
- }
- return str
-}
-
-function stringtrim (str) {
- if (str.trim) return str.trim()
- return str.replace(/^\s+|\s+$/g, '')
-}
-
-function toHex (n) {
- if (n < 16) return '0' + n.toString(16)
- return n.toString(16)
-}
-
-function utf8ToBytes (string, units) {
- units = units || Infinity
- var codePoint
- var length = string.length
- var leadSurrogate = null
- var bytes = []
-
- for (var i = 0; i < length; ++i) {
- codePoint = string.charCodeAt(i)
-
- // is surrogate component
- if (codePoint > 0xD7FF && codePoint < 0xE000) {
- // last char was a lead
- if (!leadSurrogate) {
- // no lead yet
- if (codePoint > 0xDBFF) {
- // unexpected trail
- if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
- continue
- } else if (i + 1 === length) {
- // unpaired lead
- if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
- continue
- }
-
- // valid lead
- leadSurrogate = codePoint
-
- continue
- }
-
- // 2 leads in a row
- if (codePoint < 0xDC00) {
- if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
- leadSurrogate = codePoint
- continue
- }
-
- // valid surrogate pair
- codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000
- } else if (leadSurrogate) {
- // valid bmp char, but last char was a lead
- if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD)
- }
-
- leadSurrogate = null
-
- // encode utf8
- if (codePoint < 0x80) {
- if ((units -= 1) < 0) break
- bytes.push(codePoint)
- } else if (codePoint < 0x800) {
- if ((units -= 2) < 0) break
- bytes.push(
- codePoint >> 0x6 | 0xC0,
- codePoint & 0x3F | 0x80
- )
- } else if (codePoint < 0x10000) {
- if ((units -= 3) < 0) break
- bytes.push(
- codePoint >> 0xC | 0xE0,
- codePoint >> 0x6 & 0x3F | 0x80,
- codePoint & 0x3F | 0x80
- )
- } else if (codePoint < 0x110000) {
- if ((units -= 4) < 0) break
- bytes.push(
- codePoint >> 0x12 | 0xF0,
- codePoint >> 0xC & 0x3F | 0x80,
- codePoint >> 0x6 & 0x3F | 0x80,
- codePoint & 0x3F | 0x80
- )
- } else {
- throw new Error('Invalid code point')
- }
- }
-
- return bytes
-}
-
-function asciiToBytes (str) {
- var byteArray = []
- for (var i = 0; i < str.length; ++i) {
- // Node's code seems to be doing this and not & 0x7F..
- byteArray.push(str.charCodeAt(i) & 0xFF)
- }
- return byteArray
-}
-
-function utf16leToBytes (str, units) {
- var c, hi, lo
- var byteArray = []
- for (var i = 0; i < str.length; ++i) {
- if ((units -= 2) < 0) break
-
- c = str.charCodeAt(i)
- hi = c >> 8
- lo = c % 256
- byteArray.push(lo)
- byteArray.push(hi)
- }
-
- return byteArray
-}
-
-function base64ToBytes (str) {
- return base64.toByteArray(base64clean(str))
-}
-
-function blitBuffer (src, dst, offset, length) {
- for (var i = 0; i < length; ++i) {
- if ((i + offset >= dst.length) || (i >= src.length)) break
- dst[i + offset] = src[i]
- }
- return i
-}
-
-function isnan (val) {
- return val !== val // eslint-disable-line no-self-compare
-}
-
-/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(2)))
-
-/***/ }),
-/* 35 */
-/***/ (function(module, exports, __webpack_require__) {
-
-exports = module.exports = __webpack_require__(36)(undefined);
-// imports
-
-
-// module
-exports.push([module.i, "\n.cell,\n.input,\n.output {\n display: flex;\n width: 100%;\n margin-bottom: 10px;\n}\n.cell pre {\n margin: 0;\n width: 100%;\n}\n", ""]);
-
-// exports
-
-
-/***/ }),
-/* 36 */
-/***/ (function(module, exports, __webpack_require__) {
-
-/* WEBPACK VAR INJECTION */(function(Buffer) {/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
-*/
-// css base code, injected by the css-loader
-module.exports = function(useSourceMap) {
- var list = [];
-
- // return the list of modules as css string
- list.toString = function toString() {
- return this.map(function (item) {
- var content = cssWithMappingToString(item, useSourceMap);
- if(item[2]) {
- return "@media " + item[2] + "{" + content + "}";
- } else {
- return content;
- }
- }).join("");
- };
-
- // import a list of modules into the list
- list.i = function(modules, mediaQuery) {
- if(typeof modules === "string")
- modules = [[null, modules, ""]];
- var alreadyImportedModules = {};
- for(var i = 0; i < this.length; i++) {
- var id = this[i][0];
- if(typeof id === "number")
- alreadyImportedModules[id] = true;
- }
- for(i = 0; i < modules.length; i++) {
- var item = modules[i];
- // skip already imported module
- // this implementation is not 100% perfect for weird media query combinations
- // when a module is imported multiple times with different media queries.
- // I hope this will never occur (Hey this way we have smaller bundles)
- if(typeof item[0] !== "number" || !alreadyImportedModules[item[0]]) {
- if(mediaQuery && !item[2]) {
- item[2] = mediaQuery;
- } else if(mediaQuery) {
- item[2] = "(" + item[2] + ") and (" + mediaQuery + ")";
- }
- list.push(item);
- }
- }
- };
- return list;
-};
-
-function cssWithMappingToString(item, useSourceMap) {
- var content = item[1] || '';
- var cssMapping = item[3];
- if (!cssMapping) {
- return content;
- }
-
- if (useSourceMap) {
- var sourceMapping = toComment(cssMapping);
- var sourceURLs = cssMapping.sources.map(function (source) {
- return '/*# sourceURL=' + cssMapping.sourceRoot + source + ' */'
- });
-
- return [content].concat(sourceURLs).concat([sourceMapping]).join('\n');
- }
-
- return [content].join('\n');
-}
-
-// Adapted from convert-source-map (MIT)
-function toComment(sourceMap) {
- var base64 = new Buffer(JSON.stringify(sourceMap)).toString('base64');
- var data = 'sourceMappingURL=data:application/json;charset=utf-8;base64,' + base64;
-
- return '/*# ' + data + ' */';
-}
-
-/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(34).Buffer))
-
-/***/ }),
-/* 37 */
-/***/ (function(module, exports) {
-
-exports.read = function (buffer, offset, isLE, mLen, nBytes) {
- var e, m
- var eLen = nBytes * 8 - mLen - 1
- var eMax = (1 << eLen) - 1
- var eBias = eMax >> 1
- var nBits = -7
- var i = isLE ? (nBytes - 1) : 0
- var d = isLE ? -1 : 1
- var s = buffer[offset + i]
-
- i += d
-
- e = s & ((1 << (-nBits)) - 1)
- s >>= (-nBits)
- nBits += eLen
- for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8) {}
-
- m = e & ((1 << (-nBits)) - 1)
- e >>= (-nBits)
- nBits += mLen
- for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8) {}
-
- if (e === 0) {
- e = 1 - eBias
- } else if (e === eMax) {
- return m ? NaN : ((s ? -1 : 1) * Infinity)
- } else {
- m = m + Math.pow(2, mLen)
- e = e - eBias
- }
- return (s ? -1 : 1) * m * Math.pow(2, e - mLen)
-}
-
-exports.write = function (buffer, value, offset, isLE, mLen, nBytes) {
- var e, m, c
- var eLen = nBytes * 8 - mLen - 1
- var eMax = (1 << eLen) - 1
- var eBias = eMax >> 1
- var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0)
- var i = isLE ? 0 : (nBytes - 1)
- var d = isLE ? 1 : -1
- var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0
-
- value = Math.abs(value)
-
- if (isNaN(value) || value === Infinity) {
- m = isNaN(value) ? 1 : 0
- e = eMax
- } else {
- e = Math.floor(Math.log(value) / Math.LN2)
- if (value * (c = Math.pow(2, -e)) < 1) {
- e--
- c *= 2
- }
- if (e + eBias >= 1) {
- value += rt / c
- } else {
- value += rt * Math.pow(2, 1 - eBias)
- }
- if (value * c >= 2) {
- e++
- c /= 2
- }
-
- if (e + eBias >= eMax) {
- m = 0
- e = eMax
- } else if (e + eBias >= 1) {
- m = (value * c - 1) * Math.pow(2, mLen)
- e = e + eBias
- } else {
- m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen)
- e = 0
- }
- }
-
- for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {}
-
- e = (e << mLen) | m
- eLen += mLen
- for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {}
-
- buffer[offset + i - d] |= s * 128
-}
-
-
-/***/ }),
-/* 38 */
-/***/ (function(module, exports) {
-
-var toString = {}.toString;
-
-module.exports = Array.isArray || function (arr) {
- return toString.call(arr) == '[object Array]';
-};
-
-
-/***/ }),
-/* 39 */
+/* 42 */
/***/ (function(module, exports, __webpack_require__) {
// style-loader: Adds some css to the DOM by adding a <style> tag
// load the styles
-var content = __webpack_require__(35);
+var content = __webpack_require__(19);
if(typeof content === 'string') content = [[module.i, content, '']];
if(content.locals) module.exports = content.locals;
// add the styles to the DOM
-var update = __webpack_require__(40)("87c28124", content, false);
+var update = __webpack_require__(3)("06fc6a9f", content, false);
// Hot Module Replacement
if(false) {
// When the styles change, update the <style> tags
if(!content.locals) {
- module.hot.accept("!!../node_modules/css-loader/index.js!../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-4cb2b168\",\"scoped\":false,\"hasInlineConfig\":false}!../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./index.vue", function() {
- var newContent = require("!!../node_modules/css-loader/index.js!../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-4cb2b168\",\"scoped\":false,\"hasInlineConfig\":false}!../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./index.vue");
+ module.hot.accept("!!../../node_modules/css-loader/index.js!../../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-3ac4c361\",\"scoped\":true,\"hasInlineConfig\":false}!../../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./code.vue", function() {
+ var newContent = require("!!../../node_modules/css-loader/index.js!../../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-3ac4c361\",\"scoped\":true,\"hasInlineConfig\":false}!../../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./code.vue");
if(typeof newContent === 'string') newContent = [[module.id, newContent, '']];
update(newContent);
});
@@ -5490,291 +5757,23 @@ if(false) {
}
/***/ }),
-/* 40 */
-/***/ (function(module, exports, __webpack_require__) {
-
-/*
- MIT License http://www.opensource.org/licenses/mit-license.php
- Author Tobias Koppers @sokra
- Modified by Evan You @yyx990803
-*/
-
-var hasDocument = typeof document !== 'undefined'
-
-if (typeof DEBUG !== 'undefined' && DEBUG) {
- if (!hasDocument) {
- throw new Error(
- 'vue-style-loader cannot be used in a non-browser environment. ' +
- "Use { target: 'node' } in your Webpack config to indicate a server-rendering environment."
- ) }
-}
-
-var listToStyles = __webpack_require__(41)
-
-/*
-type StyleObject = {
- id: number;
- parts: Array<StyleObjectPart>
-}
-
-type StyleObjectPart = {
- css: string;
- media: string;
- sourceMap: ?string
-}
-*/
-
-var stylesInDom = {/*
- [id: number]: {
- id: number,
- refs: number,
- parts: Array<(obj?: StyleObjectPart) => void>
- }
-*/}
-
-var head = hasDocument && (document.head || document.getElementsByTagName('head')[0])
-var singletonElement = null
-var singletonCounter = 0
-var isProduction = false
-var noop = function () {}
-
-// Force single-tag solution on IE6-9, which has a hard limit on the # of <style>
-// tags it will allow on a page
-var isOldIE = typeof navigator !== 'undefined' && /msie [6-9]\b/.test(navigator.userAgent.toLowerCase())
-
-module.exports = function (parentId, list, _isProduction) {
- isProduction = _isProduction
-
- var styles = listToStyles(parentId, list)
- addStylesToDom(styles)
-
- return function update (newList) {
- var mayRemove = []
- for (var i = 0; i < styles.length; i++) {
- var item = styles[i]
- var domStyle = stylesInDom[item.id]
- domStyle.refs--
- mayRemove.push(domStyle)
- }
- if (newList) {
- styles = listToStyles(parentId, newList)
- addStylesToDom(styles)
- } else {
- styles = []
- }
- for (var i = 0; i < mayRemove.length; i++) {
- var domStyle = mayRemove[i]
- if (domStyle.refs === 0) {
- for (var j = 0; j < domStyle.parts.length; j++) {
- domStyle.parts[j]()
- }
- delete stylesInDom[domStyle.id]
- }
- }
- }
-}
-
-function addStylesToDom (styles /* Array<StyleObject> */) {
- for (var i = 0; i < styles.length; i++) {
- var item = styles[i]
- var domStyle = stylesInDom[item.id]
- if (domStyle) {
- domStyle.refs++
- for (var j = 0; j < domStyle.parts.length; j++) {
- domStyle.parts[j](item.parts[j])
- }
- for (; j < item.parts.length; j++) {
- domStyle.parts.push(addStyle(item.parts[j]))
- }
- if (domStyle.parts.length > item.parts.length) {
- domStyle.parts.length = item.parts.length
- }
- } else {
- var parts = []
- for (var j = 0; j < item.parts.length; j++) {
- parts.push(addStyle(item.parts[j]))
- }
- stylesInDom[item.id] = { id: item.id, refs: 1, parts: parts }
- }
- }
-}
-
-function createStyleElement () {
- var styleElement = document.createElement('style')
- styleElement.type = 'text/css'
- head.appendChild(styleElement)
- return styleElement
-}
-
-function addStyle (obj /* StyleObjectPart */) {
- var update, remove
- var styleElement = document.querySelector('style[data-vue-ssr-id~="' + obj.id + '"]')
-
- if (styleElement) {
- if (isProduction) {
- // has SSR styles and in production mode.
- // simply do nothing.
- return noop
- } else {
- // has SSR styles but in dev mode.
- // for some reason Chrome can't handle source map in server-rendered
- // style tags - source maps in <style> only works if the style tag is
- // created and inserted dynamically. So we remove the server rendered
- // styles and inject new ones.
- styleElement.parentNode.removeChild(styleElement)
- }
- }
-
- if (isOldIE) {
- // use singleton mode for IE9.
- var styleIndex = singletonCounter++
- styleElement = singletonElement || (singletonElement = createStyleElement())
- update = applyToSingletonTag.bind(null, styleElement, styleIndex, false)
- remove = applyToSingletonTag.bind(null, styleElement, styleIndex, true)
- } else {
- // use multi-style-tag mode in all other cases
- styleElement = createStyleElement()
- update = applyToTag.bind(null, styleElement)
- remove = function () {
- styleElement.parentNode.removeChild(styleElement)
- }
- }
-
- update(obj)
-
- return function updateStyle (newObj /* StyleObjectPart */) {
- if (newObj) {
- if (newObj.css === obj.css &&
- newObj.media === obj.media &&
- newObj.sourceMap === obj.sourceMap) {
- return
- }
- update(obj = newObj)
- } else {
- remove()
- }
- }
-}
-
-var replaceText = (function () {
- var textStore = []
-
- return function (index, replacement) {
- textStore[index] = replacement
- return textStore.filter(Boolean).join('\n')
- }
-})()
-
-function applyToSingletonTag (styleElement, index, remove, obj) {
- var css = remove ? '' : obj.css
-
- if (styleElement.styleSheet) {
- styleElement.styleSheet.cssText = replaceText(index, css)
- } else {
- var cssNode = document.createTextNode(css)
- var childNodes = styleElement.childNodes
- if (childNodes[index]) styleElement.removeChild(childNodes[index])
- if (childNodes.length) {
- styleElement.insertBefore(cssNode, childNodes[index])
- } else {
- styleElement.appendChild(cssNode)
- }
- }
-}
-
-function applyToTag (styleElement, obj) {
- var css = obj.css
- var media = obj.media
- var sourceMap = obj.sourceMap
-
- if (media) {
- styleElement.setAttribute('media', media)
- }
-
- if (sourceMap) {
- // https://developer.chrome.com/devtools/docs/javascript-debugging
- // this makes source maps inside style tags work properly in Chrome
- css += '\n/*# sourceURL=' + sourceMap.sources[0] + ' */'
- // http://stackoverflow.com/a/26603875
- css += '\n/*# sourceMappingURL=data:application/json;base64,' + btoa(unescape(encodeURIComponent(JSON.stringify(sourceMap)))) + ' */'
- }
-
- if (styleElement.styleSheet) {
- styleElement.styleSheet.cssText = css
- } else {
- while (styleElement.firstChild) {
- styleElement.removeChild(styleElement.firstChild)
- }
- styleElement.appendChild(document.createTextNode(css))
- }
-}
-
-
-/***/ }),
-/* 41 */
-/***/ (function(module, exports) {
-
-/**
- * Translates the list format produced by css-loader into something
- * easier to manipulate.
- */
-module.exports = function listToStyles (parentId, list) {
- var styles = []
- var newStyles = {}
- for (var i = 0; i < list.length; i++) {
- var item = list[i]
- var id = item[0]
- var css = item[1]
- var media = item[2]
- var sourceMap = item[3]
- var part = {
- id: parentId + ':' + i,
- css: css,
- media: media,
- sourceMap: sourceMap
- }
- if (!newStyles[id]) {
- styles.push(newStyles[id] = { id: id, parts: [part] })
- } else {
- newStyles[id].parts.push(part)
- }
- }
- return styles
-}
-
-
-/***/ }),
-/* 42 */
-/***/ (function(module, exports, __webpack_require__) {
-
-exports = module.exports = __webpack_require__(36)(undefined);
-// imports
-
-
-// module
-exports.push([module.i, "\n.prompt[data-v-4f6bf458] {\n padding: 0 10px;\n min-width: 7em;\n font-family: monospace;\n}\n", ""]);
-
-// exports
-
-
-/***/ }),
/* 43 */
/***/ (function(module, exports, __webpack_require__) {
// style-loader: Adds some css to the DOM by adding a <style> tag
// load the styles
-var content = __webpack_require__(42);
+var content = __webpack_require__(20);
if(typeof content === 'string') content = [[module.i, content, '']];
if(content.locals) module.exports = content.locals;
// add the styles to the DOM
-var update = __webpack_require__(40)("5b60b003", content, false);
+var update = __webpack_require__(3)("87c28124", content, false);
// Hot Module Replacement
if(false) {
// When the styles change, update the <style> tags
if(!content.locals) {
- module.hot.accept("!!../../node_modules/css-loader/index.js!../../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-4f6bf458\",\"scoped\":true,\"hasInlineConfig\":false}!../../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./prompt.vue", function() {
- var newContent = require("!!../../node_modules/css-loader/index.js!../../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-4f6bf458\",\"scoped\":true,\"hasInlineConfig\":false}!../../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./prompt.vue");
+ module.hot.accept("!!../node_modules/css-loader/index.js!../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-4cb2b168\",\"scoped\":false,\"hasInlineConfig\":false}!../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./index.vue", function() {
+ var newContent = require("!!../node_modules/css-loader/index.js!../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-4cb2b168\",\"scoped\":false,\"hasInlineConfig\":false}!../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./index.vue");
if(typeof newContent === 'string') newContent = [[module.id, newContent, '']];
update(newContent);
});
@@ -5784,39 +5783,23 @@ if(false) {
}
/***/ }),
-/* 44 */,
-/* 45 */,
-/* 46 */
-/***/ (function(module, exports, __webpack_require__) {
-
-exports = module.exports = __webpack_require__(36)(undefined);
-// imports
-
-
-// module
-exports.push([module.i, "\n.markdown .katex {\n display: block;\n text-align: center;\n}\n", ""]);
-
-// exports
-
-
-/***/ }),
-/* 47 */
+/* 44 */
/***/ (function(module, exports, __webpack_require__) {
// style-loader: Adds some css to the DOM by adding a <style> tag
// load the styles
-var content = __webpack_require__(46);
+var content = __webpack_require__(21);
if(typeof content === 'string') content = [[module.i, content, '']];
if(content.locals) module.exports = content.locals;
// add the styles to the DOM
-var update = __webpack_require__(40)("48dda57c", content, false);
+var update = __webpack_require__(3)("5b60b003", content, false);
// Hot Module Replacement
if(false) {
// When the styles change, update the <style> tags
if(!content.locals) {
- module.hot.accept("!!../../node_modules/css-loader/index.js!../../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-7342b363\",\"scoped\":false,\"hasInlineConfig\":false}!../../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./markdown.vue", function() {
- var newContent = require("!!../../node_modules/css-loader/index.js!../../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-7342b363\",\"scoped\":false,\"hasInlineConfig\":false}!../../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./markdown.vue");
+ module.hot.accept("!!../../node_modules/css-loader/index.js!../../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-4f6bf458\",\"scoped\":true,\"hasInlineConfig\":false}!../../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./prompt.vue", function() {
+ var newContent = require("!!../../node_modules/css-loader/index.js!../../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-4f6bf458\",\"scoped\":true,\"hasInlineConfig\":false}!../../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./prompt.vue");
if(typeof newContent === 'string') newContent = [[module.id, newContent, '']];
update(newContent);
});
@@ -5826,37 +5809,23 @@ if(false) {
}
/***/ }),
-/* 48 */
-/***/ (function(module, exports, __webpack_require__) {
-
-exports = module.exports = __webpack_require__(36)(undefined);
-// imports
-
-
-// module
-exports.push([module.i, "\n.cell[data-v-3ac4c361] {\n flex-direction: column;\n}\n", ""]);
-
-// exports
-
-
-/***/ }),
-/* 49 */
+/* 45 */
/***/ (function(module, exports, __webpack_require__) {
// style-loader: Adds some css to the DOM by adding a <style> tag
// load the styles
-var content = __webpack_require__(48);
+var content = __webpack_require__(22);
if(typeof content === 'string') content = [[module.i, content, '']];
if(content.locals) module.exports = content.locals;
// add the styles to the DOM
-var update = __webpack_require__(40)("06fc6a9f", content, false);
+var update = __webpack_require__(3)("48dda57c", content, false);
// Hot Module Replacement
if(false) {
// When the styles change, update the <style> tags
if(!content.locals) {
- module.hot.accept("!!../../node_modules/css-loader/index.js!../../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-3ac4c361\",\"scoped\":true,\"hasInlineConfig\":false}!../../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./code.vue", function() {
- var newContent = require("!!../../node_modules/css-loader/index.js!../../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-3ac4c361\",\"scoped\":true,\"hasInlineConfig\":false}!../../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./code.vue");
+ module.hot.accept("!!../../node_modules/css-loader/index.js!../../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-7342b363\",\"scoped\":false,\"hasInlineConfig\":false}!../../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./markdown.vue", function() {
+ var newContent = require("!!../../node_modules/css-loader/index.js!../../node_modules/vue-loader/lib/style-compiler/index.js?{\"id\":\"data-v-7342b363\",\"scoped\":false,\"hasInlineConfig\":false}!../../node_modules/vue-loader/lib/selector.js?type=styles&index=0!./markdown.vue");
if(typeof newContent === 'string') newContent = [[module.id, newContent, '']];
update(newContent);
});
@@ -5865,6 +5834,54 @@ if(false) {
module.hot.dispose(function() { update(); });
}
+/***/ }),
+/* 46 */
+/***/ (function(module, exports) {
+
+/**
+ * Translates the list format produced by css-loader into something
+ * easier to manipulate.
+ */
+module.exports = function listToStyles (parentId, list) {
+ var styles = []
+ var newStyles = {}
+ for (var i = 0; i < list.length; i++) {
+ var item = list[i]
+ var id = item[0]
+ var css = item[1]
+ var media = item[2]
+ var sourceMap = item[3]
+ var part = {
+ id: parentId + ':' + i,
+ css: css,
+ media: media,
+ sourceMap: sourceMap
+ }
+ if (!newStyles[id]) {
+ styles.push(newStyles[id] = { id: id, parts: [part] })
+ } else {
+ newStyles[id].parts.push(part)
+ }
+ }
+ return styles
+}
+
+
+/***/ }),
+/* 47 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+var Notebook = __webpack_require__(6);
+
+module.exports = {
+ install: function install(_vue) {
+ _vue.component('notebook-lab', Notebook);
+ }
+};
+
/***/ })
/******/ ]);
 });
\ No newline at end of file
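
The newly added module /* 47 */ above is the bundle's public entry point: it exports a Vue plugin whose install hook registers the Notebook component globally under the tag notebook-lab. As a hedged illustration only (the consuming side is not part of this diff, and the import path and variable name below are assumptions), wiring it up would look roughly like this:

    // Sketch only -- the importing code is not shown in this diff.
    // The module name/path 'vendor/notebooklab' is an assumption for illustration.
    import Vue from 'vue';
    import NotebookLab from 'vendor/notebooklab';

    // Vue.use() invokes the plugin's install(_vue) hook seen in module 47,
    // which calls _vue.component('notebook-lab', Notebook) and makes the
    // <notebook-lab> component available globally to any template.
    Vue.use(NotebookLab);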