summaryrefslogtreecommitdiff
path: root/src/chttpd/test/eunit
diff options
context:
space:
mode:
Diffstat (limited to 'src/chttpd/test/eunit')
-rw-r--r--src/chttpd/test/eunit/chttpd_cors_test.erl564
-rw-r--r--src/chttpd/test/eunit/chttpd_csp_tests.erl81
-rw-r--r--src/chttpd/test/eunit/chttpd_db_attachment_size_tests.erl206
-rw-r--r--src/chttpd/test/eunit/chttpd_db_bulk_get_multipart_test.erl313
-rw-r--r--src/chttpd/test/eunit/chttpd_db_bulk_get_test.erl341
-rw-r--r--src/chttpd/test/eunit/chttpd_db_doc_size_tests.erl179
-rw-r--r--src/chttpd/test/eunit/chttpd_db_test.erl462
-rw-r--r--src/chttpd/test/eunit/chttpd_dbs_info_test.erl169
-rw-r--r--src/chttpd/test/eunit/chttpd_error_info_tests.erl168
-rw-r--r--src/chttpd/test/eunit/chttpd_handlers_tests.erl87
-rw-r--r--src/chttpd/test/eunit/chttpd_open_revs_error_test.erl112
-rw-r--r--src/chttpd/test/eunit/chttpd_plugin_tests.erl187
-rw-r--r--src/chttpd/test/eunit/chttpd_prefer_header_test.erl112
-rw-r--r--src/chttpd/test/eunit/chttpd_purge_tests.erl406
-rw-r--r--src/chttpd/test/eunit/chttpd_security_tests.erl384
-rw-r--r--src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl127
-rw-r--r--src/chttpd/test/eunit/chttpd_view_test.erl124
-rw-r--r--src/chttpd/test/eunit/chttpd_welcome_test.erl104
-rw-r--r--src/chttpd/test/eunit/chttpd_xframe_test.erl84
19 files changed, 4210 insertions, 0 deletions
diff --git a/src/chttpd/test/eunit/chttpd_cors_test.erl b/src/chttpd/test/eunit/chttpd_cors_test.erl
new file mode 100644
index 000000000..19e851561
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_cors_test.erl
@@ -0,0 +1,564 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_cors_test).
+
+
+-include_lib("couch/include/couch_db.hrl").
+-include_lib("eunit/include/eunit.hrl").
+-include_lib("chttpd/include/chttpd_cors.hrl").
+
+
+-define(DEFAULT_ORIGIN, "http://example.com").
+-define(DEFAULT_ORIGIN_HTTPS, "https://example.com").
+-define(EXPOSED_HEADERS,
+ "content-type, accept-ranges, etag, server, x-couch-request-id, " ++
+ "x-couch-update-newrev, x-couchdb-body-time").
+
+-define(CUSTOM_SUPPORTED_METHODS, ?SUPPORTED_METHODS -- ["CONNECT"]).
+-define(CUSTOM_SUPPORTED_HEADERS, ["extra" | ?SUPPORTED_HEADERS -- ["pragma"]]).
+-define(CUSTOM_EXPOSED_HEADERS, ["expose" | ?COUCH_HEADERS]).
+
+-define(CUSTOM_MAX_AGE, round(?CORS_DEFAULT_MAX_AGE / 2)).
+
+%% Test helpers
+
+
+empty_cors_config() ->
+ [].
+
+
+minimal_cors_config() ->
+ [
+ {<<"enable_cors">>, true},
+ {<<"origins">>, {[]}}
+ ].
+
+
+simple_cors_config() ->
+ [
+ {<<"enable_cors">>, true},
+ {<<"origins">>, {[
+ {list_to_binary(?DEFAULT_ORIGIN), {[]}}
+ ]}}
+ ].
+
+
+wildcard_cors_config() ->
+ [
+ {<<"enable_cors">>, true},
+ {<<"origins">>, {[
+ {<<"*">>, {[]}}
+ ]}}
+ ].
+
+custom_cors_config() ->
+ [
+ {<<"enable_cors">>, true},
+ {<<"allow_methods">>, ?CUSTOM_SUPPORTED_METHODS},
+ {<<"allow_headers">>, ?CUSTOM_SUPPORTED_HEADERS},
+ {<<"exposed_headers">>, ?CUSTOM_EXPOSED_HEADERS},
+ {<<"max_age">>, ?CUSTOM_MAX_AGE},
+ {<<"origins">>, {[
+ {<<"*">>, {[]}}
+ ]}}
+ ].
+
+access_control_cors_config(AllowCredentials) ->
+ [
+ {<<"enable_cors">>, true},
+ {<<"allow_credentials">>, AllowCredentials},
+ {<<"origins">>, {[
+ {list_to_binary(?DEFAULT_ORIGIN), {[]}}
+ ]}}].
+
+
+multiple_cors_config() ->
+ [
+ {<<"enable_cors">>, true},
+ {<<"origins">>, {[
+ {list_to_binary(?DEFAULT_ORIGIN), {[]}},
+ {<<"https://example.com">>, {[]}},
+ {<<"http://example.com:5984">>, {[]}},
+ {<<"https://example.com:5984">>, {[]}}
+ ]}}
+ ].
+
+
+mock_request(Method, Path, Headers0) ->
+ HeaderKey = "Access-Control-Request-Method",
+ Headers = case proplists:get_value(HeaderKey, Headers0, undefined) of
+ nil ->
+ proplists:delete(HeaderKey, Headers0);
+ undefined ->
+ case Method of
+ 'OPTIONS' ->
+ [{HeaderKey, atom_to_list(Method)} | Headers0];
+ _ ->
+ Headers0
+ end;
+ _ ->
+ Headers0
+ end,
+ Headers1 = mochiweb_headers:make(Headers),
+ MochiReq = mochiweb_request:new(nil, Method, Path, {1, 1}, Headers1),
+ PathParts = [list_to_binary(chttpd:unquote(Part))
+ || Part <- string:tokens(Path, "/")],
+ #httpd{method=Method, mochi_req=MochiReq, path_parts=PathParts}.
+
+
+header(#httpd{}=Req, Key) ->
+ chttpd:header_value(Req, Key);
+header({mochiweb_response, [_, _, Headers]}, Key) ->
+ %% header(Headers, Key);
+ mochiweb_headers:get_value(Key, Headers);
+header(Headers, Key) ->
+ couch_util:get_value(Key, Headers, undefined).
+
+
+string_headers(H) ->
+ string:join(H, ", ").
+
+
+assert_not_preflight_(Val) ->
+ ?_assertEqual(not_preflight, Val).
+
+
+%% CORS disabled tests
+
+
+cors_disabled_test_() ->
+ {"CORS disabled tests",
+ [
+ {"Empty user",
+ {foreach,
+ fun empty_cors_config/0,
+ [
+ fun test_no_access_control_method_preflight_request_/1,
+ fun test_no_headers_/1,
+ fun test_no_headers_server_/1,
+ fun test_no_headers_db_/1
+ ]}}]}.
+
+
+%% CORS enabled tests
+
+
+cors_enabled_minimal_config_test_() ->
+ {"Minimal CORS enabled, no Origins",
+ {foreach,
+ fun minimal_cors_config/0,
+ [
+ fun test_no_access_control_method_preflight_request_/1,
+ fun test_incorrect_origin_simple_request_/1,
+ fun test_incorrect_origin_preflight_request_/1
+ ]}}.
+
+
+cors_enabled_simple_config_test_() ->
+ {"Simple CORS config",
+ {foreach,
+ fun simple_cors_config/0,
+ [
+ fun test_no_access_control_method_preflight_request_/1,
+ fun test_preflight_request_/1,
+ fun test_bad_headers_preflight_request_/1,
+ fun test_good_headers_preflight_request_/1,
+ fun test_db_request_/1,
+ fun test_db_preflight_request_/1,
+ fun test_db_host_origin_request_/1,
+ fun test_preflight_with_port_no_origin_/1,
+ fun test_preflight_with_scheme_no_origin_/1,
+ fun test_preflight_with_scheme_port_no_origin_/1,
+ fun test_case_sensitive_mismatch_of_allowed_origins_/1
+ ]}}.
+
+cors_enabled_custom_config_test_() ->
+ {"Simple CORS config with custom allow_methods/allow_headers/exposed_headers",
+ {foreach,
+ fun custom_cors_config/0,
+ [
+ fun test_good_headers_preflight_request_with_custom_config_/1,
+ fun test_db_request_with_custom_config_/1
+ ]}}.
+
+
+cors_enabled_multiple_config_test_() ->
+ {"Multiple options CORS config",
+ {foreach,
+ fun multiple_cors_config/0,
+ [
+ fun test_no_access_control_method_preflight_request_/1,
+ fun test_preflight_request_/1,
+ fun test_db_request_/1,
+ fun test_db_preflight_request_/1,
+ fun test_db_host_origin_request_/1,
+ fun test_preflight_with_port_with_origin_/1,
+ fun test_preflight_with_scheme_with_origin_/1,
+ fun test_preflight_with_scheme_port_with_origin_/1
+ ]}}.
+
+
+%% Access-Control-Allow-Credentials tests
+
+
+%% http://www.w3.org/TR/cors/#supports-credentials
+%% 6.1.3
+%% If the resource supports credentials add a single
+%% Access-Control-Allow-Origin header, with the value
+%% of the Origin header as value, and add a single
+%% Access-Control-Allow-Credentials header with the
+%% case-sensitive string "true" as value.
+%% Otherwise, add a single Access-Control-Allow-Origin
+%% header, with either the value of the Origin header
+%% or the string "*" as value.
+%% Note: The string "*" cannot be used for a resource
+%% that supports credentials.
+
+db_request_credentials_header_off_test_() ->
+ {"Allow credentials disabled",
+ {setup,
+ fun() ->
+ access_control_cors_config(false)
+ end,
+ fun test_db_request_credentials_header_off_/1
+ }
+ }.
+
+
+db_request_credentials_header_on_test_() ->
+ {"Allow credentials enabled",
+ {setup,
+ fun() ->
+ access_control_cors_config(true)
+ end,
+ fun test_db_request_credentials_header_on_/1
+ }
+ }.
+
+
+%% CORS wildcard tests
+
+
+cors_enabled_wildcard_test_() ->
+ {"Wildcard CORS config",
+ {foreach,
+ fun wildcard_cors_config/0,
+ [
+ fun test_no_access_control_method_preflight_request_/1,
+ fun test_preflight_request_/1,
+ fun test_preflight_request_no_allow_credentials_/1,
+ fun test_preflight_request_empty_request_headers_/1,
+ fun test_db_request_/1,
+ fun test_db_preflight_request_/1,
+ fun test_db_host_origin_request_/1,
+ fun test_preflight_with_port_with_origin_/1,
+ fun test_preflight_with_scheme_with_origin_/1,
+ fun test_preflight_with_scheme_port_with_origin_/1,
+ fun test_case_sensitive_mismatch_of_allowed_origins_/1
+ ]}}.
+
+
+%% Test generators
+
+
+test_no_headers_(OwnerConfig) ->
+ Req = mock_request('GET', "/", []),
+ assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig)).
+
+
+test_no_headers_server_(OwnerConfig) ->
+ Req = mock_request('GET', "/", [{"Origin", "http://127.0.0.1"}]),
+ assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig)).
+
+
+test_no_headers_db_(OwnerConfig) ->
+ Headers = [{"Origin", "http://127.0.0.1"}],
+ Req = mock_request('GET', "/my_db", Headers),
+ assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig)).
+
+
+test_incorrect_origin_simple_request_(OwnerConfig) ->
+ Req = mock_request('GET', "/", [{"Origin", "http://127.0.0.1"}]),
+ [
+ ?_assert(chttpd_cors:is_cors_enabled(OwnerConfig)),
+ assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig))
+ ].
+
+
+test_incorrect_origin_preflight_request_(OwnerConfig) ->
+ Headers = [
+ {"Origin", "http://127.0.0.1"},
+ {"Access-Control-Request-Method", "GET"}
+ ],
+ Req = mock_request('GET', "/", Headers),
+ [
+ ?_assert(chttpd_cors:is_cors_enabled(OwnerConfig)),
+ assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig))
+ ].
+
+
+test_bad_headers_preflight_request_(OwnerConfig) ->
+ Headers = [
+ {"Origin", ?DEFAULT_ORIGIN},
+ {"Access-Control-Request-Method", "GET"},
+ {"Access-Control-Request-Headers", "X-Not-An-Allowed-Headers"}
+ ],
+ Req = mock_request('OPTIONS', "/", Headers),
+ [
+ ?_assert(chttpd_cors:is_cors_enabled(OwnerConfig)),
+ assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig))
+ ].
+
+
+test_good_headers_preflight_request_(OwnerConfig) ->
+ Headers = [
+ {"Origin", ?DEFAULT_ORIGIN},
+ {"Access-Control-Request-Method", "GET"},
+ {"Access-Control-Request-Headers", "accept-language"}
+ ],
+ Req = mock_request('OPTIONS', "/", Headers),
+ ?assert(chttpd_cors:is_cors_enabled(OwnerConfig)),
+ {ok, Headers1} = chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig),
+ [
+ ?_assertEqual(?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")),
+ ?_assertEqual(string_headers(?SUPPORTED_METHODS),
+ header(Headers1, "Access-Control-Allow-Methods")),
+ ?_assertEqual(string_headers(["accept-language"]),
+ header(Headers1, "Access-Control-Allow-Headers"))
+ ].
+
+test_good_headers_preflight_request_with_custom_config_(OwnerConfig) ->
+ Headers = [
+ {"Origin", ?DEFAULT_ORIGIN},
+ {"Access-Control-Request-Method", "GET"},
+ {"Access-Control-Request-Headers", "accept-language, extra"},
+ {"Access-Control-Max-Age", ?CORS_DEFAULT_MAX_AGE}
+ ],
+ Req = mock_request('OPTIONS', "/", Headers),
+ ?assert(chttpd_cors:is_cors_enabled(OwnerConfig)),
+ AllowMethods = couch_util:get_value(
+ <<"allow_methods">>, OwnerConfig, ?SUPPORTED_METHODS),
+ MaxAge = couch_util:get_value(
+ <<"max_age">>, OwnerConfig, ?CORS_DEFAULT_MAX_AGE),
+ {ok, Headers1} = chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig),
+ [
+ ?_assertEqual(?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")),
+ ?_assertEqual(string_headers(AllowMethods),
+ header(Headers1, "Access-Control-Allow-Methods")),
+ ?_assertEqual(string_headers(["accept-language", "extra"]),
+ header(Headers1, "Access-Control-Allow-Headers")),
+ ?_assertEqual(MaxAge,
+ header(Headers1, "Access-Control-Max-Age"))
+ ].
+
+
+test_preflight_request_(OwnerConfig) ->
+ Headers = [
+ {"Origin", ?DEFAULT_ORIGIN},
+ {"Access-Control-Request-Method", "GET"}
+ ],
+ Req = mock_request('OPTIONS', "/", Headers),
+ {ok, Headers1} = chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig),
+ [
+ ?_assertEqual(?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")),
+ ?_assertEqual(string_headers(?SUPPORTED_METHODS),
+ header(Headers1, "Access-Control-Allow-Methods"))
+ ].
+
+
+test_no_access_control_method_preflight_request_(OwnerConfig) ->
+ Headers = [
+ {"Origin", ?DEFAULT_ORIGIN},
+ {"Access-Control-Request-Method", notnil}
+ ],
+ Req = mock_request('OPTIONS', "/", Headers),
+ assert_not_preflight_(chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig)).
+
+
+test_preflight_request_no_allow_credentials_(OwnerConfig) ->
+ Headers = [
+ {"Origin", ?DEFAULT_ORIGIN},
+ {"Access-Control-Request-Method", "GET"}
+ ],
+ Req = mock_request('OPTIONS', "/", Headers),
+ {ok, Headers1} = chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig),
+ [
+ ?_assertEqual(?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")),
+ ?_assertEqual(string_headers(?SUPPORTED_METHODS),
+ header(Headers1, "Access-Control-Allow-Methods")),
+ ?_assertEqual(undefined,
+ header(Headers1, "Access-Control-Allow-Credentials"))
+ ].
+
+
+test_preflight_request_empty_request_headers_(OwnerConfig) ->
+ Headers = [
+ {"Origin", ?DEFAULT_ORIGIN},
+ {"Access-Control-Request-Method", "POST"},
+ {"Access-Control-Request-Headers", ""}
+ ],
+ Req = mock_request('OPTIONS', "/", Headers),
+ {ok, Headers1} = chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig),
+ [
+ ?_assertEqual(?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")),
+ ?_assertEqual(string_headers(?SUPPORTED_METHODS),
+ header(Headers1, "Access-Control-Allow-Methods")),
+ ?_assertEqual("",
+ header(Headers1, "Access-Control-Allow-Headers"))
+ ].
+
+
+test_db_request_(OwnerConfig) ->
+ Origin = ?DEFAULT_ORIGIN,
+ Headers = [{"Origin", Origin}],
+ Req = mock_request('GET', "/my_db", Headers),
+ Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
+ [
+ ?_assertEqual(?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")),
+ ?_assertEqual(?EXPOSED_HEADERS,
+ header(Headers1, "Access-Control-Expose-Headers"))
+ ].
+
+test_db_request_with_custom_config_(OwnerConfig) ->
+ Origin = ?DEFAULT_ORIGIN,
+ Headers = [{"Origin", Origin}, {"extra", "EXTRA"}],
+ Req = mock_request('GET', "/my_db", Headers),
+ Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
+ ExposedHeaders = couch_util:get_value(
+ <<"exposed_headers">>, OwnerConfig, ?COUCH_HEADERS),
+ [
+ ?_assertEqual(?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")),
+ ?_assertEqual(lists:sort(["content-type" | ExposedHeaders]),
+ lists:sort(
+ split_list(header(Headers1, "Access-Control-Expose-Headers"))))
+ ].
+
+
+test_db_preflight_request_(OwnerConfig) ->
+ Headers = [
+ {"Origin", ?DEFAULT_ORIGIN}
+ ],
+ Req = mock_request('OPTIONS', "/my_db", Headers),
+ {ok, Headers1} = chttpd_cors:maybe_handle_preflight_request(Req, OwnerConfig),
+ [
+ ?_assertEqual(?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")),
+ ?_assertEqual(string_headers(?SUPPORTED_METHODS),
+ header(Headers1, "Access-Control-Allow-Methods"))
+ ].
+
+
+test_db_host_origin_request_(OwnerConfig) ->
+ Origin = ?DEFAULT_ORIGIN,
+ Headers = [
+ {"Origin", Origin},
+ {"Host", "example.com"}
+ ],
+ Req = mock_request('GET', "/my_db", Headers),
+ Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
+ [
+ ?_assertEqual(?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")),
+ ?_assertEqual(?EXPOSED_HEADERS,
+ header(Headers1, "Access-Control-Expose-Headers"))
+ ].
+
+
+test_preflight_origin_helper_(OwnerConfig, Origin, ExpectedOrigin) ->
+ Headers = [
+ {"Origin", Origin},
+ {"Access-Control-Request-Method", "GET"}
+ ],
+ Req = mock_request('OPTIONS', "/", Headers),
+ Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
+ [?_assertEqual(ExpectedOrigin,
+ header(Headers1, "Access-Control-Allow-Origin"))
+ ].
+
+
+test_preflight_with_port_no_origin_(OwnerConfig) ->
+ Origin = ?DEFAULT_ORIGIN ++ ":5984",
+ test_preflight_origin_helper_(OwnerConfig, Origin, undefined).
+
+
+test_preflight_with_port_with_origin_(OwnerConfig) ->
+ Origin = ?DEFAULT_ORIGIN ++ ":5984",
+ test_preflight_origin_helper_(OwnerConfig, Origin, Origin).
+
+
+test_preflight_with_scheme_no_origin_(OwnerConfig) ->
+ test_preflight_origin_helper_(OwnerConfig, ?DEFAULT_ORIGIN_HTTPS, undefined).
+
+
+test_preflight_with_scheme_with_origin_(OwnerConfig) ->
+ Origin = ?DEFAULT_ORIGIN_HTTPS,
+ test_preflight_origin_helper_(OwnerConfig, Origin, Origin).
+
+
+test_preflight_with_scheme_port_no_origin_(OwnerConfig) ->
+ Origin = ?DEFAULT_ORIGIN_HTTPS ++ ":5984",
+ test_preflight_origin_helper_(OwnerConfig, Origin, undefined).
+
+
+test_preflight_with_scheme_port_with_origin_(OwnerConfig) ->
+ Origin = ?DEFAULT_ORIGIN_HTTPS ++ ":5984",
+ test_preflight_origin_helper_(OwnerConfig, Origin, Origin).
+
+
+test_case_sensitive_mismatch_of_allowed_origins_(OwnerConfig) ->
+ Origin = "http://EXAMPLE.COM",
+ Headers = [{"Origin", Origin}],
+ Req = mock_request('GET', "/", Headers),
+ Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
+ [
+ ?_assertEqual(?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")),
+ ?_assertEqual(?EXPOSED_HEADERS,
+ header(Headers1, "Access-Control-Expose-Headers"))
+ ].
+
+
+test_db_request_credentials_header_off_(OwnerConfig) ->
+ Origin = ?DEFAULT_ORIGIN,
+ Headers = [{"Origin", Origin}],
+ Req = mock_request('GET', "/", Headers),
+ Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
+ [
+ ?_assertEqual(?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")),
+ ?_assertEqual(undefined,
+ header(Headers1, "Access-Control-Allow-Credentials"))
+ ].
+
+
+test_db_request_credentials_header_on_(OwnerConfig) ->
+ Origin = ?DEFAULT_ORIGIN,
+ Headers = [{"Origin", Origin}],
+ Req = mock_request('GET', "/", Headers),
+ Headers1 = chttpd_cors:headers(Req, Headers, Origin, OwnerConfig),
+ [
+ ?_assertEqual(?DEFAULT_ORIGIN,
+ header(Headers1, "Access-Control-Allow-Origin")),
+ ?_assertEqual("true",
+ header(Headers1, "Access-Control-Allow-Credentials"))
+ ].
+
+split_list(S) ->
+ re:split(S, "\\s*,\\s*", [trim, {return, list}]).
diff --git a/src/chttpd/test/eunit/chttpd_csp_tests.erl b/src/chttpd/test/eunit/chttpd_csp_tests.erl
new file mode 100644
index 000000000..e86436254
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_csp_tests.erl
@@ -0,0 +1,81 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_csp_tests).
+
+-include_lib("couch/include/couch_eunit.hrl").
+
+
+setup() ->
+ ok = config:set("csp", "enable", "true", false),
+ Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+ Port = mochiweb_socket_server:get(chttpd, port),
+ lists:concat(["http://", Addr, ":", Port, "/_utils/"]).
+
+teardown(_) ->
+ ok.
+
+
+
+csp_test_() ->
+ {
+ "Content Security Policy tests",
+ {
+ setup,
+ fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+ {
+ foreach,
+ fun setup/0, fun teardown/1,
+ [
+ fun should_not_return_any_csp_headers_when_disabled/1,
+ fun should_apply_default_policy/1,
+ fun should_return_custom_policy/1,
+ fun should_only_enable_csp_when_true/1
+ ]
+ }
+ }
+ }.
+
+
+should_not_return_any_csp_headers_when_disabled(Url) ->
+ ?_assertEqual(undefined,
+ begin
+ ok = config:set("csp", "enable", "false", false),
+ {ok, _, Headers, _} = test_request:get(Url),
+ proplists:get_value("Content-Security-Policy", Headers)
+ end).
+
+should_apply_default_policy(Url) ->
+ ?_assertEqual(
+ "default-src 'self'; img-src 'self' data:; font-src 'self'; "
+ "script-src 'self' 'unsafe-eval'; style-src 'self' 'unsafe-inline';",
+ begin
+ {ok, _, Headers, _} = test_request:get(Url),
+ proplists:get_value("Content-Security-Policy", Headers)
+ end).
+
+should_return_custom_policy(Url) ->
+ ?_assertEqual("default-src 'http://example.com';",
+ begin
+ ok = config:set("csp", "header_value",
+ "default-src 'http://example.com';", false),
+ {ok, _, Headers, _} = test_request:get(Url),
+ proplists:get_value("Content-Security-Policy", Headers)
+ end).
+
+should_only_enable_csp_when_true(Url) ->
+ ?_assertEqual(undefined,
+ begin
+ ok = config:set("csp", "enable", "tru", false),
+ {ok, _, Headers, _} = test_request:get(Url),
+ proplists:get_value("Content-Security-Policy", Headers)
+ end).
diff --git a/src/chttpd/test/eunit/chttpd_db_attachment_size_tests.erl b/src/chttpd/test/eunit/chttpd_db_attachment_size_tests.erl
new file mode 100644
index 000000000..0ab08dd80
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_db_attachment_size_tests.erl
@@ -0,0 +1,206 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_db_attachment_size_tests).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+-define(USER, "chttpd_db_att_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+-define(CONTENT_JSON, {"Content-Type", "application/json"}).
+-define(CONTENT_MULTI_RELATED, {"Content-Type",
+ "multipart/related;boundary=\"bound\""}).
+
+
+setup() ->
+ Hashed = couch_passwords:hash_admin_password(?PASS),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ ok = config:set("couchdb", "max_attachment_size", "50", _Persist=false),
+ TmpDb = ?tempdb(),
+ Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+ Port = integer_to_list(mochiweb_socket_server:get(chttpd, port)),
+ Url = "http://" ++ Addr ++ ":" ++ Port ++ "/" ++ ?b2l(TmpDb),
+ create_db(Url),
+ add_doc(Url, "doc1"),
+ Url.
+
+
+teardown(Url) ->
+ delete_db(Url),
+ ok = config:delete("admins", ?USER, _Persist=false),
+ ok = config:delete("couchdb", "max_attachment_size").
+
+
+attachment_size_test_() ->
+ {
+ "chttpd max_attachment_size tests",
+ {
+ setup,
+ fun chttpd_test_util:start_couch/0,
+ fun chttpd_test_util:stop_couch/1,
+ {
+ foreach,
+ fun setup/0, fun teardown/1,
+ [
+ fun put_inline/1,
+ fun put_simple/1,
+ fun put_simple_chunked/1,
+ fun put_mp_related/1
+ ]
+ }
+ }
+ }.
+
+
+put_inline(Url) ->
+ ?_test(begin
+ Status = put_inline(Url, "doc2", 50),
+ ?assert(Status =:= 201 orelse Status =:= 202),
+ ?assertEqual(413, put_inline(Url, "doc3", 51))
+ end).
+
+
+put_simple(Url) ->
+ ?_test(begin
+ Headers = [{"Content-Type", "app/binary"}],
+ Rev1 = doc_rev(Url, "doc1"),
+ Data1 = data(50),
+ Status1 = put_req(Url ++ "/doc1/att2?rev=" ++ Rev1, Headers, Data1),
+ ?assert(Status1 =:= 201 orelse Status1 =:= 202),
+ Data2 = data(51),
+ Rev2 = doc_rev(Url, "doc1"),
+ Status2 = put_req(Url ++ "/doc1/att3?rev=" ++ Rev2, Headers, Data2),
+ ?assertEqual(413, Status2)
+ end).
+
+
+put_simple_chunked(Url) ->
+ ?_test(begin
+ Headers = [{"Content-Type", "app/binary"}],
+ Rev1 = doc_rev(Url, "doc1"),
+ DataFun1 = data_stream_fun(50),
+ Status1 = put_req_chunked(Url ++ "/doc1/att2?rev=" ++ Rev1, Headers, DataFun1),
+ ?assert(Status1 =:= 201 orelse Status1 =:= 202),
+ DataFun2 = data_stream_fun(51),
+ Rev2 = doc_rev(Url, "doc1"),
+ Status2 = put_req_chunked(Url ++ "/doc1/att3?rev=" ++ Rev2, Headers, DataFun2),
+ ?assertEqual(413, Status2)
+ end).
+
+
+put_mp_related(Url) ->
+ ?_test(begin
+ Headers = [?CONTENT_MULTI_RELATED],
+ Body1 = mp_body(50),
+ Status1 = put_req(Url ++ "/doc2", Headers, Body1),
+ ?assert(Status1 =:= 201 orelse Status1 =:= 202),
+ Body2 = mp_body(51),
+ Status2 = put_req(Url ++ "/doc3", Headers, Body2),
+ ?assertEqual(413, Status2)
+ end).
+
+
+% Helper functions
+
+create_db(Url) ->
+ Status = put_req(Url, "{}"),
+ ?assert(Status =:= 201 orelse Status =:= 202).
+
+
+add_doc(Url, DocId) ->
+ Status = put_req(Url ++ "/" ++ DocId, "{}"),
+ ?assert(Status =:= 201 orelse Status =:= 202).
+
+
+delete_db(Url) ->
+ {ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
+
+
+put_inline(Url, DocId, Size) ->
+ Doc = "{\"_attachments\": {\"att1\":{"
+ "\"content_type\": \"app/binary\", "
+ "\"data\": \"" ++ data_b64(Size) ++ "\""
+ "}}}",
+ put_req(Url ++ "/" ++ DocId, Doc).
+
+
+mp_body(AttSize) ->
+ AttData = data(AttSize),
+ SizeStr = integer_to_list(AttSize),
+ string:join([
+ "--bound",
+
+ "Content-Type: application/json",
+
+ "",
+
+ "{\"_id\":\"doc2\", \"_attachments\":{\"att\":"
+ "{\"content_type\":\"app/binary\", \"length\":" ++ SizeStr ++ ","
+ "\"follows\":true}}}",
+
+ "--bound",
+
+ "Content-Disposition: attachment; filename=\"att\"",
+
+ "Content-Type: app/binary",
+
+ "",
+
+ AttData,
+
+ "--bound--"
+ ], "\r\n").
+
+
+doc_rev(Url, DocId) ->
+ {200, ResultProps} = get_req(Url ++ "/" ++ DocId),
+ {<<"_rev">>, BinRev} = lists:keyfind(<<"_rev">>, 1, ResultProps),
+ binary_to_list(BinRev).
+
+
+put_req(Url, Body) ->
+ put_req(Url, [], Body).
+
+
+put_req(Url, Headers, Body) ->
+ {ok, Status, _, _} = test_request:put(Url, Headers ++ [?AUTH], Body),
+ Status.
+
+
+put_req_chunked(Url, Headers, Body) ->
+ Opts = [{transfer_encoding, {chunked, 1}}],
+ {ok, Status, _, _} = test_request:put(Url, Headers ++ [?AUTH], Body, Opts),
+ Status.
+
+
+get_req(Url) ->
+ {ok, Status, _, ResultBody} = test_request:get(Url, [?CONTENT_JSON, ?AUTH]),
+ {[_ | _] = ResultProps} = ?JSON_DECODE(ResultBody),
+ {Status, ResultProps}.
+
+% Data streaming generator for ibrowse client. ibrowse will repeatedly call the
+% function with State and it should return {ok, Data, NewState} or eof at end.
+data_stream_fun(Size) ->
+ Fun = fun(0) -> eof; (BytesLeft) ->
+ {ok, <<"x">>, BytesLeft - 1}
+ end,
+ {Fun, Size}.
+
+
+data(Size) ->
+ string:copies("x", Size).
+
+
+data_b64(Size) ->
+ base64:encode_to_string(data(Size)).
diff --git a/src/chttpd/test/eunit/chttpd_db_bulk_get_multipart_test.erl b/src/chttpd/test/eunit/chttpd_db_bulk_get_multipart_test.erl
new file mode 100644
index 000000000..8a95c92ac
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_db_bulk_get_multipart_test.erl
@@ -0,0 +1,313 @@
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+%% use this file except in compliance with the License. You may obtain a copy of
+%% the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+%% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+%% License for the specific language governing permissions and limitations under
+%% the License.
+
+-module(chttpd_db_bulk_get_multipart_test).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+-define(TIMEOUT, 3000).
+
+
+setup() ->
+ mock(config),
+ mock(chttpd),
+ mock(couch_epi),
+ mock(couch_httpd),
+ mock(couch_stats),
+ mock(fabric),
+ mock(mochireq),
+ Pid = spawn_accumulator(),
+ Pid.
+
+
+teardown(Pid) ->
+ ok = stop_accumulator(Pid),
+ meck:unload().
+
+
+bulk_get_test_() ->
+ {
+ "/db/_bulk_get tests",
+ {
+ foreach, fun setup/0, fun teardown/1,
+ [
+ fun should_require_docs_field/1,
+ fun should_not_accept_specific_query_params/1,
+ fun should_return_empty_results_on_no_docs/1,
+ fun should_get_doc_with_all_revs/1,
+ fun should_validate_doc_with_bad_id/1,
+ fun should_validate_doc_with_bad_rev/1,
+ fun should_validate_missing_doc/1,
+ fun should_validate_bad_atts_since/1,
+ fun should_include_attachments_when_atts_since_specified/1
+ ]
+ }
+ }.
+
+
+should_require_docs_field(_) ->
+ Req = fake_request({[{}]}),
+ Db = test_util:fake_db([{name, <<"foo">>}]),
+ ?_assertThrow({bad_request, _}, chttpd_db:db_req(Req, Db)).
+
+
+should_not_accept_specific_query_params(_) ->
+ Req = fake_request({[{<<"docs">>, []}]}),
+ Db = test_util:fake_db([{name, <<"foo">>}]),
+ lists:map(fun (Param) ->
+ {Param, ?_assertThrow({bad_request, _},
+ begin
+ ok = meck:expect(chttpd, qs,
+ fun(_) -> [{Param, ""}] end),
+ chttpd_db:db_req(Req, Db)
+ end)}
+ end, ["rev", "open_revs", "atts_since", "w", "new_edits"]).
+
+
+should_return_empty_results_on_no_docs(Pid) ->
+ Req = fake_request({[{<<"docs">>, []}]}),
+ Db = test_util:fake_db([{name, <<"foo">>}]),
+ chttpd_db:db_req(Req, Db),
+ Results = get_results_from_response(Pid),
+ ?_assertEqual([], Results).
+
+
+should_get_doc_with_all_revs(Pid) ->
+ DocId = <<"docudoc">>,
+ Req = fake_request(DocId),
+ Db = test_util:fake_db([{name, <<"foo">>}]),
+
+ DocRevA = #doc{id = DocId, body = {[{<<"_rev">>, <<"1-ABC">>}]}},
+ DocRevB = #doc{id = DocId, body = {[{<<"_rev">>, <<"1-CDE">>}]}},
+
+ mock_open_revs(all, {ok, [{ok, DocRevA}, {ok, DocRevB}]}),
+ chttpd_db:db_req(Req, Db),
+
+ Result = get_results_from_response(Pid),
+ ?_assertEqual(DocId, couch_util:get_value(<<"_id">>, Result)).
+
+
+should_validate_doc_with_bad_id(Pid) ->
+ DocId = <<"_docudoc">>,
+
+ Req = fake_request(DocId),
+ Db = test_util:fake_db([{name, <<"foo">>}]),
+ chttpd_db:db_req(Req, Db),
+
+ Result = get_results_from_response(Pid),
+ ?assertEqual(DocId, couch_util:get_value(<<"id">>, Result)),
+
+ ?_assertMatch([{<<"id">>, DocId},
+ {<<"rev">>, null},
+ {<<"error">>, <<"illegal_docid">>},
+ {<<"reason">>, _}], Result).
+
+
+should_validate_doc_with_bad_rev(Pid) ->
+ DocId = <<"docudoc">>,
+ Rev = <<"revorev">>,
+
+ Req = fake_request(DocId, Rev),
+ Db = test_util:fake_db([{name, <<"foo">>}]),
+ chttpd_db:db_req(Req, Db),
+
+ Result = get_results_from_response(Pid),
+ ?assertEqual(DocId, couch_util:get_value(<<"id">>, Result)),
+
+ ?_assertMatch([{<<"id">>, DocId},
+ {<<"rev">>, Rev},
+ {<<"error">>, <<"bad_request">>},
+ {<<"reason">>, _}], Result).
+
+
+should_validate_missing_doc(Pid) ->
+ DocId = <<"docudoc">>,
+ Rev = <<"1-revorev">>,
+
+ Req = fake_request(DocId, Rev),
+ Db = test_util:fake_db([{name, <<"foo">>}]),
+ mock_open_revs([{1,<<"revorev">>}], {ok, []}),
+ chttpd_db:db_req(Req, Db),
+
+ Result = get_results_from_response(Pid),
+ ?assertEqual(DocId, couch_util:get_value(<<"id">>, Result)),
+
+ ?_assertMatch([{<<"id">>, DocId},
+ {<<"rev">>, Rev},
+ {<<"error">>, <<"not_found">>},
+ {<<"reason">>, _}], Result).
+
+
+should_validate_bad_atts_since(Pid) ->
+ DocId = <<"docudoc">>,
+ Rev = <<"1-revorev">>,
+
+ Req = fake_request(DocId, Rev, <<"badattsince">>),
+ Db = test_util:fake_db([{name, <<"foo">>}]),
+ mock_open_revs([{1,<<"revorev">>}], {ok, []}),
+ chttpd_db:db_req(Req, Db),
+
+ Result = get_results_from_response(Pid),
+ ?assertEqual(DocId, couch_util:get_value(<<"id">>, Result)),
+
+ ?_assertMatch([{<<"id">>, DocId},
+ {<<"rev">>, <<"badattsince">>},
+ {<<"error">>, <<"bad_request">>},
+ {<<"reason">>, _}], Result).
+
+
+%% When atts_since is given, fabric:open_revs/4 must be called with the
+%% parsed atts_since rev and the attachments option.
+should_include_attachments_when_atts_since_specified(_) ->
+    DocId = <<"docudoc">>,
+    Rev = <<"1-revorev">>,
+
+    Req = fake_request(DocId, Rev, [<<"1-abc">>]),
+    Db = test_util:fake_db([{name, <<"foo">>}]),
+    mock_open_revs([{1,<<"revorev">>}], {ok, []}),
+    chttpd_db:db_req(Req, Db),
+
+    %% Verify the exact argument list fabric:open_revs was invoked with.
+    ?_assert(meck:called(fabric, open_revs,
+        ['_', DocId, [{1, <<"revorev">>}],
+         [{atts_since, [{1, <<"abc">>}]}, attachments,
+          {user_ctx, undefined}]])).
+
+%% helpers
+
+%% Build a fake #httpd{} POST to /db/_bulk_get. The tuple clause takes a
+%% ready EJSON body; the binary clauses wrap id/rev/atts_since into one.
+fake_request(Payload) when is_tuple(Payload) ->
+    #httpd{method='POST', path_parts=[<<"db">>, <<"_bulk_get">>],
+           mochi_req=mochireq, req_body=Payload};
+fake_request(DocId) when is_binary(DocId) ->
+    fake_request(bulk_get_body([{<<"id">>, DocId}])).
+
+fake_request(DocId, Rev) ->
+    fake_request(bulk_get_body([{<<"id">>, DocId}, {<<"rev">>, Rev}])).
+
+fake_request(DocId, Rev, AttsSince) ->
+    fake_request(bulk_get_body([{<<"id">>, DocId},
+                                {<<"rev">>, Rev},
+                                {<<"atts_since">>, AttsSince}])).
+
+%% Wrap one doc spec (proplist) into a {"docs": [Spec]} request body.
+bulk_get_body(DocProps) ->
+    {[{<<"docs">>, [{DocProps}]}]}.
+
+
+%% Stub fabric:open_revs/4: assert the revs the handler asks for equal
+%% ExpectedRevs, then hand back the canned Response.
+mock_open_revs(ExpectedRevs, Response) ->
+    ok = meck:expect(fabric, open_revs, fun(_Db, _DocId, Revs, _Options) ->
+        ?assertEqual(ExpectedRevs, Revs),
+        Response
+    end).
+
+
+%% Per-module meck stubs; together they let chttpd_db:db_req/2 run without
+%% an HTTP server, capturing output via the send_chunk/2 stub below.
+mock(mochireq) ->
+    ok = meck:new(mochireq, [non_strict]),
+    ok = meck:expect(mochireq, parse_qs, fun() -> [] end),
+    %% Advertise multipart support so the multipart response path is taken.
+    ok = meck:expect(mochireq, accepts_content_type, fun("multipart/mixed") -> true;
+                                                        ("multipart/related") -> true;
+                                                        (_) -> false end),
+    ok;
+mock(couch_httpd) ->
+    ok = meck:new(couch_httpd, [passthrough]),
+    ok = meck:expect(couch_httpd, validate_ctype, fun(_, _) -> ok end),
+    ok = meck:expect(couch_httpd, last_chunk, fun(_) -> {ok, nil} end),
+    ok = meck:expect(couch_httpd, send_chunk, fun send_chunk/2),
+    ok;
+mock(chttpd) ->
+    ok = meck:new(chttpd, [passthrough]),
+    ok = meck:expect(chttpd, start_json_response, fun(_, _) -> {ok, nil} end),
+    ok = meck:expect(chttpd, start_chunked_response, fun(_, _, _) -> {ok, nil} end),
+    ok = meck:expect(chttpd, end_json_response, fun(_) -> ok end),
+    ok = meck:expect(chttpd, send_chunk, fun send_chunk/2),
+    %% The fake request carries its body directly; skip real body parsing.
+    ok = meck:expect(chttpd, json_body_obj, fun (#httpd{req_body=Body}) -> Body end),
+    ok;
+mock(couch_epi) ->
+    ok = meck:new(couch_epi, [passthrough]),
+    ok = meck:expect(couch_epi, any, fun(_, _, _, _, _) -> false end),
+    ok;
+mock(couch_stats) ->
+    %% Stats are irrelevant here; every counter/histogram call is a no-op.
+    ok = meck:new(couch_stats, [passthrough]),
+    ok = meck:expect(couch_stats, increment_counter, fun(_) -> ok end),
+    ok = meck:expect(couch_stats, increment_counter, fun(_, _) -> ok end),
+    ok = meck:expect(couch_stats, decrement_counter, fun(_) -> ok end),
+    ok = meck:expect(couch_stats, decrement_counter, fun(_, _) -> ok end),
+    ok = meck:expect(couch_stats, update_histogram, fun(_, _) -> ok end),
+    ok = meck:expect(couch_stats, update_gauge, fun(_, _) -> ok end),
+    ok;
+mock(fabric) ->
+    %% open_revs expectations are installed per-test via mock_open_revs/2.
+    ok = meck:new(fabric, [passthrough]),
+    ok;
+mock(config) ->
+    ok = meck:new(config, [passthrough]),
+    ok = meck:expect(config, get, fun(_, _, Default) -> Default end),
+    ok.
+
+
+%% Start the chunk-collector process and stash its pid in the process
+%% dictionary so the send_chunk/2 stub can reach it from mocked calls.
+spawn_accumulator() ->
+    Self = self(),
+    Collector = spawn(fun() -> accumulator_loop(Self, []) end),
+    erlang:put(chunks_gather, Collector),
+    Collector.
+
+%% Chunk-collector loop: acks every message with {ok, Ref}; chunks are
+%% kept newest-first. {stop, Ref} acks and ends the loop.
+accumulator_loop(Parent, Chunks) ->
+    receive
+        {put, Ref, Chunk} ->
+            Parent ! {ok, Ref},
+            accumulator_loop(Parent, [Chunk | Chunks]);
+        {get, Ref} ->
+            Parent ! {ok, Ref, Chunks},
+            accumulator_loop(Parent, Chunks);
+        {stop, Ref} ->
+            Parent ! {ok, Ref}
+    end.
+
+%% Synchronously stop the collector; throw if it fails to ack in time.
+stop_accumulator(Pid) ->
+    Ref = make_ref(),
+    Pid ! {stop, Ref},
+    receive
+        {ok, Ref} -> ok
+    after ?TIMEOUT ->
+        throw({timeout, <<"process stop timeout">>})
+    end.
+
+
+%% Stand-in for chttpd/couch_httpd send_chunk/2: forwards each chunk to
+%% the collector registered by spawn_accumulator/0. A (possibly nested)
+%% list is sent element by element; the empty list is a no-op.
+send_chunk(_, []) ->
+    {ok, nil};
+send_chunk(_Req, [H|T]=Chunk) when is_list(Chunk) ->
+    send_chunk(_Req, H),
+    send_chunk(_Req, T),
+    {ok, nil};
+
+
+%% Fetch the raw chunk list from the collector (chunks are newest-first).
+get_response(Pid) ->
+    Ref = make_ref(),
+    Pid ! {get, Ref},
+    receive
+        {ok, Ref, Acc} ->
+            Acc
+    after ?TIMEOUT ->
+        throw({timeout, <<"get response timeout">>})
+    end.
+
+%% Decode the per-doc JSON part of the multipart response.
+%% NOTE(review): chunks are newest-first, so nth(2, ...) picks the
+%% second-to-last chunk written; assumes the multipart layout places the
+%% JSON body there -- confirm against chttpd_db's multipart writer.
+get_results_from_response(Pid) ->
+    case get_response(Pid) of
+        [] ->
+            [];
+        Result ->
+            {Result1} = ?JSON_DECODE(lists:nth(2, Result)),
+            Result1
+    end.
diff --git a/src/chttpd/test/eunit/chttpd_db_bulk_get_test.erl b/src/chttpd/test/eunit/chttpd_db_bulk_get_test.erl
new file mode 100644
index 000000000..864e7079a
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_db_bulk_get_test.erl
@@ -0,0 +1,341 @@
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+%% use this file except in compliance with the License. You may obtain a copy of
+%% the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+%% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+%% License for the specific language governing permissions and limitations under
+%% the License.
+
+-module(chttpd_db_bulk_get_test).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+-define(TIMEOUT, 3000).
+
+
+%% Install all module mocks and start the chunk collector; the collector
+%% pid is returned and handed to each test by the foreach fixture.
+setup() ->
+    mock(config),
+    mock(chttpd),
+    mock(couch_epi),
+    mock(couch_httpd),
+    mock(couch_stats),
+    mock(fabric),
+    mock(mochireq),
+    Pid = spawn_accumulator(),
+    Pid.
+
+
+%% Stop the collector first, then unload every mock installed by setup/0.
+teardown(Pid) ->
+    ok = stop_accumulator(Pid),
+    meck:unload(config),
+    meck:unload(chttpd),
+    meck:unload(couch_epi),
+    meck:unload(couch_httpd),
+    meck:unload(couch_stats),
+    meck:unload(fabric),
+    meck:unload(mochireq).
+
+
+%% EUnit generator: run every _bulk_get case inside the setup/teardown
+%% foreach fixture. mochireq here rejects multipart, so these tests
+%% exercise the JSON response path.
+bulk_get_test_() ->
+    {
+        "/db/_bulk_get tests",
+        {
+            foreach, fun setup/0, fun teardown/1,
+            [
+                fun should_require_docs_field/1,
+                fun should_not_accept_specific_query_params/1,
+                fun should_return_empty_results_on_no_docs/1,
+                fun should_get_doc_with_all_revs/1,
+                fun should_validate_doc_with_bad_id/1,
+                fun should_validate_doc_with_bad_rev/1,
+                fun should_validate_missing_doc/1,
+                fun should_validate_bad_atts_since/1,
+                fun should_include_attachments_when_atts_since_specified/1
+            ]
+        }
+    }.
+
+
+%% A request body without a "docs" field is rejected with bad_request.
+should_require_docs_field(_) ->
+    Req = fake_request({[{}]}),
+    ?_assertThrow({bad_request, _}, chttpd_db:db_req(Req, nil)).
+
+
+%% Doc-level query parameters are not allowed on the _bulk_get request
+%% itself; each listed parameter must trigger bad_request. Returns one
+%% labelled test per parameter.
+should_not_accept_specific_query_params(_) ->
+    Req = fake_request({[{<<"docs">>, []}]}),
+    lists:map(fun (Param) ->
+        {Param, ?_assertThrow({bad_request, _},
+            begin
+                %% Make chttpd:qs/1 report exactly this one query param.
+                ok = meck:expect(chttpd, qs,
+                    fun(_) -> [{Param, ""}] end),
+                chttpd_db:db_req(Req, nil)
+            end)}
+    end, ["rev", "open_revs", "atts_since", "w", "new_edits"]).
+
+
+%% An empty "docs" list yields an empty "results" array.
+should_return_empty_results_on_no_docs(Pid) ->
+    Req = fake_request({[{<<"docs">>, []}]}),
+    chttpd_db:db_req(Req, nil),
+    Results = get_results_from_response(Pid),
+    ?_assertEqual([], Results).
+
+
+%% With no rev given, open_revs is stubbed to return two revisions and
+%% both must appear as "ok" entries under the doc's "docs" list.
+should_get_doc_with_all_revs(Pid) ->
+    DocId = <<"docudoc">>,
+    Req = fake_request(DocId),
+
+    RevA = {[{<<"_id">>, DocId}, {<<"_rev">>, <<"1-ABC">>}]},
+    RevB = {[{<<"_id">>, DocId}, {<<"_rev">>, <<"1-CDE">>}]},
+    DocRevA = #doc{id = DocId, body = {[{<<"_rev">>, <<"1-ABC">>}]}},
+    DocRevB = #doc{id = DocId, body = {[{<<"_rev">>, <<"1-CDE">>}]}},
+
+    mock_open_revs(all, {ok, [{ok, DocRevA}, {ok, DocRevB}]}),
+    chttpd_db:db_req(Req, test_util:fake_db([{name, <<"foo">>}])),
+
+    [{Result}] = get_results_from_response(Pid),
+    ?assertEqual(DocId, couch_util:get_value(<<"id">>, Result)),
+
+    Docs = couch_util:get_value(<<"docs">>, Result),
+    ?assertEqual(2, length(Docs)),
+
+    [{DocA0}, {DocB0}] = Docs,
+
+    DocA = couch_util:get_value(<<"ok">>, DocA0),
+    DocB = couch_util:get_value(<<"ok">>, DocB0),
+
+    ?_assertEqual([RevA, RevB], [DocA, DocB]).
+
+
+%% A doc id with a reserved leading underscore must produce an
+%% illegal_docid error entry (rev reported as null).
+should_validate_doc_with_bad_id(Pid) ->
+    DocId = <<"_docudoc">>,
+
+    Req = fake_request(DocId),
+    chttpd_db:db_req(Req, test_util:fake_db([{name, <<"foo">>}])),
+
+    [{Result}] = get_results_from_response(Pid),
+    ?assertEqual(DocId, couch_util:get_value(<<"id">>, Result)),
+
+    Docs = couch_util:get_value(<<"docs">>, Result),
+    ?assertEqual(1, length(Docs)),
+    [{DocResult}] = Docs,
+
+    Doc = couch_util:get_value(<<"error">>, DocResult),
+
+    ?_assertMatch({[{<<"id">>, DocId},
+                    {<<"rev">>, null},
+                    {<<"error">>, <<"illegal_docid">>},
+                    {<<"reason">>, _}]},
+                  Doc).
+
+
+%% A rev that does not parse must produce a bad_request error entry.
+should_validate_doc_with_bad_rev(Pid) ->
+    DocId = <<"docudoc">>,
+    Rev = <<"revorev">>,
+
+    Req = fake_request(DocId, Rev),
+    chttpd_db:db_req(Req, test_util:fake_db([{name, <<"foo">>}])),
+
+    [{Result}] = get_results_from_response(Pid),
+    ?assertEqual(DocId, couch_util:get_value(<<"id">>, Result)),
+
+    Docs = couch_util:get_value(<<"docs">>, Result),
+    ?assertEqual(1, length(Docs)),
+    [{DocResult}] = Docs,
+
+    Doc = couch_util:get_value(<<"error">>, DocResult),
+
+    ?_assertMatch({[{<<"id">>, DocId},
+                    {<<"rev">>, Rev},
+                    {<<"error">>, <<"bad_request">>},
+                    {<<"reason">>, _}]},
+                  Doc).
+
+
+%% open_revs returning no docs must yield a not_found error entry.
+should_validate_missing_doc(Pid) ->
+    DocId = <<"docudoc">>,
+    Rev = <<"1-revorev">>,
+
+    Req = fake_request(DocId, Rev),
+    mock_open_revs([{1,<<"revorev">>}], {ok, []}),
+    chttpd_db:db_req(Req, test_util:fake_db([{name, <<"foo">>}])),
+
+    [{Result}] = get_results_from_response(Pid),
+    ?assertEqual(DocId, couch_util:get_value(<<"id">>, Result)),
+
+    Docs = couch_util:get_value(<<"docs">>, Result),
+    ?assertEqual(1, length(Docs)),
+    [{DocResult}] = Docs,
+
+    Doc = couch_util:get_value(<<"error">>, DocResult),
+
+    ?_assertMatch({[{<<"id">>, DocId},
+                    {<<"rev">>, Rev},
+                    {<<"error">>, <<"not_found">>},
+                    {<<"reason">>, _}]},
+                  Doc).
+
+
+%% A malformed atts_since value must be reported as bad_request, with
+%% the bad value echoed as "rev" in the error entry.
+should_validate_bad_atts_since(Pid) ->
+    DocId = <<"docudoc">>,
+    Rev = <<"1-revorev">>,
+
+    Req = fake_request(DocId, Rev, <<"badattsince">>),
+    mock_open_revs([{1,<<"revorev">>}], {ok, []}),
+    chttpd_db:db_req(Req, test_util:fake_db([{name, <<"foo">>}])),
+
+    [{Result}] = get_results_from_response(Pid),
+    ?assertEqual(DocId, couch_util:get_value(<<"id">>, Result)),
+
+    Docs = couch_util:get_value(<<"docs">>, Result),
+    ?assertEqual(1, length(Docs)),
+    [{DocResult}] = Docs,
+
+    Doc = couch_util:get_value(<<"error">>, DocResult),
+
+    ?_assertMatch({[{<<"id">>, DocId},
+                    {<<"rev">>, <<"badattsince">>},
+                    {<<"error">>, <<"bad_request">>},
+                    {<<"reason">>, _}]},
+                  Doc).
+
+
+%% When atts_since is given, fabric:open_revs/4 must be called with the
+%% parsed atts_since rev and the attachments option.
+should_include_attachments_when_atts_since_specified(_) ->
+    DocId = <<"docudoc">>,
+    Rev = <<"1-revorev">>,
+
+    Req = fake_request(DocId, Rev, [<<"1-abc">>]),
+    mock_open_revs([{1,<<"revorev">>}], {ok, []}),
+    chttpd_db:db_req(Req, test_util:fake_db([{name, <<"foo">>}])),
+
+    %% Verify the exact argument list fabric:open_revs was invoked with.
+    ?_assert(meck:called(fabric, open_revs,
+        ['_', DocId, [{1, <<"revorev">>}],
+         [{atts_since, [{1, <<"abc">>}]}, attachments,
+          {user_ctx, undefined}]])).
+
+%% helpers
+
+%% Build a fake #httpd{} POST to /db/_bulk_get. The tuple clause takes a
+%% ready EJSON body; the binary clauses wrap id/rev/atts_since into one.
+fake_request(Payload) when is_tuple(Payload) ->
+    #httpd{method='POST', path_parts=[<<"db">>, <<"_bulk_get">>],
+           mochi_req=mochireq, req_body=Payload};
+fake_request(DocId) when is_binary(DocId) ->
+    fake_request(bulk_get_body([{<<"id">>, DocId}])).
+
+fake_request(DocId, Rev) ->
+    fake_request(bulk_get_body([{<<"id">>, DocId}, {<<"rev">>, Rev}])).
+
+fake_request(DocId, Rev, AttsSince) ->
+    fake_request(bulk_get_body([{<<"id">>, DocId},
+                                {<<"rev">>, Rev},
+                                {<<"atts_since">>, AttsSince}])).
+
+%% Wrap one doc spec (proplist) into a {"docs": [Spec]} request body.
+bulk_get_body(DocProps) ->
+    {[{<<"docs">>, [{DocProps}]}]}.
+
+
+%% Stub fabric:open_revs/4: assert the revs the handler asks for equal
+%% ExpectedRevs, then hand back the canned Response.
+mock_open_revs(ExpectedRevs, Response) ->
+    ok = meck:expect(fabric, open_revs, fun(_Db, _DocId, Revs, _Options) ->
+        ?assertEqual(ExpectedRevs, Revs),
+        Response
+    end).
+
+
+%% Per-module meck stubs; together they let chttpd_db:db_req/2 run without
+%% an HTTP server. Unlike the multipart variant, mochireq here refuses all
+%% content types, forcing the JSON response path.
+mock(mochireq) ->
+    ok = meck:new(mochireq, [non_strict]),
+    ok = meck:expect(mochireq, parse_qs, fun() -> [] end),
+    ok = meck:expect(mochireq, accepts_content_type, fun(_) -> false end),
+    ok;
+mock(couch_httpd) ->
+    ok = meck:new(couch_httpd, [passthrough]),
+    ok = meck:expect(couch_httpd, validate_ctype, fun(_, _) -> ok end),
+    ok;
+mock(chttpd) ->
+    ok = meck:new(chttpd, [passthrough]),
+    ok = meck:expect(chttpd, start_json_response, fun(_, _) -> {ok, nil} end),
+    ok = meck:expect(chttpd, end_json_response, fun(_) -> ok end),
+    ok = meck:expect(chttpd, send_chunk, fun send_chunk/2),
+    %% The fake request carries its body directly; skip real body parsing.
+    ok = meck:expect(chttpd, json_body_obj, fun (#httpd{req_body=Body}) -> Body end),
+    ok;
+mock(couch_epi) ->
+    ok = meck:new(couch_epi, [passthrough]),
+    ok = meck:expect(couch_epi, any, fun(_, _, _, _, _) -> false end),
+    ok;
+mock(couch_stats) ->
+    %% Stats are irrelevant here; every counter/histogram call is a no-op.
+    ok = meck:new(couch_stats, [passthrough]),
+    ok = meck:expect(couch_stats, increment_counter, fun(_) -> ok end),
+    ok = meck:expect(couch_stats, increment_counter, fun(_, _) -> ok end),
+    ok = meck:expect(couch_stats, decrement_counter, fun(_) -> ok end),
+    ok = meck:expect(couch_stats, decrement_counter, fun(_, _) -> ok end),
+    ok = meck:expect(couch_stats, update_histogram, fun(_, _) -> ok end),
+    ok = meck:expect(couch_stats, update_gauge, fun(_, _) -> ok end),
+    ok;
+mock(fabric) ->
+    %% open_revs expectations are installed per-test via mock_open_revs/2.
+    ok = meck:new(fabric, [passthrough]),
+    ok;
+mock(config) ->
+    ok = meck:new(config, [passthrough]),
+    ok = meck:expect(config, get, fun(_, _, Default) -> Default end),
+    ok.
+
+
+%% Start the chunk-collector process and stash its pid in the process
+%% dictionary so the send_chunk/2 stub can reach it from mocked calls.
+spawn_accumulator() ->
+    Self = self(),
+    Collector = spawn(fun() -> accumulator_loop(Self, []) end),
+    erlang:put(chunks_gather, Collector),
+    Collector.
+
+%% Chunk-collector loop: acks every message with {ok, Ref}; chunks are
+%% kept newest-first. {stop, Ref} acks and ends the loop.
+accumulator_loop(Parent, Chunks) ->
+    receive
+        {put, Ref, Chunk} ->
+            Parent ! {ok, Ref},
+            accumulator_loop(Parent, [Chunk | Chunks]);
+        {get, Ref} ->
+            Parent ! {ok, Ref, Chunks},
+            accumulator_loop(Parent, Chunks);
+        {stop, Ref} ->
+            Parent ! {ok, Ref}
+    end.
+
+%% Synchronously stop the collector; throw if it fails to ack in time.
+stop_accumulator(Pid) ->
+    Ref = make_ref(),
+    Pid ! {stop, Ref},
+    receive
+        {ok, Ref} -> ok
+    after ?TIMEOUT ->
+        throw({timeout, <<"process stop timeout">>})
+    end.
+
+
+%% Stand-in for chttpd:send_chunk/2: forwards each chunk to the collector
+%% registered by spawn_accumulator/0. A (possibly nested) list is sent
+%% element by element; the empty list is a no-op.
+send_chunk(_, []) ->
+    {ok, nil};
+send_chunk(_Req, [H|T]=Chunk) when is_list(Chunk) ->
+    send_chunk(_Req, H),
+    send_chunk(_Req, T);
+send_chunk(_, Chunk) ->
+    Worker = erlang:get(chunks_gather),
+    Ref = make_ref(),
+    Worker ! {put, Ref, Chunk},
+    receive
+        {ok, Ref} -> {ok, nil}
+    after ?TIMEOUT ->
+        throw({timeout, <<"send chunk timeout">>})
+    end.
+
+
+%% Collect, re-order (chunks are stored newest-first) and JSON-decode
+%% everything the handler wrote through the send_chunk/2 stub.
+get_response(Pid) ->
+    Ref = make_ref(),
+    Pid ! {get, Ref},
+    receive
+        {ok, Ref, Acc} ->
+            ?JSON_DECODE(iolist_to_binary(lists:reverse(Acc)))
+    after ?TIMEOUT ->
+        throw({timeout, <<"get response timeout">>})
+    end.
+
+
+%% Extract the "results" array from the decoded _bulk_get response.
+get_results_from_response(Pid) ->
+    {Resp} = get_response(Pid),
+    couch_util:get_value(<<"results">>, Resp).
diff --git a/src/chttpd/test/eunit/chttpd_db_doc_size_tests.erl b/src/chttpd/test/eunit/chttpd_db_doc_size_tests.erl
new file mode 100644
index 000000000..88e2797a3
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_db_doc_size_tests.erl
@@ -0,0 +1,179 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_db_doc_size_tests).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+-define(USER, "chttpd_db_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+-define(CONTENT_JSON, {"Content-Type", "application/json"}).
+-define(CONTENT_MULTI_RELATED, {"Content-Type",
+ "multipart/related;boundary=\"bound\""}).
+-define(CONTENT_MULTI_FORM, {"Content-Type",
+ "multipart/form-data;boundary=\"bound\""}).
+
+
+%% Create a temp db over HTTP with max_document_size clamped to 50 bytes,
+%% so the longer test docs overflow the limit; returns the db URL.
+setup() ->
+    Hashed = couch_passwords:hash_admin_password(?PASS),
+    ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+    ok = config:set("couchdb", "max_document_size", "50"),
+    TmpDb = ?tempdb(),
+    Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+    Port = mochiweb_socket_server:get(chttpd, port),
+    Url = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(TmpDb)]),
+    create_db(Url),
+    Url.
+
+%% Drop the temp db and restore the admin user and size-limit settings.
+teardown(Url) ->
+    delete_db(Url),
+    ok = config:delete("admins", ?USER, _Persist=false),
+    ok = config:delete("couchdb", "max_document_size").
+
+%% PUT the db; print the status for diagnosis before asserting that the
+%% server answered 201 (created) or 202 (accepted).
+create_db(Url) ->
+    {ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
+    case lists:member(Status, [201, 202]) of
+        true -> ok;
+        false -> io:format(user, "~n HTTP Status Code: ~p~n", [Status])
+    end,
+    ?assert(Status =:= 201 orelse Status =:= 202).
+
+%% Delete the test db; the match asserts the server answers 200.
+delete_db(Url) ->
+    {ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
+
+%% EUnit generator: boot couch once, then run each max_document_size case
+%% against a fresh temp db (foreach setup/teardown).
+all_test_() ->
+    {
+        "chttpd db max_document_size tests",
+        {
+            setup,
+            fun chttpd_test_util:start_couch/0,
+            fun chttpd_test_util:stop_couch/1,
+            {
+                foreach,
+                fun setup/0, fun teardown/1,
+                [
+                    fun post_single_doc/1,
+                    fun put_single_doc/1,
+                    fun bulk_doc/1,
+                    fun put_post_doc_attach_inline/1,
+                    fun put_multi_part_related/1,
+                    fun post_multi_part_form/1
+                ]
+            }
+        }
+    }.
+
+%% POSTing a doc above the 50-byte cap must fail with document_too_large.
+post_single_doc(Url) ->
+    NewDoc = "{\"post_single_doc\": \"some_doc\",
+    \"_id\": \"testdoc\", \"should_be\" : \"too_large\"}",
+    {ok, _, _, ResultBody} = test_request:post(Url,
+        [?CONTENT_JSON, ?AUTH], NewDoc),
+    {[ErrorMsg | _]} = ?JSON_DECODE(ResultBody),
+    ?_assertEqual({<<"error">>, <<"document_too_large">>}, ErrorMsg).
+
+%% PUTting a doc above the 50-byte cap must fail with document_too_large.
+put_single_doc(Url) ->
+    NewDoc = "{\"post_single_doc\": \"some_doc\",
+    \"_id\": \"testdoc\", \"should_be\" : \"too_large\"}",
+    {ok, _, _, ResultBody} = test_request:put(Url ++ "/" ++ "testid",
+        [?CONTENT_JSON, ?AUTH], NewDoc),
+    {[ErrorMsg | _]} = ?JSON_DECODE(ResultBody),
+    ?_assertEqual({<<"error">>, <<"document_too_large">>}, ErrorMsg).
+
+%% _bulk_docs containing one oversized doc must fail the whole request
+%% with a single document_too_large error object.
+bulk_doc(Url) ->
+    NewDoc = "{\"docs\": [{\"doc1\": 1}, {\"errordoc\":
+    \"this_should_be_the_too_large_error_document\"}]}",
+    {ok, _, _, ResultBody} = test_request:post(Url ++ "/_bulk_docs/",
+        [?CONTENT_JSON, ?AUTH], NewDoc),
+    ResultJson = ?JSON_DECODE(ResultBody),
+    Expect = {[{<<"error">>,<<"document_too_large">>},{<<"reason">>,<<>>}]},
+    ?_assertEqual(Expect, ResultJson).
+
+%% Docs with an inline (base64) attachment: the short body stays under
+%% the limit and succeeds, the long body must be rejected -- exercised
+%% through both POST and PUT.
+put_post_doc_attach_inline(Url) ->
+    Body1 = "{\"body\":\"This is a body.\",",
+    Body2 = lists:concat(["{\"body\":\"This is a body it should fail",
+        "because there are too many characters.\","]),
+    DocRest = lists:concat(["\"_attachments\":{\"foo.txt\":{",
+        "\"content_type\":\"text/plain\",",
+        "\"data\": \"VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ=\"}}}"]),
+    Doc1 = lists:concat([Body1, DocRest]),
+    Doc2 = lists:concat([Body2, DocRest]),
+
+    {ok, _, _, ResultBody} = test_request:post(Url,
+        [?CONTENT_JSON, ?AUTH], Doc1),
+    {[Msg | _]} = ?JSON_DECODE(ResultBody),
+    {ok, _, _, ResultBody1} = test_request:post(Url,
+        [?CONTENT_JSON, ?AUTH], Doc2),
+    {[Msg1 | _]} = ?JSON_DECODE(ResultBody1),
+
+    {ok, _, _, ResultBody2} = test_request:put(Url ++ "/" ++ "accept",
+        [?CONTENT_JSON, ?AUTH], Doc1),
+    {[Msg2 | _]} = ?JSON_DECODE(ResultBody2),
+    {ok, _, _, ResultBody3} = test_request:put(Url ++ "/" ++ "fail",
+        [?CONTENT_JSON, ?AUTH], Doc2),
+    {[Msg3 | _]} = ?JSON_DECODE(ResultBody3),
+    [
+        ?_assertEqual({<<"ok">>, true}, Msg),
+        ?_assertEqual({<<"error">>, <<"document_too_large">>}, Msg1),
+        ?_assertEqual({<<"ok">>, true}, Msg2),
+        ?_assertEqual({<<"error">>, <<"document_too_large">>}, Msg3)
+    ].
+
+%% multipart/related PUT with follows-attachments: the short JSON part
+%% succeeds, the long one must be rejected with document_too_large.
+put_multi_part_related(Url) ->
+    Body1 = "{\"body\":\"This is a body.\",",
+    Body2 = lists:concat(["{\"body\":\"This is a body it should fail",
+        "because there are too many characters.\","]),
+    DocBeg = "--bound\r\nContent-Type: application/json\r\n\r\n",
+    DocRest = lists:concat(["\"_attachments\":{\"foo.txt\":{\"follows\":true,",
+        "\"content_type\":\"text/plain\",\"length\":21},\"bar.txt\":",
+        "{\"follows\":true,\"content_type\":\"text/plain\",",
+        "\"length\":20}}}\r\n--bound\r\n\r\nthis is 21 chars long",
+        "\r\n--bound\r\n\r\nthis is 20 chars lon\r\n--bound--epilogue"]),
+    Doc1 = lists:concat([DocBeg, Body1, DocRest]),
+    Doc2 = lists:concat([DocBeg, Body2, DocRest]),
+    {ok, _, _, ResultBody} = test_request:put(Url ++ "/" ++ "accept",
+        [?CONTENT_MULTI_RELATED, ?AUTH], Doc1),
+    {[Msg | _]} = ?JSON_DECODE(ResultBody),
+    {ok, _, _, ResultBody1} = test_request:put(Url ++ "/" ++ "faildoc",
+        [?CONTENT_MULTI_RELATED, ?AUTH], Doc2),
+    {[Msg1 | _]} = ?JSON_DECODE(ResultBody1),
+    [
+        ?_assertEqual({<<"ok">>, true}, Msg),
+        ?_assertEqual({<<"error">>, <<"document_too_large">>}, Msg1)
+    ].
+
+%% multipart/form-data POST (the _doc + file upload form): short doc part
+%% succeeds, long one must be rejected. The Referer header is required
+%% for form posts.
+post_multi_part_form(Url) ->
+    Port = mochiweb_socket_server:get(chttpd, port),
+    Host = lists:concat([ "http://127.0.0.1:", Port]),
+    Referer = {"Referer", Host},
+    Body1 = "{\"body\":\"This is a body.\"}",
+    Body2 = lists:concat(["{\"body\":\"This is a body it should fail",
+        "because there are too many characters.\"}"]),
+    DocBeg = "--bound\r\nContent-Disposition: form-data; name=\"_doc\"\r\n\r\n",
+    DocRest = lists:concat(["\r\n--bound\r\nContent-Disposition:",
+        "form-data; name=\"_attachments\"; filename=\"file.txt\"\r\n",
+        "Content-Type: text/plain\r\n\r\ncontents of file.txt\r\n\r\n",
+        "--bound--"]),
+    Doc1 = lists:concat([DocBeg, Body1, DocRest]),
+    Doc2 = lists:concat([DocBeg, Body2, DocRest]),
+    {ok, _, _, ResultBody} = test_request:post(Url ++ "/" ++ "accept",
+        [?CONTENT_MULTI_FORM, ?AUTH, Referer], Doc1),
+    {[Msg | _]} = ?JSON_DECODE(ResultBody),
+    {ok, _, _, ResultBody1} = test_request:post(Url ++ "/" ++ "fail",
+        [?CONTENT_MULTI_FORM, ?AUTH, Referer], Doc2),
+    {[Msg1 | _]} = ?JSON_DECODE(ResultBody1),
+    [
+        ?_assertEqual({<<"ok">>, true}, Msg),
+        ?_assertEqual({<<"error">>, <<"document_too_large">>}, Msg1)
+    ].
diff --git a/src/chttpd/test/eunit/chttpd_db_test.erl b/src/chttpd/test/eunit/chttpd_db_test.erl
new file mode 100644
index 000000000..c819bdf6e
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_db_test.erl
@@ -0,0 +1,462 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_db_test).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+-define(USER, "chttpd_db_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+-define(CONTENT_JSON, {"Content-Type", "application/json"}).
+-define(DESTHEADER1, {"Destination", "foo%E5%95%8Abar"}).
+-define(DESTHEADER2, {"Destination", "foo%2Fbar%23baz%3Fpow%3Afiz"}).
+-define(FIXTURE_TXT, ?ABS_PATH(?FILE)).
+-define(i2l(I), integer_to_list(I)).
+-define(TIMEOUT, 60). % seconds
+
+%% Register a test admin and create a fresh temp db over HTTP; returns
+%% the db URL handed to every test by the foreach fixture.
+setup() ->
+    Hashed = couch_passwords:hash_admin_password(?PASS),
+    ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+    TmpDb = ?tempdb(),
+    Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+    Port = mochiweb_socket_server:get(chttpd, port),
+    Url = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(TmpDb)]),
+    create_db(Url),
+    Url.
+
+%% Drop the temp db and remove the test admin user.
+teardown(Url) ->
+    delete_db(Url),
+    ok = config:delete("admins", ?USER, _Persist=false).
+
+%% PUT the db; assert the server answered 201 (created) or 202 (accepted).
+create_db(Url) ->
+    {ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
+    ?assert(Status =:= 201 orelse Status =:= 202).
+
+
+%% PUT a minimal doc with the given Id; returns the raw test_request
+%% result tuple so callers can inspect status/body.
+create_doc(Url, Id) ->
+    test_request:put(Url ++ "/" ++ Id,
+        [?CONTENT_JSON, ?AUTH], "{\"mr\": \"rockoartischocko\"}").
+
+%% Delete the test db; the match asserts the server answers 200.
+delete_db(Url) ->
+    {ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
+
+%% EUnit generator: boot couch once, then run each HTTP db-API case
+%% against a fresh temp db (foreach setup/teardown).
+all_test_() ->
+    {
+        "chttpd db tests",
+        {
+            setup,
+            fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+            {
+                foreach,
+                fun setup/0, fun teardown/1,
+                [
+                    fun should_return_ok_true_on_bulk_update/1,
+                    fun should_return_ok_true_on_ensure_full_commit/1,
+                    fun should_return_404_for_ensure_full_commit_on_no_db/1,
+                    fun should_accept_live_as_an_alias_for_continuous/1,
+                    fun should_return_404_for_delete_att_on_notadoc/1,
+                    fun should_return_409_for_del_att_without_rev/1,
+                    fun should_return_200_for_del_att_with_rev/1,
+                    fun should_return_409_for_put_att_nonexistent_rev/1,
+                    fun should_return_update_seq_when_set_on_all_docs/1,
+                    fun should_not_return_update_seq_when_unset_on_all_docs/1,
+                    fun should_return_correct_id_on_doc_copy/1,
+                    fun should_return_400_for_bad_engine/1,
+                    fun should_not_change_db_proper_after_rewriting_shardmap/1,
+                    fun should_succeed_on_all_docs_with_queries_keys/1,
+                    fun should_succeed_on_all_docs_with_queries_limit_skip/1,
+                    fun should_succeed_on_all_docs_with_multiple_queries/1,
+                    fun should_succeed_on_design_docs_with_queries_keys/1,
+                    fun should_succeed_on_design_docs_with_queries_limit_skip/1,
+                    fun should_succeed_on_design_docs_with_multiple_queries/1,
+                    fun should_succeed_on_local_docs_with_queries_keys/1,
+                    fun should_succeed_on_local_docs_with_queries_limit_skip/1,
+                    fun should_succeed_on_local_docs_with_multiple_queries/1
+                ]
+            }
+        }
+    }.
+
+
+%% Update an existing doc through _bulk_docs; the per-doc result must
+%% carry "ok": true. (NOTE: the variable Ref holds the doc's rev.)
+should_return_ok_true_on_bulk_update(Url) ->
+    {timeout, ?TIMEOUT, ?_assertEqual(true,
+        begin
+            {ok, _, _, Body} = create_doc(Url, "testdoc"),
+            {Json} = ?JSON_DECODE(Body),
+            Ref = couch_util:get_value(<<"rev">>, Json, undefined),
+            NewDoc = "{\"docs\": [{\"_rev\": \"" ++ ?b2l(Ref) ++ "\", \"_id\": \"testdoc\"}]}",
+            {ok, _, _, ResultBody} = test_request:post(Url ++ "/_bulk_docs/",
+                [?CONTENT_JSON, ?AUTH], NewDoc),
+            ResultJson = ?JSON_DECODE(ResultBody),
+            {InnerJson} = lists:nth(1, ResultJson),
+            couch_util:get_value(<<"ok">>, InnerJson, undefined)
+        end)}.
+
+
+%% _ensure_full_commit on an existing db answers 201 with "ok": true.
+should_return_ok_true_on_ensure_full_commit(Url0) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        Url = Url0 ++ "/_ensure_full_commit",
+        {ok, RC, _, Body} = test_request:post(Url, [?CONTENT_JSON, ?AUTH], []),
+        {Json} = ?JSON_DECODE(Body),
+        ?assertEqual(201, RC),
+        ?assert(couch_util:get_value(<<"ok">>, Json))
+    end)}.
+
+
+%% _ensure_full_commit on a missing db answers 404 with a not_found error.
+should_return_404_for_ensure_full_commit_on_no_db(Url0) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        Url = Url0 ++ "-missing-db" ++ "/_ensure_full_commit",
+        {ok, RC, _, Body} = test_request:post(Url, [?CONTENT_JSON, ?AUTH], []),
+        {Json} = ?JSON_DECODE(Body),
+        ?assertEqual(404, RC),
+        ?assertEqual(<<"not_found">>, couch_util:get_value(<<"error">>, Json))
+    end)}.
+
+
+%% _changes?feed=live must behave like feed=continuous: after writing a
+%% doc, the last_seq reported by a fresh live feed must have advanced.
+should_accept_live_as_an_alias_for_continuous(Url) ->
+    %% Parse the last non-empty line of the continuous feed and pull out
+    %% its "last_seq" field; a decode failure aborts the test.
+    GetLastSeq = fun(Bin) ->
+        Parts = binary:split(Bin, <<"\n">>, [global]),
+        Filtered = [P || P <- Parts, size(P) > 0],
+        LastSeqBin = lists:last(Filtered),
+        {Result} = try ?JSON_DECODE(LastSeqBin) of
+            Data -> Data
+        catch
+            _:_ ->
+                ?assert(false) % should not happen, abort
+        end,
+        couch_util:get_value(<<"last_seq">>, Result, undefined)
+    end,
+    {timeout, ?TIMEOUT, ?_test(begin
+        {ok, _, _, ResultBody1} =
+            test_request:get(Url ++ "/_changes?feed=live&timeout=1", [?AUTH]),
+        LastSeq1 = GetLastSeq(ResultBody1),
+
+        {ok, _, _, _} = create_doc(Url, "testdoc2"),
+        {ok, _, _, ResultBody2} =
+            test_request:get(Url ++ "/_changes?feed=live&timeout=1", [?AUTH]),
+        LastSeq2 = GetLastSeq(ResultBody2),
+
+        ?assertNotEqual(LastSeq1, LastSeq2)
+    end)}.
+
+
+%% Deleting an attachment on a doc that does not exist answers 404, and
+%% the DELETE must not have created the doc as a side effect.
+should_return_404_for_delete_att_on_notadoc(Url) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        {ok, RC, _, RespBody} = test_request:delete(
+            Url ++ "/notadoc/att.pdf",
+            [?CONTENT_JSON, ?AUTH],
+            []
+        ),
+        ?assertEqual(404, RC),
+        ?assertEqual(
+            {[{<<"error">>,<<"not_found">>},
+              {<<"reason">>,<<"missing">>}]},
+            jiffy:decode(RespBody)
+        ),
+        %% The doc must still be absent after the failed attachment delete.
+        {ok, RC1, _, _} = test_request:get(
+            Url ++ "/notadoc",
+            [?CONTENT_JSON, ?AUTH],
+            []
+        ),
+        ?assertEqual(404, RC1)
+    end)}.
+
+
+%% Deleting an attachment without supplying ?rev= answers 409 (conflict).
+should_return_409_for_del_att_without_rev(Url) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        {ok, RC, _, _} = test_request:put(
+            Url ++ "/testdoc3",
+            [?CONTENT_JSON, ?AUTH],
+            jiffy:encode(attachment_doc())
+        ),
+        ?assertEqual(201, RC),
+
+        {ok, RC1, _, _} = test_request:delete(
+            Url ++ "/testdoc3/file.erl",
+            [?CONTENT_JSON, ?AUTH],
+            []
+        ),
+        ?assertEqual(409, RC1)
+    end)}.
+
+
+%% Deleting an attachment with the doc's current rev answers 200.
+should_return_200_for_del_att_with_rev(Url) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        {ok, RC, _Headers, RespBody} = test_request:put(
+            Url ++ "/testdoc4",
+            [?CONTENT_JSON, ?AUTH],
+            jiffy:encode(attachment_doc())
+        ),
+        ?assertEqual(201, RC),
+
+        %% Use the rev from the PUT response for the attachment delete.
+        {ResultJson} = ?JSON_DECODE(RespBody),
+        Rev = couch_util:get_value(<<"rev">>, ResultJson, undefined),
+
+        {ok, RC1, _, _} = test_request:delete(
+            Url ++ "/testdoc4/file.erl?rev=" ++ Rev,
+            [?CONTENT_JSON, ?AUTH],
+            []
+        ),
+        ?assertEqual(200, RC1)
+    end)}.
+
+
+%% PUTting an attachment against a rev that does not exist answers 409
+%% with a not_found/missing_rev body.
+should_return_409_for_put_att_nonexistent_rev(Url) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        {ok, RC, _Headers, RespBody} = test_request:put(
+            Url ++ "/should_return_404/file.erl?rev=1-000",
+            [?CONTENT_JSON, ?AUTH],
+            jiffy:encode(attachment_doc())
+        ),
+        ?assertEqual(409, RC),
+        ?assertMatch({[
+            {<<"error">>,<<"not_found">>},
+            {<<"reason">>,<<"missing_rev">>}]},
+            ?JSON_DECODE(RespBody))
+    end)}.
+
+
+%% _all_docs?update_seq=true must include both update_seq and offset.
+should_return_update_seq_when_set_on_all_docs(Url) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 3)],
+        {ok, RC, _, RespBody} = test_request:get(Url ++ "/_all_docs/"
+            ++ "?update_seq=true&keys=[\"testdoc1\"]",[?CONTENT_JSON, ?AUTH]),
+        ?assertEqual(200, RC),
+        {ResultJson} = ?JSON_DECODE(RespBody),
+        ?assertNotEqual(undefined,
+            couch_util:get_value(<<"update_seq">>, ResultJson)),
+        ?assertNotEqual(undefined,
+            couch_util:get_value(<<"offset">>, ResultJson))
+    end)}.
+
+
+%% _all_docs?update_seq=false must omit update_seq but keep offset.
+should_not_return_update_seq_when_unset_on_all_docs(Url) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 3)],
+        {ok, RC, _, RespBody} = test_request:get(Url ++ "/_all_docs/"
+            ++ "?update_seq=false&keys=[\"testdoc1\"]",[?CONTENT_JSON, ?AUTH]),
+        ?assertEqual(200, RC),
+        {ResultJson} = ?JSON_DECODE(RespBody),
+        ?assertEqual(undefined,
+            couch_util:get_value(<<"update_seq">>, ResultJson)),
+        ?assertNotEqual(undefined,
+            couch_util:get_value(<<"offset">>, ResultJson))
+    end)}.
+
+
+%% COPY must percent-decode the Destination header to form the new doc
+%% id: a UTF-8 sequence (the bytes below spell "foo\x{554a}bar") and
+%% reserved characters (/ # ? :) must both round-trip.
+should_return_correct_id_on_doc_copy(Url) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        {ok, _, _, _} = create_doc(Url, "testdoc"),
+        {_, _, _, ResultBody1} = test_request:copy(Url ++ "/testdoc/",
+            [?CONTENT_JSON, ?AUTH, ?DESTHEADER1]),
+        {ResultJson1} = ?JSON_DECODE(ResultBody1),
+        Id1 = couch_util:get_value(<<"id">>, ResultJson1),
+
+        {_, _, _, ResultBody2} = test_request:copy(Url ++ "/testdoc/",
+            [?CONTENT_JSON, ?AUTH, ?DESTHEADER2]),
+        {ResultJson2} = ?JSON_DECODE(ResultBody2),
+        Id2 = couch_util:get_value(<<"id">>, ResultJson2),
+        [
+            ?assertEqual(<<102,111,111,229,149,138,98,97,114>>, Id1),
+            ?assertEqual(<<"foo/bar#baz?pow:fiz">>, Id2)
+        ]
+    end)}.
+
+
+%% EJSON doc carrying this test file as an inline base64 attachment
+%% named "file.erl".
+attachment_doc() ->
+    {ok, FileData} = file:read_file(?FIXTURE_TXT),
+    Att = {[
+        {<<"content_type">>, <<"text/plain">>},
+        {<<"data">>, base64:encode(FileData)}
+    ]},
+    {[{<<"_attachments">>, {[{<<"file.erl">>, Att}]}}]}.
+
+
+%% Creating a db with an unknown ?engine= value answers 400.
+should_return_400_for_bad_engine(_) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        TmpDb = ?tempdb(),
+        Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+        Port = mochiweb_socket_server:get(chttpd, port),
+        BaseUrl = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(TmpDb)]),
+        Url = BaseUrl ++ "?engine=cowabunga",
+        {ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
+        ?assertEqual(400, Status)
+    end)}.
+
+
+%% JSON round-tripping a shard-map doc must not alter the shards derived
+%% from it (guards against property loss/reordering in the shard doc).
+should_not_change_db_proper_after_rewriting_shardmap(_) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        TmpDb = ?tempdb(),
+        Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+        Port = mochiweb_socket_server:get(chttpd, port),
+        %% NOTE(review): AdmPort is fetched but unused below -- confirm
+        %% whether backdoor-port access was intended here.
+        AdmPort = mochiweb_socket_server:get(couch_httpd, port),
+
+        BaseUrl = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(TmpDb)]),
+        Url = BaseUrl ++ "?partitioned=true&q=1",
+        {ok, 201, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
+
+        ShardDbName = ?l2b(config:get("mem3", "shards_db", "_dbs")),
+        {ok, ShardDb} = mem3_util:ensure_exists(ShardDbName),
+        {ok, #doc{body = {Props}}} = couch_db:open_doc(
+            ShardDb, TmpDb, [ejson_body]),
+        Shards = mem3_util:build_shards(TmpDb, Props),
+
+        %% Encode/decode the shard doc and rebuild; shards must be equal.
+        {Prop2} = ?JSON_DECODE(?JSON_ENCODE({Props})),
+        Shards2 = mem3_util:build_shards(TmpDb, Prop2),
+        ?assertEqual(Shards2, Shards)
+    end)}.
+
+
+%% _all_docs/queries with a keys query returns one result set holding
+%% exactly the two requested rows.
+should_succeed_on_all_docs_with_queries_keys(Url) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+        QueryDoc = "{\"queries\": [{\"keys\": [ \"testdoc3\", \"testdoc8\"]}]}",
+        {ok, RC, _, RespBody} = test_request:post(Url ++ "/_all_docs/queries/",
+            [?CONTENT_JSON, ?AUTH], QueryDoc),
+        ?assertEqual(200, RC),
+        {ResultJson} = ?JSON_DECODE(RespBody),
+        ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+        {InnerJson} = lists:nth(1, ResultJsonBody),
+        ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson)))
+    end)}.
+
+
+should_succeed_on_all_docs_with_queries_limit_skip(Url) ->
+ {timeout, ?TIMEOUT, ?_test(begin
+ [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc = "{\"queries\": [{\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(Url ++ "/_all_docs/queries/",
+ [?CONTENT_JSON, ?AUTH], QueryDoc),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, couch_util:get_value(<<"offset">>, InnerJson)),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson)))
+ end)}.
+
+
+should_succeed_on_all_docs_with_multiple_queries(Url) ->
+ {timeout, ?TIMEOUT, ?_test(begin
+ [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc = "{\"queries\": [{\"keys\": [ \"testdoc3\", \"testdoc8\"]},
+ {\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(Url ++ "/_all_docs/queries/",
+ [?CONTENT_JSON, ?AUTH], QueryDoc),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson1} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson1))),
+ {InnerJson2} = lists:nth(2, ResultJsonBody),
+ ?assertEqual(2, couch_util:get_value(<<"offset">>, InnerJson2)),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson2)))
+ end)}.
+
+
+should_succeed_on_design_docs_with_queries_keys(Url) ->
+ {timeout, ?TIMEOUT, ?_test(begin
+ [create_doc(Url, "_design/ddoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc = "{\"queries\": [{\"keys\": [ \"_design/ddoc3\",
+ \"_design/ddoc8\"]}]}",
+ {ok, RC, _, RespBody} = test_request:post(Url ++
+ "/_design_docs/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson)))
+ end)}.
+
+
+should_succeed_on_design_docs_with_queries_limit_skip(Url) ->
+ {timeout, ?TIMEOUT, ?_test(begin
+ [create_doc(Url, "_design/ddoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc = "{\"queries\": [{\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(Url ++
+ "/_design_docs/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, couch_util:get_value(<<"offset">>, InnerJson)),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson)))
+ end)}.
+
+
+should_succeed_on_design_docs_with_multiple_queries(Url) ->
+ {timeout, ?TIMEOUT, ?_test(begin
+ [create_doc(Url, "_design/ddoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc = "{\"queries\": [{\"keys\": [ \"_design/ddoc3\",
+ \"_design/ddoc8\"]}, {\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(Url ++
+ "/_design_docs/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson1} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson1))),
+ {InnerJson2} = lists:nth(2, ResultJsonBody),
+ ?assertEqual(2, couch_util:get_value(<<"offset">>, InnerJson2)),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson2)))
+ end)}.
+
+
+should_succeed_on_local_docs_with_queries_keys(Url) ->
+ {timeout, ?TIMEOUT, ?_test(begin
+ [create_doc(Url, "_local/doc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc = "{\"queries\": [{\"keys\":
+ [ \"_local/doc3\", \"_local/doc8\"]}]}",
+ {ok, RC, _, RespBody} = test_request:post(Url ++ "/_local_docs/queries/",
+ [?CONTENT_JSON, ?AUTH], QueryDoc),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson)))
+ end)}.
+
+
+should_succeed_on_local_docs_with_queries_limit_skip(Url) ->
+ {timeout, ?TIMEOUT, ?_test(begin
+ [create_doc(Url, "_local/doc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc = "{\"queries\": [{\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(Url ++
+ "/_local_docs/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson)))
+ end)}.
+
+
+should_succeed_on_local_docs_with_multiple_queries(Url) ->
+ {timeout, ?TIMEOUT, ?_test(begin
+ [create_doc(Url, "_local/doc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+ QueryDoc = "{\"queries\": [{\"keys\": [ \"_local/doc3\",
+ \"_local/doc8\"]}, {\"limit\": 5, \"skip\": 2}]}",
+ {ok, RC, _, RespBody} = test_request:post(Url ++
+ "/_local_docs/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
+ ?assertEqual(200, RC),
+ {ResultJson} = ?JSON_DECODE(RespBody),
+ ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+ {InnerJson1} = lists:nth(1, ResultJsonBody),
+ ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson1))),
+ {InnerJson2} = lists:nth(2, ResultJsonBody),
+ ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson2)))
+ end)}.
diff --git a/src/chttpd/test/eunit/chttpd_dbs_info_test.erl b/src/chttpd/test/eunit/chttpd_dbs_info_test.erl
new file mode 100644
index 000000000..5b61d8831
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_dbs_info_test.erl
@@ -0,0 +1,169 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_dbs_info_test).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+-define(USER, "chttpd_db_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+-define(CONTENT_JSON, {"Content-Type", "application/json"}).
+
+
+setup() -> % per-test fixture: admin user + two databases (db1, db2); returns server base URL
+ Hashed = couch_passwords:hash_admin_password(?PASS),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+ Port = mochiweb_socket_server:get(chttpd, port),
+ Url = lists:concat(["http://", Addr, ":", Port, "/"]),
+ Db1Url = lists:concat([Url, "db1"]),
+ create_db(Db1Url),
+ Db2Url = lists:concat([Url, "db2"]),
+ create_db(Db2Url),
+ Url.
+
+teardown(Url) -> % undo setup/0: drop both dbs, remove the admin
+ Db1Url = lists:concat([Url, "db1"]),
+ Db2Url = lists:concat([Url, "db2"]),
+ delete_db(Db1Url),
+ delete_db(Db2Url),
+ ok = config:delete("admins", ?USER, _Persist=false).
+
+create_db(Url) -> % 201 created or 202 accepted (quorum not yet met) both count as success
+ {ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
+ ?assert(Status =:= 201 orelse Status =:= 202).
+
+delete_db(Url) -> % crash the test on any non-200 delete
+ {ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
+
+dbs_info_test_() -> % top-level EUnit generator for the _dbs_info endpoint
+ {
+ "chttpd dbs info tests",
+ {
+ setup,
+ fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+ {
+ foreach,
+ fun setup/0, fun teardown/1,
+ [
+ fun should_return_error_for_get_db_info/1,
+ fun should_return_dbs_info_for_single_db/1,
+ fun should_return_dbs_info_for_multiple_dbs/1,
+ fun should_return_error_for_exceeded_keys/1,
+ fun should_return_error_for_missing_keys/1,
+ fun should_return_dbs_info_for_dbs_with_mixed_state/1
+ ]
+ }
+ }
+ }.
+
+
+should_return_error_for_get_db_info(Url) -> % _dbs_info only accepts POST; GET must 405
+ ?_test(begin
+ {ok, Code, _, ResultBody} = test_request:get(Url ++ "/_dbs_info?"
+ ++ "keys=[\"db1\"]", [?CONTENT_JSON, ?AUTH]),
+ {Body} = jiffy:decode(ResultBody),
+ [
+ ?assertEqual(<<"method_not_allowed">>,
+ couch_util:get_value(<<"error">>, Body)),
+ ?assertEqual(405, Code)
+ ]
+ end).
+
+
+should_return_dbs_info_for_single_db(Url) -> % one key -> one entry with key + info
+ ?_test(begin
+ NewDoc = "{\"keys\": [\"db1\"]}",
+ {ok, _, _, ResultBody} = test_request:post(Url ++ "/_dbs_info/",
+ [?CONTENT_JSON, ?AUTH], NewDoc),
+ BodyJson = jiffy:decode(ResultBody),
+ {Db1Data} = lists:nth(1, BodyJson),
+ [
+ ?assertEqual(<<"db1">>,
+ couch_util:get_value(<<"key">>, Db1Data)),
+ ?assertNotEqual(undefined,
+ couch_util:get_value(<<"info">>, Db1Data))
+ ]
+ end).
+
+
+should_return_dbs_info_for_multiple_dbs(Url) -> % results come back in request-key order
+ ?_test(begin
+ NewDoc = "{\"keys\": [\"db1\", \"db2\"]}",
+ {ok, _, _, ResultBody} = test_request:post(Url ++ "/_dbs_info/",
+ [?CONTENT_JSON, ?AUTH], NewDoc),
+ BodyJson = jiffy:decode(ResultBody),
+ {Db1Data} = lists:nth(1, BodyJson),
+ {Db2Data} = lists:nth(2, BodyJson),
+ [
+ ?assertEqual(<<"db1">>,
+ couch_util:get_value(<<"key">>, Db1Data)),
+ ?assertNotEqual(undefined,
+ couch_util:get_value(<<"info">>, Db1Data)),
+ ?assertEqual(<<"db2">>,
+ couch_util:get_value(<<"key">>, Db2Data)),
+ ?assertNotEqual(undefined,
+ couch_util:get_value(<<"info">>, Db2Data))
+ ]
+ end).
+
+
+should_return_error_for_exceeded_keys(Url) -> % more keys than max_db_number_for_dbs_info_req -> 400
+ ?_test(begin
+ NewDoc = "{\"keys\": [\"db1\", \"db2\"]}",
+ ok = config:set("chttpd", "max_db_number_for_dbs_info_req", "1"), % lower the limit below the 2 keys sent
+ {ok, Code, _, ResultBody} = test_request:post(Url ++ "/_dbs_info/",
+ [?CONTENT_JSON, ?AUTH], NewDoc),
+ {Body} = jiffy:decode(ResultBody),
+ ok = config:delete("chttpd", "max_db_number_for_dbs_info_req"), % restore default before asserting
+ [
+ ?assertEqual(<<"bad_request">>,
+ couch_util:get_value(<<"error">>, Body)),
+ ?assertEqual(400, Code)
+ ]
+ end).
+
+
+should_return_error_for_missing_keys(Url) -> % body without a "keys" field -> 400
+ ?_test(begin
+ NewDoc = "{\"missingkeys\": [\"db1\", \"db2\"]}",
+ {ok, Code, _, ResultBody} = test_request:post(Url ++ "/_dbs_info/",
+ [?CONTENT_JSON, ?AUTH], NewDoc),
+ {Body} = jiffy:decode(ResultBody),
+ [
+ ?assertEqual(<<"bad_request">>,
+ couch_util:get_value(<<"error">>, Body)),
+ ?assertEqual(400, Code)
+ ]
+ end).
+
+
+should_return_dbs_info_for_dbs_with_mixed_state(Url) -> % nonexistent db still listed, but with no info
+ ?_test(begin
+ NewDoc = "{\"keys\": [\"db1\", \"noexisteddb\"]}",
+ {ok, _, _, ResultBody} = test_request:post(Url ++ "/_dbs_info/",
+ [?CONTENT_JSON, ?AUTH], NewDoc),
+ Json = jiffy:decode(ResultBody),
+ {Db1Data} = lists:nth(1, Json),
+ {Db2Data} = lists:nth(2, Json),
+ [
+ ?assertEqual(
+ <<"db1">>, couch_util:get_value(<<"key">>, Db1Data)),
+ ?assertNotEqual(undefined,
+ couch_util:get_value(<<"info">>, Db1Data)),
+ ?assertEqual(
+ <<"noexisteddb">>, couch_util:get_value(<<"key">>, Db2Data)),
+ ?assertEqual(undefined, couch_util:get_value(<<"info">>, Db2Data))
+ ]
+ end).
diff --git a/src/chttpd/test/eunit/chttpd_error_info_tests.erl b/src/chttpd/test/eunit/chttpd_error_info_tests.erl
new file mode 100644
index 000000000..fdb015c08
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_error_info_tests.erl
@@ -0,0 +1,168 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_error_info_tests).
+
+-include_lib("eunit/include/eunit.hrl").
+
+
+error_info_test() -> % table-driven check: each input term maps to chttpd:error_info/1's {Code, Error, Reason}
+ Error = <<"error">>,
+ Reason = <<"reason">>,
+ ArgResult = [ % {InputTerm, ExpectedTriple} pairs, checked in order below
+ {
+ bad_request,
+ {400, <<"bad_request">>, <<>>}
+ },
+ {
+ {bad_request, Reason},
+ {400, <<"bad_request">>, Reason}
+ },
+ {
+ {bad_request, "error", "reason"},
+ {400, Error, Reason} % list inputs are binarized in the result
+ },
+ {
+ {query_parse_error, Reason},
+ {400, <<"query_parse_error">>, Reason}
+ },
+ {
+ database_does_not_exist,
+ {404, <<"not_found">>, <<"Database does not exist.">>}
+ },
+ {
+ not_found,
+ {404, <<"not_found">>, <<"missing">>}
+ },
+ {
+ {not_found, Reason},
+ {404, <<"not_found">>, Reason}
+ },
+ {
+ {not_acceptable, Reason},
+ {406, <<"not_acceptable">>, Reason}
+ },
+ {
+ conflict,
+ {409, <<"conflict">>, <<"Document update conflict.">>}
+ },
+ {
+ {conflict, Reason},
+ %% yes, the reason is ignored
+ {409, <<"conflict">>, <<"Document update conflict.">>}
+ },
+ {
+ {forbidden, Reason},
+ {403, <<"forbidden">>, Reason}
+ },
+ {
+ {forbidden, Error, Reason},
+ {403, Error, Reason}
+ },
+ {
+ {unauthorized, Reason},
+ {401, <<"unauthorized">>, Reason}
+ },
+ {
+ file_exists,
+ {412, <<"file_exists">>,
+ <<"The database could not be created, the file already exists.">>}
+ },
+ {
+ {error, {nodedown, Reason}}, {412, <<"nodedown">>, Reason}
+ },
+ {
+ {maintenance_mode, Reason},
+ {412, <<"nodedown">>, Reason} % maintenance mode is reported as nodedown
+ },
+ {
+ {maintenance_mode, nil, Reason},
+ {412, <<"nodedown">>, Reason}
+ },
+ {
+ {w_quorum_not_met, Reason},
+ {500, <<"write_quorum_not_met">>, Reason}
+ },
+ {
+ request_uri_too_long,
+ {414, <<"too_long">>, <<"the request uri is too long">>}
+ },
+ {
+ {bad_ctype, Reason},
+ {415, <<"bad_content_type">>, Reason}
+ },
+ {
+ requested_range_not_satisfiable,
+ {416, <<"requested_range_not_satisfiable">>,
+ <<"Requested range not satisfiable">>}
+ },
+ {
+ {error, {illegal_database_name, <<"foo">>}},
+ {400, <<"illegal_database_name">>,
+ <<"Name: 'foo'. Only lowercase characters (a-z), digits (0-9), and any of"
+ " the characters _, $, (, ), +, -, and / are allowed."
+ " Must begin with a letter.">>}
+ },
+ {
+ {Error, {illegal_docid,1}},
+ {400, <<"illegal_docid">>, 1}
+ },
+ {
+ {missing_stub, Reason},
+ {412, <<"missing_stub">>, Reason}
+ },
+ {
+ request_entity_too_large,
+ {413, <<"too_large">>, <<"the request entity is too large">>}
+ },
+ {
+ not_implemented,
+ {501, <<"not_implemented">>,
+ <<"this feature is not yet implemented">>}
+ },
+ {
+ timeout,
+ {500, <<"timeout">>,
+ <<"The request could not be processed in a reasonable"
+ " amount of time.">>}
+ },
+ {
+ {timeout, Error},
+ {500, <<"timeout">>,
+ <<"The request could not be processed in a reasonable"
+ " amount of time.">>}
+ },
+ {
+ {Error, null},
+ {500, <<"unknown_error">>, Error}
+ },
+ {
+ {Error, Reason},
+ {500, Error, Reason}
+ },
+ {
+ {Error, nil, [{}]},
+ {500, <<"unknown_error">>, Error}
+ },
+ {
+ {Error, Reason, [{}]},
+ {500, Error, Reason}
+ },
+ {
+ Error,
+ {500, <<"unknown_error">>, Error} % catch-all clause for unrecognized terms
+ }
+ ],
+
+ lists:foreach(fun({Arg, Result}) ->
+ ?assertEqual(Result, chttpd:error_info(Arg))
+ end, ArgResult).
diff --git a/src/chttpd/test/eunit/chttpd_handlers_tests.erl b/src/chttpd/test/eunit/chttpd_handlers_tests.erl
new file mode 100644
index 000000000..f3e8f5dcd
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_handlers_tests.erl
@@ -0,0 +1,87 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_handlers_tests).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+
+setup() -> % no db needed; just compute the cluster base URL
+ Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+ Port = mochiweb_socket_server:get(chttpd, port),
+ BaseUrl = lists:concat(["http://", Addr, ":", Port]),
+ BaseUrl.
+
+teardown(_Url) ->
+ ok.
+
+
+replicate_test_() -> % generator: verify _replicate escapes db names in source/target URLs
+ {
+ "_replicate",
+ {
+ setup,
+ fun chttpd_test_util:start_couch/0,
+ fun chttpd_test_util:stop_couch/1,
+ {
+ foreach,
+ fun setup/0, fun teardown/1,
+ [
+ fun should_escape_dbname_on_replicate/1
+ ]
+ }
+ }
+ }.
+
+
+should_escape_dbname_on_replicate(Url) -> % "bar/baz" target must come back percent-encoded as bar%2Fbaz
+ ?_test(
+ begin
+ UrlBin = ?l2b(Url),
+ Request = couch_util:json_encode({[
+ {<<"source">>, <<UrlBin/binary, "/foo%2Fbar">>},
+ {<<"target">>, <<"bar/baz">>},
+ {<<"create_target">>, true}
+ ]}),
+ {ok, 200, _, Body} = request_replicate(Url ++ "/_replicate", Request),
+ JSON = couch_util:json_decode(Body),
+
+ Source = json_value(JSON, [<<"source">>]),
+ Target = json_value(JSON, [<<"target">>, <<"url">>]),
+ ?assertEqual(<<UrlBin/binary, "/foo%2Fbar">>, Source), % already-escaped source preserved
+ ?assertEqual(<<UrlBin/binary, "/bar%2Fbaz">>, Target)
+ end).
+
+
+json_value(JSON, Keys) -> % fetch a nested value by key path
+ couch_util:get_nested_json_value(JSON, Keys).
+
+request_replicate(Url, Body) -> % POST to _replicate with the handler mocked to echo the parsed body
+ Headers = [{"Content-Type", "application/json"}],
+ Handler = {chttpd_misc, handle_replicate_req},
+ request(post, Url, Headers, Body, Handler, fun(Req) ->
+ chttpd:send_json(Req, 200, get(post_body))
+ end).
+
+request(Method, Url, Headers, Body, {M, F}, MockFun) -> % run one HTTP request with M:F meck-mocked; always unloads the mock
+ meck:new(M, [passthrough, non_strict]),
+ try
+ meck:expect(M, F, MockFun),
+ Result = test_request:Method(Url, Headers, Body),
+ ?assert(meck:validate(M)),
+ Result
+ catch Kind:Reason ->
+ {Kind, Reason} % surface the failure as data instead of crashing the fixture
+ after
+ meck:unload(M)
+ end.
diff --git a/src/chttpd/test/eunit/chttpd_open_revs_error_test.erl b/src/chttpd/test/eunit/chttpd_open_revs_error_test.erl
new file mode 100644
index 000000000..d53d370f8
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_open_revs_error_test.erl
@@ -0,0 +1,112 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_open_revs_error_test).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+-define(USER, "chttpd_db_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+-define(CONTENT_JSON, {"Content-Type", "application/json"}).
+-define(CONTENT_MULTI_FORM, {"Content-Type",
+ "multipart/form-data;boundary=\"bound\""}).
+
+setup() -> % admin + temp db + fabric mocked (passthrough until a test overrides open_revs)
+ Hashed = couch_passwords:hash_admin_password(?PASS),
+ ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+ TmpDb = ?tempdb(),
+ Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+ Port = mochiweb_socket_server:get(chttpd, port),
+ Url = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(TmpDb)]),
+ mock(fabric),
+ create_db(Url),
+ Url.
+
+teardown(Url) ->
+ delete_db(Url),
+ (catch meck:unload(fabric)), % tolerate an already-unloaded mock
+ ok = config:delete("admins", ?USER, _Persist=false).
+
+create_db(Url) -> % 201 or 202 (accepted) both count as created
+ {ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
+ ?assert(Status =:= 201 orelse Status =:= 202).
+
+
+create_doc(Url, Id) ->
+ test_request:put(Url ++ "/" ++ Id,
+ [?CONTENT_JSON, ?AUTH], "{\"mr\": \"rockoartischocko\"}").
+
+delete_db(Url) ->
+ {ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
+
+open_revs_error_test_() -> % generator: fabric:open_revs failures must surface as HTTP 503
+ {
+ "open revs error tests",
+ {
+ setup,
+ fun chttpd_test_util:start_couch/0,
+ fun chttpd_test_util:stop_couch/1,
+ {
+ foreach,
+ fun setup/0, fun teardown/1,
+ [
+ fun should_return_503_error_for_open_revs_get/1,
+ fun should_return_503_error_for_open_revs_post_form/1
+ ]
+ }
+ }
+ }.
+
+should_return_503_error_for_open_revs_get(Url) -> % NOTE(review): setup work runs at generator time, only the final assert is lazy
+ {ok, _, _, Body} = create_doc(Url, "testdoc"),
+ {Json} = ?JSON_DECODE(Body),
+ Ref = couch_util:get_value(<<"rev">>, Json, undefined),
+ mock_open_revs({error, all_workers_died}),
+ {ok, Code, _, _} = test_request:get(Url ++
+ "/testdoc?rev=" ++ ?b2l(Ref), [?AUTH]),
+ ?_assertEqual(503, Code).
+
+should_return_503_error_for_open_revs_post_form(Url) -> % same 503 path via multipart/form-data doc update
+ Port = mochiweb_socket_server:get(chttpd, port),
+ Host = lists:concat([ "http://127.0.0.1:", Port]),
+ Referer = {"Referer", Host},
+ Body1 = "{\"body\":\"This is a body.\"}",
+ DocBeg = "--bound\r\nContent-Disposition: form-data; name=\"_doc\"\r\n\r\n",
+ DocRev = "--bound\r\nContent-Disposition: form-data; name=\"_rev\"\r\n\r\n",
+ DocRest = "\r\n--bound\r\nContent-Disposition:"
+ "form-data; name=\"_attachments\"; filename=\"file.txt\"\r\n"
+ "Content-Type: text/plain\r\n\r\ncontents of file.txt\r\n\r\n"
+ "--bound--",
+ Doc1 = lists:concat([DocBeg, Body1, DocRest]),
+ {ok, _, _, ResultBody} = test_request:post(Url ++ "/" ++ "RevDoc",
+ [?CONTENT_MULTI_FORM, ?AUTH, Referer], Doc1),
+ {Json} = ?JSON_DECODE(ResultBody),
+ Ref = couch_util:get_value(<<"rev">>, Json, undefined),
+ Doc2 = lists:concat([DocRev, ?b2l(Ref) , DocRest]), % second update against the rev just created
+
+ mock_open_revs({error, all_workers_died}),
+ {ok, Code, _, ResultBody1} = test_request:post(Url ++ "/" ++ "RevDoc",
+ [?CONTENT_MULTI_FORM, ?AUTH, Referer], Doc2),
+ {Json1} = ?JSON_DECODE(ResultBody1),
+ ErrorMessage = couch_util:get_value(<<"error">>, Json1),
+ [
+ ?_assertEqual(503, Code),
+ ?_assertEqual(<<"service unvailable">>, ErrorMessage) % sic: matches chttpd's misspelled 503 reason string
+ ].
+
+mock_open_revs(RevsResp) -> % make every fabric:open_revs/4 call return RevsResp
+ ok = meck:expect(fabric, open_revs, fun(_, _, _, _) -> RevsResp end).
+
+mock(fabric) ->
+ ok = meck:new(fabric, [passthrough]).
diff --git a/src/chttpd/test/eunit/chttpd_plugin_tests.erl b/src/chttpd/test/eunit/chttpd_plugin_tests.erl
new file mode 100644
index 000000000..36572a419
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_plugin_tests.erl
@@ -0,0 +1,187 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_plugin_tests).
+
+-export([
+ before_request/1,
+ after_request/2,
+ handle_error/1,
+ before_response/4,
+ before_serve_file/5
+]).
+
+-export([ %% couch_epi_plugin behaviour
+ app/0,
+ providers/0,
+ services/0,
+ data_providers/0,
+ data_subscriptions/0,
+ processes/0,
+ notify/3
+]).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+%% couch_epi_plugin behaviour
+
+app() -> test_app. % couch_epi_plugin callbacks: register this module as a chttpd provider
+providers() -> [{chttpd, ?MODULE}].
+services() -> [].
+data_providers() -> [].
+data_subscriptions() -> [].
+processes() -> [].
+notify(_, _, _) -> ok.
+
+
+setup() -> % install this module into the chttpd epi dispatch for the duration of the suite
+ couch_tests:setup([
+ couch_epi_dispatch:dispatch(chttpd, ?MODULE)
+ ]).
+
+teardown(Ctx) ->
+ couch_tests:teardown(Ctx).
+
+before_request({true, Id}) -> [{true, [{before_request, Id}]}]; % {true,_} = handled, {false,_} = pass through, {fail,_} = throw
+before_request({false, Id}) -> [{false, Id}];
+before_request({fail, Id}) -> throw({before_request, Id}).
+
+after_request({true, Id}, A) -> [{true, [{after_request, Id}]}, A];
+after_request({false, Id}, A) -> [{false, Id}, A];
+after_request({fail, Id}, _A) -> throw({after_request, Id}).
+
+handle_error({true, Id}) -> [{true, [{handle_error, Id}]}];
+handle_error({false, Id}) -> [{false, Id}];
+handle_error({fail, Id}) -> throw({handle_error, Id}).
+
+before_response({true, Id}, A, B, C) ->
+ [{true, [{before_response, Id}]}, A, B, C];
+before_response({false, Id}, A, B, C) ->
+ [{false, Id}, A, B, C];
+before_response({fail, Id}, _A, _B, _C) ->
+ throw({before_response, Id}).
+
+before_serve_file({true, Id}, A, B, C, D) ->
+ [{true, [{before_serve_file, Id}]}, A, B, C, D];
+before_serve_file({false, Id}, A, B, C, D) ->
+ [{false, Id}, A, B, C, D];
+before_serve_file({fail, _Id}, _A, _B, _C, _D) ->
+ throw(before_serve_file). % note: throws a bare atom, unlike the other callbacks
+
+callback_test_() -> % one match / no-match / throw case per chttpd_plugin hook
+ {
+ "callback tests",
+ {
+ setup, fun setup/0, fun teardown/1,
+ [
+ fun before_request_match/0,
+ fun before_request_no_match/0,
+ fun before_request_throw/0,
+
+ fun after_request_match/0,
+ fun after_request_no_match/0,
+ fun after_request_throw/0,
+
+ fun handle_error_match/0,
+ fun handle_error_no_match/0,
+ fun handle_error_throw/0,
+
+ fun before_response_match/0,
+ fun before_response_no_match/0,
+ fun before_response_throw/0,
+
+ fun before_serve_file_match/0,
+ fun before_serve_file_no_match/0,
+ fun before_serve_file_throw/0
+ ]
+ }
+ }.
+
+
+before_request_match() ->
+ ?assertEqual(
+ {ok, {true, [{before_request, foo}]}},
+ chttpd_plugin:before_request({true, foo})).
+
+before_request_no_match() ->
+ ?assertEqual(
+ {ok, {false, foo}},
+ chttpd_plugin:before_request({false, foo})).
+
+before_request_throw() ->
+ ?assertThrow(
+ {before_request, foo},
+ chttpd_plugin:before_request({fail, foo})).
+
+
+after_request_match() ->
+ ?assertEqual(
+ {ok, bar},
+ chttpd_plugin:after_request({true, foo}, bar)).
+
+after_request_no_match() ->
+ ?assertEqual(
+ {ok, bar},
+ chttpd_plugin:after_request({false, foo}, bar)).
+
+after_request_throw() ->
+ ?assertThrow(
+ {after_request, foo},
+ chttpd_plugin:after_request({fail, foo}, bar)).
+
+
+handle_error_match() ->
+ ?assertEqual(
+ {true, [{handle_error, foo}]},
+ chttpd_plugin:handle_error({true, foo})).
+
+handle_error_no_match() ->
+ ?assertEqual(
+ {false, foo},
+ chttpd_plugin:handle_error({false, foo})).
+
+handle_error_throw() ->
+ ?assertThrow(
+ {handle_error, foo},
+ chttpd_plugin:handle_error({fail, foo})).
+
+before_response_match() ->
+ ?assertEqual(
+ {ok, {{true, [{before_response, foo}]}, 1, 2, 3}},
+ chttpd_plugin:before_response({true, foo}, 1, 2, 3)).
+
+before_response_no_match() ->
+ ?assertEqual(
+ {ok, {{false, foo}, 1, 2, 3}},
+ chttpd_plugin:before_response({false, foo}, 1, 2, 3)).
+
+before_response_throw() ->
+ ?assertThrow(
+ {before_response, foo},
+ chttpd_plugin:before_response({fail, foo}, 1, 2, 3)).
+
+
+before_serve_file_match() ->
+ ?assertEqual(
+ {ok, {{true, [{before_serve_file, foo}]}, 1, 2, 3, 4}},
+ chttpd_plugin:before_serve_file({true, foo}, 1, 2, 3, 4)).
+
+before_serve_file_no_match() ->
+ ?assertEqual(
+ {ok, {{false, foo}, 1, 2, 3, 4}},
+ chttpd_plugin:before_serve_file({false, foo}, 1, 2, 3, 4)).
+
+before_serve_file_throw() ->
+ ?assertThrow(
+ before_serve_file,
+ chttpd_plugin:before_serve_file({fail, foo}, 1, 2, 3, 4)).
diff --git a/src/chttpd/test/eunit/chttpd_prefer_header_test.erl b/src/chttpd/test/eunit/chttpd_prefer_header_test.erl
new file mode 100644
index 000000000..0f43ba437
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_prefer_header_test.erl
@@ -0,0 +1,112 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_prefer_header_test).
+
+-compile(tuple_calls).
+
+-include_lib("couch/include/couch_db.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+
+mock_request(ExcludeHeader) -> % build a minimal #httpd{} carrying the given request headers
+ Headers = mochiweb_headers:make(ExcludeHeader),
+ MochiReq = mochiweb_request:new(nil, 'GET', "/", {1, 1}, Headers),
+ MochiReq:cleanup(), % clear mochiweb's process-dictionary state up front
+ #httpd{mochi_req = MochiReq}.
+
+
+default_headers() -> % full response header set, including the ones prefer_minimal strips
+ [
+ {"Cache-Control","must-revalidate"},
+ {"Content-Type","application/json"},
+ {"Content-Length", "100"},
+ {"ETag","\"12343\""},
+ {"X-Couch-Request-ID","7bd1adab86"},
+ {"X-CouchDB-Body-Time","0"},
+ {"Vary", "Accept-Encoding"},
+ {"Server","CouchDB/2.1.0-f1a1d7f1c (Erlang OTP/19)"}
+ ].
+
+
+minimal_options_headers() -> % default_headers() minus the X-Couch-* entries; matches the mocked prefer_minimal list
+ [
+ {"Cache-Control","must-revalidate"},
+ {"Content-Type","application/json"},
+ {"Content-Length", "100"},
+ {"ETag","\"12343\""},
+ {"Vary", "Accept-Encoding"},
+ {"Server","CouchDB/2.1.0-f1a1d7f1c (Erlang OTP/19)"}
+ ].
+
+
+default_no_exclude_header_test() -> % no Prefer header: response headers pass through untouched
+ Headers = chttpd_prefer_header:maybe_return_minimal(
+ mock_request([]),
+ default_headers()
+ ),
+ ?assertEqual(default_headers(), Headers).
+
+
+unsupported_exclude_header_test() -> % unrecognized Prefer value is ignored
+ Req = mock_request([{"prefer", "Wrong"}]),
+ Headers = chttpd_prefer_header:maybe_return_minimal(Req, default_headers()),
+ ?assertEqual(default_headers(), Headers).
+
+
+empty_header_test() -> % empty Prefer value is ignored
+ Req = mock_request([{"prefer", ""}]),
+ Headers = chttpd_prefer_header:maybe_return_minimal(Req, default_headers()),
+ ?assertEqual(default_headers(), Headers).
+
+setup() -> % mock config so prefer_minimal has a fixed header whitelist
+ ok = meck:new(config),
+ ok = meck:expect(config, get, fun("chttpd", "prefer_minimal", _) ->
+ "Cache-Control, Content-Length, Content-Type, ETag, Server, Vary"
+ end),
+ ok.
+
+
+teardown(_) ->
+ meck:unload(config).
+
+
+exclude_headers_test_() -> % cases needing the config mock; the three plain *_test functions above run without it
+ {
+ "Test Prefer headers",
+ {
+ foreach, fun setup/0, fun teardown/1,
+ [
+ fun minimal_options/1,
+ fun minimal_options_check_header_case/1,
+ fun minimal_options_check_header_value_case/1
+ ]
+ }
+ }.
+
+
+minimal_options(_) -> % canonical "Prefer: return=minimal"
+ Req = mock_request([{"Prefer", "return=minimal"}]),
+ Headers = chttpd_prefer_header:maybe_return_minimal(Req, default_headers()),
+ ?_assertEqual(minimal_options_headers(), Headers).
+
+
+minimal_options_check_header_case(_) -> % header name matching is case-insensitive
+ Req = mock_request([{"prefer", "return=minimal"}]),
+ Headers = chttpd_prefer_header:maybe_return_minimal(Req, default_headers()),
+ ?_assertEqual(minimal_options_headers(), Headers).
+
+
+minimal_options_check_header_value_case(_) -> % header value matching is case-insensitive too
+ Req = mock_request([{"prefer", "RETURN=MINIMAL"}]),
+ Headers = chttpd_prefer_header:maybe_return_minimal(Req, default_headers()),
+ ?_assertEqual(minimal_options_headers(), Headers). \ No newline at end of file
diff --git a/src/chttpd/test/eunit/chttpd_purge_tests.erl b/src/chttpd/test/eunit/chttpd_purge_tests.erl
new file mode 100644
index 000000000..dbd73de1f
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_purge_tests.erl
@@ -0,0 +1,406 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_purge_tests).
+
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+
+-define(USER, "chttpd_db_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+-define(CONTENT_JSON, {"Content-Type", "application/json"}).
+
+
+% Register an admin, create a fresh temp database over HTTP and return
+% its URL; every test case in this module receives this URL.
+setup() ->
+    ok = config:set("admins", ?USER, ?PASS, _Persist=false),
+    TmpDb = ?tempdb(),
+    Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+    Port = mochiweb_socket_server:get(chttpd, port),
+    Url = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(TmpDb)]),
+    create_db(Url),
+    Url.
+
+
+% Drop the temp database and the admin credentials created in setup/0.
+teardown(Url) ->
+    delete_db(Url),
+    ok = config:delete("admins", ?USER, _Persist=false).
+
+
+% PUT the database; 201 (created) or 202 (accepted) both count as success.
+create_db(Url) ->
+    {ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
+    ?assert(Status =:= 201 orelse Status =:= 202).
+
+
+% PUT a doc with a fixed body; returns the raw test_request result so the
+% caller can extract the rev from the response body.
+create_doc(Url, Id) ->
+    test_request:put(Url ++ "/" ++ Id,
+        [?CONTENT_JSON, ?AUTH], "{\"mr\": \"rockoartischocko\"}").
+
+% Same as create_doc/2 but with a caller-supplied "mr" value.
+create_doc(Url, Id, Content) ->
+    test_request:put(Url ++ "/" ++ Id,
+        [?CONTENT_JSON, ?AUTH], "{\"mr\": \"" ++ Content ++ "\"}").
+
+
+% Bulk-insert a list of EJSON docs via _bulk_docs.
+create_docs(Url, Docs) ->
+    test_request:post(Url ++ "/_bulk_docs",
+        [?CONTENT_JSON, ?AUTH], ?JSON_ENCODE({[{docs, Docs}]})).
+
+
+% DELETE the database; crashes (badmatch) unless the server answers 200.
+delete_db(Url) ->
+    {ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
+
+
+% Top-level fixture: start couch once, then run each _purge test case
+% against a fresh temp database (foreach setup/teardown).
+purge_test_() ->
+    {
+        "chttpd db tests",
+        {
+            setup,
+            fun chttpd_test_util:start_couch/0,
+            fun chttpd_test_util:stop_couch/1,
+            {
+                foreach,
+                fun setup/0,
+                fun teardown/1,
+                [
+                    fun test_empty_purge_request/1,
+                    fun test_ok_purge_request/1,
+                    fun test_ok_purge_request_with_101_docid/1,
+                    fun test_accepted_purge_request/1,
+                    fun test_partial_purge_request/1,
+                    fun test_mixed_purge_request/1,
+                    fun test_overmany_ids_or_revs_purge_request/1,
+                    fun test_exceed_limits_on_purge_infos/1,
+                    fun should_error_set_purged_docs_limit_to0/1,
+                    fun test_timeout_set_purged_infos_limit/1
+                ]
+            }
+        }
+    }.
+
+
+% An empty purge body ({}) is accepted and yields a null purge_seq and
+% an empty "purged" object.
+test_empty_purge_request(Url) ->
+    ?_test(begin
+        IdsRevs = "{}",
+        {ok, Status, _, ResultBody} = test_request:post(Url ++ "/_purge/",
+            [?CONTENT_JSON, ?AUTH], IdsRevs),
+        ResultJson = ?JSON_DECODE(ResultBody),
+        ?assert(Status =:= 201 orelse Status =:= 202),
+        ?assertEqual(
+            {[
+                {<<"purge_seq">>, null},
+                {<<"purged">>,{[]}}
+            ]},
+            ResultJson
+        )
+    end).
+
+
+% Purging three existing docs by their current revs reports each doc
+% with exactly the revs that were purged.
+test_ok_purge_request(Url) ->
+    ?_test(begin
+        {ok, _, _, Body} = create_doc(Url, "doc1"),
+        {Json} = ?JSON_DECODE(Body),
+        Rev1 = couch_util:get_value(<<"rev">>, Json, undefined),
+        {ok, _, _, Body2} = create_doc(Url, "doc2"),
+        {Json2} = ?JSON_DECODE(Body2),
+        Rev2 = couch_util:get_value(<<"rev">>, Json2, undefined),
+        {ok, _, _, Body3} = create_doc(Url, "doc3"),
+        {Json3} = ?JSON_DECODE(Body3),
+        Rev3 = couch_util:get_value(<<"rev">>, Json3, undefined),
+
+        IdsRevsEJson = {[
+            {<<"doc1">>, [Rev1]},
+            {<<"doc2">>, [Rev2]},
+            {<<"doc3">>, [Rev3]}
+        ]},
+        IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
+
+        {ok, Status, _, ResultBody} = test_request:post(Url ++ "/_purge/",
+            [?CONTENT_JSON, ?AUTH], IdsRevs),
+        ResultJson = ?JSON_DECODE(ResultBody),
+        ?assert(Status =:= 201 orelse Status =:= 202),
+        ?assertEqual(
+            {[
+                {<<"purge_seq">>, null},
+                {<<"purged">>, {[
+                    {<<"doc1">>, [Rev1]},
+                    {<<"doc2">>, [Rev2]},
+                    {<<"doc3">>, [Rev3]}
+                ]}}
+            ]},
+            ResultJson
+        )
+    end).
+
+
+% Purging 101 doc ids succeeds once the configured id cap is raised to
+% match; the cap is restored in the `after` clause regardless of outcome.
+test_ok_purge_request_with_101_docid(Url) ->
+    ?_test(begin
+        PurgedDocsNum = 101,
+        Docs = lists:foldl(fun(I, Acc) ->
+            Id = list_to_binary(integer_to_list(I)),
+            Doc = {[{<<"_id">>, Id}, {value, I}]},
+            [Doc | Acc]
+        end, [], lists:seq(1, PurgedDocsNum)),
+
+        {ok, _, _, Body} = create_docs(Url, Docs),
+        % _bulk_docs returns a JSON array: one {Props} entry per doc.
+        BodyJson = ?JSON_DECODE(Body),
+
+        PurgeBody = lists:map(fun({DocResp}) ->
+            Id = couch_util:get_value(<<"id">>, DocResp, undefined),
+            Rev = couch_util:get_value(<<"rev">>, DocResp, undefined),
+            {Id, [Rev]}
+        end, BodyJson),
+
+        ok = config:set("purge", "max_document_id_number", "101"),
+        try
+            {ok, Status, _, _} = test_request:post(Url ++ "/_purge/",
+                [?CONTENT_JSON, ?AUTH], ?JSON_ENCODE({PurgeBody})),
+            ?assert(Status =:= 201 orelse Status =:= 202)
+        after
+            ok = config:delete("purge", "max_document_id_number")
+        end
+    end).
+
+
+% When fabric reports `accepted` (quorum not met) the endpoint must
+% answer 202 while still listing the purged revisions.
+test_accepted_purge_request(Url) ->
+    ?_test(begin
+        {ok, _, _, Body} = create_doc(Url, "doc1"),
+        {Json} = ?JSON_DECODE(Body),
+        Rev1 = couch_util:get_value(<<"rev">>, Json, undefined),
+        IdsRevsEJson = {[
+            {<<"doc1">>, [Rev1]}
+        ]},
+        IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
+        % Mock fabric so purge_docs reports an `accepted` (partial) result.
+        meck:new(fabric, [passthrough]),
+        meck:expect(fabric, purge_docs,
+            fun(_, _, _) -> {accepted,[{accepted,[{1,
+                <<57,27,64,134,152,18,73,243,40,1,141,214,135,104,79,188>>}]}]}
+            end
+        ),
+        % Unload the mock even if the request or an assertion fails, so
+        % later tests in the fixture see the real fabric module.
+        try
+            {ok, Status, _, ResultBody} = test_request:post(Url ++ "/_purge/",
+                [?CONTENT_JSON, ?AUTH], IdsRevs),
+            ResultJson = ?JSON_DECODE(ResultBody),
+            ?assert(Status =:= 202),
+            ?assertEqual(
+                {[
+                    {<<"purge_seq">>, null},
+                    {<<"purged">>, {[
+                        {<<"doc1">>, [Rev1]}
+                    ]}}
+                ]},
+                ResultJson
+            )
+        after
+            meck:unload(fabric)
+        end
+    end).
+
+
+% Purge only the first revision of a doc that has a newer edit; the
+% purge succeeds and the surviving (updated) revision stays readable.
+test_partial_purge_request(Url) ->
+    ?_test(begin
+        {ok, _, _, Body} = create_doc(Url, "doc1"),
+        {Json} = ?JSON_DECODE(Body),
+        Rev1 = couch_util:get_value(<<"rev">>, Json, undefined),
+
+        % new_edits=false pushes a second revision on top of Rev1.
+        NewDoc = "{\"new_edits\": false, \"docs\": [{\"_id\": \"doc1\",
+            \"_revisions\": {\"start\": 1, \"ids\": [\"12345\", \"67890\"]},
+            \"content\": \"updated\", \"_rev\": \"" ++ ?b2l(Rev1) ++ "\"}]}",
+        {ok, _, _, _} = test_request:post(Url ++ "/_bulk_docs/",
+            [?CONTENT_JSON, ?AUTH], NewDoc),
+
+        IdsRevsEJson = {[{<<"doc1">>, [Rev1]}]},
+        IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
+        {ok, Status, _, ResultBody} = test_request:post(Url ++ "/_purge/",
+            [?CONTENT_JSON, ?AUTH], IdsRevs),
+        ResultJson = ?JSON_DECODE(ResultBody),
+        ?assert(Status =:= 201 orelse Status =:= 202),
+        ?assertEqual(
+            {[
+                {<<"purge_seq">>, null},
+                {<<"purged">>, {[
+                    {<<"doc1">>, [Rev1]}
+                ]}}
+            ]},
+            ResultJson
+        ),
+        % The newer revision must still be fetchable after the purge.
+        {ok, Status2, _, ResultBody2} = test_request:get(Url
+            ++ "/doc1/", [?AUTH]),
+        {Json2} = ?JSON_DECODE(ResultBody2),
+        Content = couch_util:get_value(<<"content">>, Json2, undefined),
+        ?assertEqual(<<"updated">>, Content),
+        ?assert(Status2 =:= 200)
+    end).
+
+
+% Mix of a partial purge, a request with a non-matching rev, and a fully
+% valid purge in one body: invalid revs yield an empty purged list for
+% that doc, the others report the revs actually removed.
+test_mixed_purge_request(Url) ->
+    ?_test(begin
+        {ok, _, _, Body} = create_doc(Url, "doc1"),
+        {Json} = ?JSON_DECODE(Body),
+        Rev1 = couch_util:get_value(<<"rev">>, Json, undefined),
+
+        % new_edits=false pushes a second revision on top of Rev1.
+        NewDoc = "{\"new_edits\": false, \"docs\": [{\"_id\": \"doc1\",
+            \"_revisions\": {\"start\": 1, \"ids\": [\"12345\", \"67890\"]},
+            \"content\": \"updated\", \"_rev\": \"" ++ ?b2l(Rev1) ++ "\"}]}",
+        {ok, _, _, _} = test_request:post(Url ++ "/_bulk_docs/",
+            [?CONTENT_JSON, ?AUTH], NewDoc),
+
+        {ok, _, _, _Body2} = create_doc(Url, "doc2", "content2"),
+        {ok, _, _, Body3} = create_doc(Url, "doc3", "content3"),
+        {Json3} = ?JSON_DECODE(Body3),
+        Rev3 = couch_util:get_value(<<"rev">>, Json3, undefined),
+
+
+        IdsRevsEJson = {[
+            {<<"doc1">>, [Rev1]},       % partial purge
+            {<<"doc2">>, [Rev3, Rev1]}, % correct format, but invalid rev
+            {<<"doc3">>, [Rev3]}        % correct format and rev
+        ]},
+        IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
+        {ok, Status, _, Body4} = test_request:post(Url ++ "/_purge/",
+            [?CONTENT_JSON, ?AUTH], IdsRevs),
+        ResultJson = ?JSON_DECODE(Body4),
+        ?assert(Status =:= 201 orelse Status =:= 202),
+        ?assertEqual(
+            {[
+                {<<"purge_seq">>, null},
+                {<<"purged">>, {[
+                    {<<"doc1">>, [Rev1]},
+                    {<<"doc2">>, []},
+                    {<<"doc3">>, [Rev3]}
+                ]}}
+            ]},
+            ResultJson
+        ),
+        % doc1's newer revision must survive the partial purge.
+        {ok, Status2, _, Body5} = test_request:get(Url
+            ++ "/doc1/", [?AUTH]),
+        {Json5} = ?JSON_DECODE(Body5),
+        Content = couch_util:get_value(<<"content">>, Json5, undefined),
+        ?assertEqual(<<"updated">>, Content),
+        ?assert(Status2 =:= 200)
+    end).
+
+
+% Requests exceeding the configured id / rev caps must be rejected with
+% 400 and a descriptive reason. The temporary config overrides are
+% restored in `after` clauses so a failing assertion cannot leave the
+% server with a cap of 1 for subsequent tests.
+test_overmany_ids_or_revs_purge_request(Url) ->
+    ?_test(begin
+        {ok, _, _, Body} = create_doc(Url, "doc1"),
+        {Json} = ?JSON_DECODE(Body),
+        Rev1 = couch_util:get_value(<<"rev">>, Json, undefined),
+
+        % new_edits=false pushes a second revision on top of Rev1.
+        NewDoc = "{\"new_edits\": false, \"docs\": [{\"_id\": \"doc1\",
+            \"_revisions\": {\"start\": 1, \"ids\": [\"12345\", \"67890\"]},
+            \"content\": \"updated\", \"_rev\": \"" ++ ?b2l(Rev1) ++ "\"}]}",
+        {ok, _, _, _} = test_request:post(Url ++ "/_bulk_docs/",
+            [?CONTENT_JSON, ?AUTH], NewDoc),
+
+        {ok, _, _, _Body2} = create_doc(Url, "doc2", "content2"),
+        {ok, _, _, Body3} = create_doc(Url, "doc3", "content3"),
+        {Json3} = ?JSON_DECODE(Body3),
+        Rev3 = couch_util:get_value(<<"rev">>, Json3, undefined),
+
+        IdsRevsEJson = {[
+            {<<"doc1">>, [Rev1]},       % partial purge
+            {<<"doc2">>, [Rev3, Rev1]}, % correct format, but invalid rev
+            {<<"doc3">>, [Rev3]}        % correct format and rev
+        ]},
+        IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
+
+        % Ids larger than expected
+        ok = config:set("purge", "max_document_id_number", "1"),
+        try
+            {ok, Status, _, Body4} = test_request:post(Url ++ "/_purge/",
+                [?CONTENT_JSON, ?AUTH], IdsRevs),
+            ResultJson = ?JSON_DECODE(Body4),
+            ?assertEqual(400, Status),
+            ?assertMatch({[
+                {<<"error">>,<<"bad_request">>},
+                {<<"reason">>,<<"Exceeded maximum number of documents.">>}]},
+                ResultJson)
+        after
+            ok = config:delete("purge", "max_document_id_number")
+        end,
+
+        % Revs larger than expected
+        ok = config:set("purge", "max_revisions_number", "1"),
+        try
+            {ok, Status2, _, Body5} = test_request:post(Url ++ "/_purge/",
+                [?CONTENT_JSON, ?AUTH], IdsRevs),
+            ResultJson2 = ?JSON_DECODE(Body5),
+            ?assertEqual(400, Status2),
+            ?assertMatch({[
+                {<<"error">>,<<"bad_request">>},
+                {<<"reason">>,<<"Exceeded maximum number of revisions.">>}]},
+                ResultJson2)
+        after
+            ok = config:delete("purge", "max_revisions_number")
+        end
+    end).
+
+
+% Purging more docs than the purged-infos limit (set to 2, purging 3)
+% must still succeed and report every purged revision.
+test_exceed_limits_on_purge_infos(Url) ->
+    ?_test(begin
+        {ok, Status1, _, _} = test_request:put(Url ++ "/_purged_infos_limit/",
+            [?CONTENT_JSON, ?AUTH], "2"),
+        ?assert(Status1 =:= 200),
+
+        {ok, _, _, Body} = create_doc(Url, "doc1"),
+        {Json} = ?JSON_DECODE(Body),
+        Rev1 = couch_util:get_value(<<"rev">>, Json, undefined),
+        {ok, _, _, Body2} = create_doc(Url, "doc2"),
+        {Json2} = ?JSON_DECODE(Body2),
+        Rev2 = couch_util:get_value(<<"rev">>, Json2, undefined),
+        {ok, _, _, Body3} = create_doc(Url, "doc3"),
+        {Json3} = ?JSON_DECODE(Body3),
+        Rev3 = couch_util:get_value(<<"rev">>, Json3, undefined),
+
+        IdsRevsEJson = {[
+            {<<"doc1">>, [Rev1]},
+            {<<"doc2">>, [Rev2]},
+            {<<"doc3">>, [Rev3]}
+        ]},
+        IdsRevs = binary_to_list(?JSON_ENCODE(IdsRevsEJson)),
+
+        {ok, Status2, _, ResultBody} = test_request:post(Url ++ "/_purge/",
+            [?CONTENT_JSON, ?AUTH], IdsRevs),
+
+        ResultJson = ?JSON_DECODE(ResultBody),
+        ?assert(Status2 =:= 201 orelse Status2 =:= 202),
+        ?assertEqual(
+            {[
+                {<<"purge_seq">>, null},
+                {<<"purged">>, {[
+                    {<<"doc1">>, [Rev1]},
+                    {<<"doc2">>, [Rev2]},
+                    {<<"doc3">>, [Rev3]}
+                ]}}
+            ]},
+            ResultJson
+        )
+
+    end).
+
+
+% A purged-infos limit of 0 is invalid and must be rejected with 400.
+should_error_set_purged_docs_limit_to0(Url) ->
+    ?_test(begin
+        {ok, Status, _, _} = test_request:put(Url ++ "/_purged_infos_limit/",
+            [?CONTENT_JSON, ?AUTH], "0"),
+        ?assert(Status =:= 400)
+    end).
+
+
+% A timeout from fabric:set_purge_infos_limit/3 must surface as HTTP 500
+% with {"error":"error","reason":"timeout"}.
+test_timeout_set_purged_infos_limit(Url) ->
+    ?_test(begin
+        meck:new(fabric, [passthrough]),
+        meck:expect(fabric, set_purge_infos_limit, fun(_, _, _) ->
+            {error, timeout} end),
+        % Unload the mock even if the request or an assertion fails, so
+        % later tests in the fixture see the real fabric module.
+        try
+            {ok, Status, _, ResultBody} = test_request:put(Url
+                ++ "/_purged_infos_limit/", [?CONTENT_JSON, ?AUTH], "2"),
+            ResultJson = ?JSON_DECODE(ResultBody),
+            ?assert(Status =:= 500),
+            ?assertMatch({[
+                {<<"error">>,<<"error">>},
+                {<<"reason">>,<<"timeout">>}]},
+                ResultJson)
+        after
+            meck:unload(fabric)
+        end
+    end).
diff --git a/src/chttpd/test/eunit/chttpd_security_tests.erl b/src/chttpd/test/eunit/chttpd_security_tests.erl
new file mode 100644
index 000000000..955b4ff01
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_security_tests.erl
@@ -0,0 +1,384 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_security_tests).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+-define(USER, "chttpd_db_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+
+-define(TEST_MEMBER, "test_member").
+-define(TEST_MEMBER_PASS, "test_member_pass").
+-define(TEST_MEMBER_AUTH, {basic_auth, {?TEST_MEMBER, ?TEST_MEMBER_PASS}}).
+
+-define(TEST_ADMIN, "test_admin").
+-define(TEST_ADMIN_PASS, "test_admin_pass").
+-define(TEST_ADMIN_AUTH, {basic_auth, {?TEST_ADMIN, ?TEST_ADMIN_PASS}}).
+
+
+
+-define(CONTENT_JSON, {"Content-Type", "application/json"}).
+-define(FIXTURE_TXT, ?ABS_PATH(?FILE)).
+
+% Build a server admin, a users db and a test db; create one member and
+% one db-admin user, and install a matching _security object. Returns
+% [DbUrl, UsersDbUrl], the argument every test case receives.
+setup() ->
+    Hashed = couch_passwords:hash_admin_password(?PASS),
+    Persist = false,
+    ok = config:set("admins", ?USER, ?b2l(Hashed), Persist),
+    UserDb = ?tempdb(),
+    TmpDb = ?tempdb(),
+    ok = config:set("chttpd_auth", "authentication_db", ?b2l(UserDb), Persist),
+
+    Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+    Port = mochiweb_socket_server:get(chttpd, port),
+    BaseUrl = lists:concat(["http://", Addr, ":", Port, "/"]),
+    Url = lists:concat([BaseUrl, ?b2l(TmpDb)]),
+    UsersUrl = lists:concat([BaseUrl, ?b2l(UserDb)]),
+    create_db(UsersUrl),
+    create_db(Url),
+    create_design_doc(Url),
+    create_user(UsersUrl,?TEST_MEMBER,?TEST_MEMBER_PASS,[<<?TEST_MEMBER>>]),
+    create_user(UsersUrl,?TEST_ADMIN,?TEST_ADMIN_PASS,[<<?TEST_ADMIN>>]),
+    set_security(Url),
+    [Url, UsersUrl].
+
+% Delete both databases and the server admin created in setup/0.
+teardown([Url,UsersUrl]) ->
+    delete_db(Url),
+    delete_db(UsersUrl),
+    ok = config:delete("admins", ?USER, _Persist=false).
+
+% PUT the database; 201 or 202 both count as success.
+create_db(Url) ->
+    {ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
+    ?assert(Status =:= 201 orelse Status =:= 202).
+
+% Install _design/test so the view-compaction endpoints have a target.
+% NOTE(review): '/_design/test' is an atom; lists:concat/1 accepts atoms,
+% so this concatenates as expected.
+create_design_doc(Url) ->
+    {ok, Status, _, _} = test_request:put(lists:concat([Url, '/_design/test']), [?CONTENT_JSON, ?AUTH],
+        "{\"id\":\"_design/test\"}"),
+    ?assert(Status =:= 201 orelse Status =:= 202).
+
+% Grant the admin role to TEST_ADMIN and the member role to TEST_MEMBER
+% via the db's _security object.
+set_security(Url) ->
+    SecurityUrl = lists:concat([Url, "/_security"]),
+    SecurityProperties = [
+        {<<"admins">>,{[{<<"roles">>,[<<?TEST_ADMIN>>]}]}},
+        {<<"members">>,{[{<<"roles">>,[<<?TEST_MEMBER>>]}]}}
+    ],
+
+    Body = jiffy:encode({SecurityProperties}),
+    {ok, Status, _, _} = test_request:put(SecurityUrl, [?CONTENT_JSON, ?AUTH], Body),
+    ?assert(Status =:= 200).
+
+% DELETE the database; crashes (badmatch) unless the server answers 200.
+delete_db(Url) ->
+    {ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
+
+% PUT an org.couchdb.user doc with the given name, password and roles.
+create_user(UsersUrl, Name, Password, Roles) ->
+
+    Body = "{\"name\":\"" ++ Name ++
+        "\",\"type\":\"user\",\"roles\":" ++ erlang:binary_to_list(jiffy:encode(Roles)) ++ ",\"password\":\"" ++ Password ++"\"}",
+
+    Url = lists:concat([
+        UsersUrl, "/org.couchdb.user:", Name]),
+    {ok, 201, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], Body).
+
+
+% Authorization matrix: which of server-admin / db-admin / member /
+% anonymous may hit the maintenance endpoints.
+all_test_() ->
+    {
+        "chttpd security tests",
+        {
+            setup,
+            fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+            {
+                foreach,
+                fun setup/0, fun teardown/1,
+                [
+                    fun should_allow_admin_db_compaction/1,
+                    fun should_allow_valid_password_to_create_user/1,
+                    fun should_disallow_invalid_password_to_create_user/1,
+                    fun should_disallow_anonymous_db_compaction/1,
+                    fun should_disallow_db_member_db_compaction/1,
+                    fun should_allow_db_admin_db_compaction/1,
+                    fun should_allow_admin_view_compaction/1,
+                    fun should_disallow_anonymous_view_compaction/1,
+                    fun should_allow_admin_db_view_cleanup/1,
+                    fun should_disallow_anonymous_db_view_cleanup/1,
+                    fun should_allow_admin_purge/1,
+                    fun should_disallow_anonymous_purge/1,
+                    fun should_disallow_db_member_purge/1,
+                    fun should_allow_admin_purged_infos_limit/1,
+                    fun should_disallow_anonymous_purged_infos_limit/1,
+                    fun should_disallow_db_member_purged_infos_limit/1
+                ]
+            }
+        }
+    }.
+
+% Validation of the _security object's shape (names/roles lists).
+security_object_validate_test_() ->
+    {
+        "chttpd security object validate tests",
+        {
+            setup,
+            fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+            {
+                foreach,
+                fun setup/0, fun teardown/1,
+                [
+                    fun should_return_ok_for_sec_obj_with_roles/1,
+                    fun should_return_ok_for_sec_obj_with_names/1,
+                    fun should_return_ok_for_sec_obj_with_roles_and_names/1,
+                    fun should_return_error_for_sec_obj_with_incorrect_roles_and_names/1,
+                    fun should_return_error_for_sec_obj_with_incorrect_roles/1,
+                    fun should_return_error_for_sec_obj_with_incorrect_names/1
+                ]
+            }
+        }
+    }.
+
+% Server admin may trigger _compact; response body carries "ok": true.
+should_allow_admin_db_compaction([Url,_UsersUrl]) ->
+    ?_assertEqual(true,
+        begin
+            {ok, _, _, ResultBody} = test_request:post(Url ++ "/_compact",
+                [?CONTENT_JSON, ?AUTH], ""),
+            ResultJson = ?JSON_DECODE(ResultBody),
+            {InnerJson} = ResultJson,
+            couch_util:get_value(<<"ok">>, InnerJson, undefined)
+        end).
+
+% A string password is accepted when creating a user doc.
+should_allow_valid_password_to_create_user([_Url, UsersUrl]) ->
+    UserDoc = "{\"_id\": \"org.couchdb.user:foo\", \"name\": \"foo\",
+        \"type\": \"user\", \"roles\": [], \"password\": \"bar\"}",
+    {ok, _, _, ResultBody} = test_request:post(UsersUrl,
+        [?CONTENT_JSON, ?AUTH], UserDoc),
+    ResultJson = ?JSON_DECODE(ResultBody),
+    {InnerJson} = ResultJson,
+    ?_assertEqual(true, couch_util:get_value(<<"ok">>, InnerJson)).
+
+% A non-string password (123) must be rejected as forbidden.
+should_disallow_invalid_password_to_create_user([_Url, UsersUrl]) ->
+    UserDoc = "{\"_id\": \"org.couchdb.user:foo\", \"name\": \"foo\",
+        \"type\": \"user\", \"roles\": [], \"password\": 123}",
+    {ok, _, _, ResultBody} = test_request:post(UsersUrl,
+        [?CONTENT_JSON, ?AUTH], UserDoc),
+    ResultJson = ?JSON_DECODE(ResultBody),
+    {InnerJson} = ResultJson,
+    ErrType = couch_util:get_value(<<"error">>, InnerJson),
+    ?_assertEqual(<<"forbidden">>, ErrType).
+
+% No credentials: _compact must be refused with "unauthorized".
+should_disallow_anonymous_db_compaction([Url,_UsersUrl]) ->
+    {ok, _, _, ResultBody} = test_request:post(Url ++ "/_compact",
+        [?CONTENT_JSON], ""),
+    ResultJson = ?JSON_DECODE(ResultBody),
+    {InnerJson} = ResultJson,
+    ErrType = couch_util:get_value(<<"error">>, InnerJson),
+    ?_assertEqual(<<"unauthorized">>,ErrType).
+
+% Member role is not enough for _compact.
+should_disallow_db_member_db_compaction([Url,_UsersUrl]) ->
+    {ok, _, _, ResultBody} = test_request:post(Url ++ "/_compact",
+        [?CONTENT_JSON, ?TEST_MEMBER_AUTH], ""),
+    ResultJson = ?JSON_DECODE(ResultBody),
+    {InnerJson} = ResultJson,
+    ErrType = couch_util:get_value(<<"error">>, InnerJson),
+    ?_assertEqual(<<"unauthorized">>,ErrType).
+
+% A db-level admin (via _security roles) may trigger _compact.
+should_allow_db_admin_db_compaction([Url,_UsersUrl]) ->
+    ?_assertEqual(true,
+        begin
+            {ok, _, _, ResultBody} = test_request:post(Url ++ "/_compact",
+                [?CONTENT_JSON, ?TEST_ADMIN_AUTH], ""),
+            ResultJson = ?JSON_DECODE(ResultBody),
+            {InnerJson} = ResultJson,
+            couch_util:get_value(<<"ok">>, InnerJson, undefined)
+        end).
+
+% Server admin may compact the "test" design doc's views.
+should_allow_admin_view_compaction([Url,_UsersUrl]) ->
+    ?_assertEqual(true,
+        begin
+            {ok, _, _, ResultBody} = test_request:post(Url ++ "/_compact/test",
+                [?CONTENT_JSON, ?AUTH], ""),
+            ResultJson = ?JSON_DECODE(ResultBody),
+            {InnerJson} = ResultJson,
+            couch_util:get_value(<<"ok">>, InnerJson, undefined)
+        end).
+
+% No credentials: view compaction must be refused.
+should_disallow_anonymous_view_compaction([Url,_UsersUrl]) ->
+    {ok, _, _, ResultBody} = test_request:post(Url ++ "/_compact/test",
+        [?CONTENT_JSON], ""),
+    ResultJson = ?JSON_DECODE(ResultBody),
+    {InnerJson} = ResultJson,
+    ErrType = couch_util:get_value(<<"error">>, InnerJson),
+    ?_assertEqual(<<"unauthorized">>,ErrType).
+
+% Server admin may run _view_cleanup.
+should_allow_admin_db_view_cleanup([Url,_UsersUrl]) ->
+    ?_assertEqual(true,
+        begin
+            {ok, _, _, ResultBody} = test_request:post(Url ++ "/_view_cleanup",
+                [?CONTENT_JSON, ?AUTH], ""),
+            ResultJson = ?JSON_DECODE(ResultBody),
+            {InnerJson} = ResultJson,
+            couch_util:get_value(<<"ok">>, InnerJson, undefined)
+        end).
+
+% No credentials: _view_cleanup must be refused.
+should_disallow_anonymous_db_view_cleanup([Url,_UsersUrl]) ->
+    {ok, _, _, ResultBody} = test_request:post(Url ++ "/_view_cleanup",
+        [?CONTENT_JSON], ""),
+    ResultJson = ?JSON_DECODE(ResultBody),
+    {InnerJson} = ResultJson,
+    ErrType = couch_util:get_value(<<"error">>, InnerJson),
+    ?_assertEqual(<<"unauthorized">>, ErrType).
+
+% Server admin may POST _purge; an empty body yields purge_seq null.
+should_allow_admin_purge([Url,_UsersUrl]) ->
+    ?_assertEqual(null,
+        begin
+            IdsRevs = "{}",
+            {ok, _, _, ResultBody} = test_request:post(Url ++ "/_purge",
+                [?CONTENT_JSON, ?AUTH], IdsRevs),
+            ResultJson = ?JSON_DECODE(ResultBody),
+            {InnerJson} = ResultJson,
+            couch_util:get_value(<<"purge_seq">>, InnerJson, undefined)
+        end).
+
+% No credentials: _purge must be refused.
+should_disallow_anonymous_purge([Url,_UsersUrl]) ->
+    {ok, _, _, ResultBody} = test_request:post(Url ++ "/_purge",
+        [?CONTENT_JSON], ""),
+    ResultJson = ?JSON_DECODE(ResultBody),
+    {InnerJson} = ResultJson,
+    ErrType = couch_util:get_value(<<"error">>, InnerJson),
+    ?_assertEqual(<<"unauthorized">>, ErrType).
+
+% Member role is not enough for _purge.
+should_disallow_db_member_purge([Url,_UsersUrl]) ->
+    {ok, _, _, ResultBody} = test_request:post(Url ++ "/_purge",
+        [?CONTENT_JSON, ?TEST_MEMBER_AUTH], ""),
+    ResultJson = ?JSON_DECODE(ResultBody),
+    {InnerJson} = ResultJson,
+    ErrType = couch_util:get_value(<<"error">>, InnerJson),
+    ?_assertEqual(<<"unauthorized">>,ErrType).
+
+% Server admin may change _purged_infos_limit; body carries "ok": true.
+should_allow_admin_purged_infos_limit([Url,_UsersUrl]) ->
+    ?_assertEqual(true,
+        begin
+            {ok, _, _, ResultBody} = test_request:put(Url
+                ++ "/_purged_infos_limit/", [?CONTENT_JSON, ?AUTH], "2"),
+            ResultJson = ?JSON_DECODE(ResultBody),
+            {InnerJson} = ResultJson,
+            couch_util:get_value(<<"ok">>, InnerJson, undefined)
+        end).
+
+% Truly anonymous request (no credentials) must be refused. Previously
+% this test sent ?TEST_MEMBER_AUTH, making it an exact duplicate of
+% should_disallow_db_member_purged_infos_limit and leaving the anonymous
+% path untested.
+should_disallow_anonymous_purged_infos_limit([Url,_UsersUrl]) ->
+    {ok, _, _, ResultBody} = test_request:put(Url ++ "/_purged_infos_limit/",
+        [?CONTENT_JSON], "2"),
+    ResultJson = ?JSON_DECODE(ResultBody),
+    {InnerJson} = ResultJson,
+    ErrType = couch_util:get_value(<<"error">>, InnerJson),
+    ?_assertEqual(<<"unauthorized">>, ErrType).
+
+% Member role is not enough to change _purged_infos_limit.
+should_disallow_db_member_purged_infos_limit([Url,_UsersUrl]) ->
+    {ok, _, _, ResultBody} = test_request:put(Url ++ "/_purged_infos_limit/",
+        [?CONTENT_JSON, ?TEST_MEMBER_AUTH], "2"),
+    ResultJson = ?JSON_DECODE(ResultBody),
+    {InnerJson} = ResultJson,
+    ErrType = couch_util:get_value(<<"error">>, InnerJson),
+    ?_assertEqual(<<"unauthorized">>,ErrType).
+
+% A _security object with binary role names is valid (200).
+should_return_ok_for_sec_obj_with_roles([Url,_UsersUrl]) ->
+    SecurityUrl = lists:concat([Url, "/_security"]),
+    SecurityProperties = [
+        {<<"admins">>,{[{<<"roles">>,[<<?TEST_ADMIN>>]}]}},
+        {<<"members">>,{[{<<"roles">>,[<<?TEST_MEMBER>>]}]}}
+    ],
+
+    Body = jiffy:encode({SecurityProperties}),
+    {ok, Status, _, _} = test_request:put(SecurityUrl,
+        [?CONTENT_JSON, ?AUTH], Body),
+    ?_assertEqual(200, Status).
+
+% A _security object with binary user names is valid (200).
+should_return_ok_for_sec_obj_with_names([Url,_UsersUrl]) ->
+    SecurityUrl = lists:concat([Url, "/_security"]),
+    SecurityProperties = [
+        {<<"admins">>,{[{<<"names">>,[<<?TEST_ADMIN>>]}]}},
+        {<<"members">>,{[{<<"names">>,[<<?TEST_MEMBER>>]}]}}
+    ],
+
+    Body = jiffy:encode({SecurityProperties}),
+    {ok, Status, _, _} = test_request:put(SecurityUrl,
+        [?CONTENT_JSON, ?AUTH], Body),
+    ?_assertEqual(200, Status).
+
+% Both names and roles together are valid as well (200).
+should_return_ok_for_sec_obj_with_roles_and_names([Url,_UsersUrl]) ->
+    SecurityUrl = lists:concat([Url, "/_security"]),
+    SecurityProperties = [
+        {<<"admins">>, {[{<<"names">>,[<<?TEST_ADMIN>>]},
+            {<<"roles">>,[<<?TEST_ADMIN>>]}]}},
+        {<<"members">>,{[{<<"names">>,[<<?TEST_MEMBER>>]},
+            {<<"roles">>,[<<?TEST_MEMBER>>]}]}}
+    ],
+
+    Body = jiffy:encode({SecurityProperties}),
+    {ok, Status, _, _} = test_request:put(SecurityUrl,
+        [?CONTENT_JSON, ?AUTH], Body),
+    ?_assertEqual(200, Status).
+
+% Non-binary entries (an integer name and a char-list role) must be
+% rejected; the cluster answers 500 / no_majority.
+should_return_error_for_sec_obj_with_incorrect_roles_and_names(
+    [Url,_UsersUrl]) ->
+    SecurityUrl = lists:concat([Url, "/_security"]),
+    SecurityProperties = [
+        {<<"admins">>,{[{<<"names">>,[123]}]}},
+        {<<"members">>,{[{<<"roles">>,["foo"]}]}}
+    ],
+
+    Body = jiffy:encode({SecurityProperties}),
+    {ok, Status, _, RespBody} = test_request:put(SecurityUrl,
+        [?CONTENT_JSON, ?AUTH], Body),
+    ResultJson = ?JSON_DECODE(RespBody),
+    [
+        ?_assertEqual(500, Status),
+        ?_assertEqual({[
+            {<<"error">>,<<"error">>},
+            {<<"reason">>,<<"no_majority">>}
+        ]}, ResultJson)
+    ].
+
+% A char-list role (?TEST_ADMIN without <<>>) is invalid.
+should_return_error_for_sec_obj_with_incorrect_roles([Url,_UsersUrl]) ->
+    SecurityUrl = lists:concat([Url, "/_security"]),
+    SecurityProperties = [
+        {<<"admins">>,{[{<<"roles">>,[?TEST_ADMIN]}]}},
+        {<<"members">>,{[{<<"roles">>,[<<?TEST_MEMBER>>]}]}}
+    ],
+
+    Body = jiffy:encode({SecurityProperties}),
+    {ok, Status, _, RespBody} = test_request:put(SecurityUrl,
+        [?CONTENT_JSON, ?AUTH], Body),
+    ResultJson = ?JSON_DECODE(RespBody),
+    [
+        ?_assertEqual(500, Status),
+        ?_assertEqual({[
+            {<<"error">>,<<"error">>},
+            {<<"reason">>,<<"no_majority">>}
+        ]}, ResultJson)
+    ].
+
+% A char-list member name is invalid as well.
+should_return_error_for_sec_obj_with_incorrect_names([Url,_UsersUrl]) ->
+    SecurityUrl = lists:concat([Url, "/_security"]),
+    SecurityProperties = [
+        {<<"admins">>,{[{<<"names">>,[<<?TEST_ADMIN>>]}]}},
+        {<<"members">>,{[{<<"names">>,[?TEST_MEMBER]}]}}
+    ],
+
+    Body = jiffy:encode({SecurityProperties}),
+    {ok, Status, _, RespBody} = test_request:put(SecurityUrl,
+        [?CONTENT_JSON, ?AUTH], Body),
+    ResultJson = ?JSON_DECODE(RespBody),
+    [
+        ?_assertEqual(500, Status),
+        ?_assertEqual({[
+            {<<"error">>,<<"error">>},
+            {<<"reason">>,<<"no_majority">>}
+        ]}, ResultJson)
+    ].
diff --git a/src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl b/src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl
new file mode 100644
index 000000000..937880621
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_socket_buffer_size_test.erl
@@ -0,0 +1,127 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_socket_buffer_size_test).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+-define(USER, "chttpd_db_socket_buffer_size_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+-define(CONTENT_JSON, {"Content-Type", "application/json"}).
+
+
+% Start couch with the given mochiweb server_options string and create a
+% temp db; returns {StartCtx, Db} for the matching teardown/2.
+setup(SocketOpts) ->
+    StartCtx = start_couch_with_cfg(SocketOpts),
+    Db = ?tempdb(),
+    create_db(url(Db)),
+    {StartCtx, Db}.
+
+
+% Delete the temp db, drop admin creds, and stop this couch instance.
+teardown(_, {StartCtx, Db}) ->
+    delete_db(url(Db)),
+    ok = config:delete("admins", ?USER, _Persist=false),
+    test_util:stop_couch(StartCtx).
+
+
+% foreachx: the first tuple element is both the test label and the
+% server_options config passed to setup/1.
+socket_buffer_size_test_() ->
+    {
+        "chttpd socket_buffer_size_test",
+        {
+            foreachx,
+            fun setup/1, fun teardown/2,
+            [
+                {"[{recbuf, undefined}]", fun default_buffer/2},
+                {"[{recbuf, 1024}]", fun small_recbuf/2},
+                {"[{buffer, 1024}]", fun small_buffer/2}
+            ]
+        }
+    }.
+
+
+% With a 1k recbuf a 2k request line must fail: either the server parses
+% enough to answer 400 or the connection breaks (request_failed).
+small_recbuf(_, {_, Db}) ->
+    {timeout, 30, ?_test(begin
+        Id = data(2048),
+        Response = put_req(url(Db) ++ "/" ++ Id, "{}"),
+        ?assert(Response =:= 400 orelse Response =:= request_failed)
+    end)}.
+
+
+% Same expectation when only the user-level `buffer` option is small.
+small_buffer(_, {_, Db}) ->
+    {timeout, 30, ?_test(begin
+        Id = data(2048),
+        Response = put_req(url(Db) ++ "/" ++ Id, "{}"),
+        ?assert(Response =:= 400 orelse Response =:= request_failed)
+    end)}.
+
+
+% Default buffers: a 7k doc id plus a 7k header must go through (201/202).
+default_buffer(_, {_, Db}) ->
+    {timeout, 30, ?_test(begin
+        Id = data(7000),
+        Headers = [{"Blah", data(7000)}],
+        Status = put_req(url(Db) ++ "/" ++ Id, Headers, "{}"),
+        ?assert(Status =:= 201 orelse Status =:= 202)
+    end)}.
+
+
+% Helper functions
+
+% Base URL of the running chttpd instance.
+url() ->
+    Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+    Port = integer_to_list(mochiweb_socket_server:get(chttpd, port)),
+    "http://" ++ Addr ++ ":" ++ Port.
+
+
+% URL of the given database.
+url(Db) ->
+    url() ++ "/" ++ ?b2l(Db).
+
+
+% Create the db with q=1/n=1 to keep this single-node test light.
+create_db(Url) ->
+    Status = put_req(Url ++ "?q=1&n=1", "{}"),
+    ?assert(Status =:= 201 orelse Status =:= 202).
+
+
+% DELETE the database; crashes unless the server answers 200.
+delete_db(Url) ->
+    {ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
+
+
+% PUT with no extra headers; see put_req/3.
+put_req(Url, Body) ->
+    put_req(Url, [], Body).
+
+
+% PUT returning either the HTTP status or the transport error atom, so
+% callers can assert on both outcomes (e.g. request_failed).
+put_req(Url, Headers, Body) ->
+    AllHeaders = Headers ++ [?CONTENT_JSON, ?AUTH],
+    case test_request:put(Url, AllHeaders, Body) of
+        {ok, Status, _, _} -> Status;
+        {error, Error} -> Error
+    end.
+
+
+% A string of Size 'x' characters, used to inflate ids and headers.
+data(Size) ->
+    string:copies("x", Size).
+
+
+% Write a one-off ini file setting [chttpd] server_options and append it
+% to the config chain so it overrides the defaults.
+append_to_cfg_chain(Cfg) ->
+    CfgDir = filename:dirname(lists:last(?CONFIG_CHAIN)),
+    CfgFile = filename:join([CfgDir, "chttpd_socket_buffer_extra_cfg.ini"]),
+    CfgSect = io_lib:format("[chttpd]~nserver_options = ~s~n", [Cfg]),
+    ok = file:write_file(CfgFile, CfgSect),
+    ?CONFIG_CHAIN ++ [CfgFile].
+
+
+% Boot couch with the extended config chain and register the test admin.
+start_couch_with_cfg(Cfg) ->
+    CfgChain = append_to_cfg_chain(Cfg),
+    StartCtx = test_util:start_couch(CfgChain, [chttpd]),
+    Hashed = couch_passwords:hash_admin_password(?PASS),
+    ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+    StartCtx.
diff --git a/src/chttpd/test/eunit/chttpd_view_test.erl b/src/chttpd/test/eunit/chttpd_view_test.erl
new file mode 100644
index 000000000..4c224bb4e
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_view_test.erl
@@ -0,0 +1,124 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_view_test).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+-define(USER, "chttpd_view_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+-define(CONTENT_JSON, {"Content-Type", "application/json"}).
+-define(DDOC, "{\"_id\": \"_design/bar\", \"views\": {\"baz\":
+ {\"map\": \"function(doc) {emit(doc._id, doc._id);}\"}}}").
+
+-define(FIXTURE_TXT, ?ABS_PATH(?FILE)).
+-define(i2l(I), integer_to_list(I)).
+-define(TIMEOUT, 60). % seconds
+
+% Register an admin and create a fresh temp database; returns its URL.
+setup() ->
+    Hashed = couch_passwords:hash_admin_password(?PASS),
+    ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+    TmpDb = ?tempdb(),
+    Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+    Port = mochiweb_socket_server:get(chttpd, port),
+    Url = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(TmpDb)]),
+    create_db(Url),
+    Url.
+
+% Drop the temp database and the admin credentials from setup/0.
+teardown(Url) ->
+    delete_db(Url),
+    ok = config:delete("admins", ?USER, _Persist=false).
+
+% PUT the database; 201 or 202 both count as success.
+create_db(Url) ->
+    {ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
+    ?assert(Status =:= 201 orelse Status =:= 202).
+
+
+% PUT a doc with a fixed body; the view in ?DDOC emits one row per doc.
+create_doc(Url, Id) ->
+    test_request:put(Url ++ "/" ++ Id,
+        [?CONTENT_JSON, ?AUTH], "{\"mr\": \"rockoartischocko\"}").
+
+% DELETE the database; crashes unless the server answers 200.
+delete_db(Url) ->
+    {ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
+
+% Fixture: each view-queries test runs against a fresh temp database.
+all_view_test_() ->
+    {
+        "chttpd view tests",
+        {
+            setup,
+            fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+            {
+                foreach,
+                fun setup/0, fun teardown/1,
+                [
+                    fun should_succeed_on_view_with_queries_keys/1,
+                    fun should_succeed_on_view_with_queries_limit_skip/1,
+                    fun should_succeed_on_view_with_multiple_queries/1
+                ]
+            }
+        }
+    }.
+
+
+% A "queries" POST with a keys filter returns one result entry whose
+% rows match exactly the requested keys.
+should_succeed_on_view_with_queries_keys(Url) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+        {ok, _, _, _} = test_request:put(Url ++ "/_design/bar",
+            [?CONTENT_JSON, ?AUTH], ?DDOC),
+        QueryDoc = "{\"queries\": [{\"keys\": [ \"testdoc3\",
+            \"testdoc8\"]}]}",
+        {ok, RC, _, RespBody} = test_request:post(Url ++ "/_design/bar/"
+            ++ "_view/baz/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
+        % Assert the HTTP status like the sibling tests do, so a failed
+        % request reports a status mismatch instead of a decode error.
+        ?assertEqual(200, RC),
+        {ResultJson} = ?JSON_DECODE(RespBody),
+        ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+        {InnerJson} = lists:nth(1, ResultJsonBody),
+        ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson)))
+    end)}.
+
+
+% A "queries" POST with limit/skip returns offset 2 and 5 rows.
+should_succeed_on_view_with_queries_limit_skip(Url) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+        {ok, _, _, _} = test_request:put(Url ++ "/_design/bar",
+            [?CONTENT_JSON, ?AUTH], ?DDOC),
+        QueryDoc = "{\"queries\": [{\"limit\": 5, \"skip\": 2}]}",
+        {ok, RC, _, RespBody} = test_request:post(Url ++ "/_design/bar/"
+            ++ "_view/baz/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
+        ?assertEqual(200, RC),
+        {ResultJson} = ?JSON_DECODE(RespBody),
+        ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+        {InnerJson} = lists:nth(1, ResultJsonBody),
+        ?assertEqual(2, couch_util:get_value(<<"offset">>, InnerJson)),
+        ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson)))
+    end)}.
+
+
+% Two queries in one body produce two result entries, evaluated
+% independently (keys filter first, then limit/skip).
+should_succeed_on_view_with_multiple_queries(Url) ->
+    {timeout, ?TIMEOUT, ?_test(begin
+        [create_doc(Url, "testdoc" ++ ?i2l(I)) || I <- lists:seq(1, 10)],
+        {ok, _, _, _} = test_request:put(Url ++ "/_design/bar",
+            [?CONTENT_JSON, ?AUTH], ?DDOC),
+        QueryDoc = "{\"queries\": [{\"keys\": [ \"testdoc3\",
+            \"testdoc8\"]}, {\"limit\": 5, \"skip\": 2}]}",
+        {ok, RC, _, RespBody} = test_request:post(Url ++ "/_design/bar/"
+            ++ "_view/baz/queries/", [?CONTENT_JSON, ?AUTH], QueryDoc),
+        ?assertEqual(200, RC),
+        {ResultJson} = ?JSON_DECODE(RespBody),
+        ResultJsonBody = couch_util:get_value(<<"results">>, ResultJson),
+        {InnerJson1} = lists:nth(1, ResultJsonBody),
+        ?assertEqual(2, length(couch_util:get_value(<<"rows">>, InnerJson1))),
+        {InnerJson2} = lists:nth(2, ResultJsonBody),
+        ?assertEqual(2, couch_util:get_value(<<"offset">>, InnerJson2)),
+        ?assertEqual(5, length(couch_util:get_value(<<"rows">>, InnerJson2)))
+    end)}.
diff --git a/src/chttpd/test/eunit/chttpd_welcome_test.erl b/src/chttpd/test/eunit/chttpd_welcome_test.erl
new file mode 100644
index 000000000..e427f4dff
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_welcome_test.erl
@@ -0,0 +1,104 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_welcome_test).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+-define(USER, "chttpd_db_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+-define(CONTENT_JSON, {"Content-Type", "application/json"}).
+
+
+% Register a temporary server admin so authenticated requests succeed,
+% then return the root URL of the chttpd node under test.
+setup() ->
+    Hashed = couch_passwords:hash_admin_password(?PASS),
+    ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false),
+    Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+    Port = mochiweb_socket_server:get(chttpd, port),
+    lists:concat(["http://", Addr, ":", Port, "/"]).
+
+
+% Remove the admin user created by setup/0 so later fixtures start clean.
+teardown(_Url) ->
+ ok = config:delete("admins", ?USER, _Persist=false).
+
+
+% EUnit generator: start couch once for the whole group, and run each
+% welcome-endpoint check inside its own setup/teardown (fresh admin user).
+welcome_test_() ->
+ {
+ "chttpd welcome endpoint tests",
+ {
+ setup,
+ fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+ {
+ foreach,
+ fun setup/0, fun teardown/1,
+ [
+ fun should_have_version/1,
+ fun should_have_features/1,
+ fun should_have_uuid/1
+ ]
+ }
+ }
+ }.
+
+% GET / must report the node's real UUID alongside the standard welcome
+% fields (couchdb banner, features list, git_sha).
+should_have_uuid(Url) ->
+    ?_test(begin
+        {ok, Status, _, Body} = test_request:get(Url, [?CONTENT_JSON, ?AUTH]),
+        ?assertEqual(200, Status),
+        {Props} = ?JSON_DECODE(Body),
+        ?assertNotEqual(undefined,
+            couch_util:get_value(<<"git_sha">>, Props, undefined)),
+        ?assertEqual(<<"Welcome">>,
+            couch_util:get_value(<<"couchdb">>, Props, undefined)),
+        % The advertised uuid must match what the server itself reports.
+        ?assertEqual(couch_server:get_uuid(),
+            couch_util:get_value(<<"uuid">>, Props, undefined)),
+        ?assert(is_list(couch_util:get_value(<<"features">>, Props, undefined)))
+    end).
+
+
+% GET / must advertise the version string that couch_server:get_version/0
+% reports, alongside the standard welcome fields.
+should_have_version(Url) ->
+    ?_test(begin
+        {ok, Status, _, Body} = test_request:get(Url, [?CONTENT_JSON, ?AUTH]),
+        ?assertEqual(200, Status),
+        {Props} = ?JSON_DECODE(Body),
+        ?assertNotEqual(undefined,
+            couch_util:get_value(<<"git_sha">>, Props, undefined)),
+        ?assertEqual(<<"Welcome">>,
+            couch_util:get_value(<<"couchdb">>, Props, undefined)),
+        % Version in the body must equal the server's own version string.
+        Expected = list_to_binary(couch_server:get_version()),
+        ?assertEqual(Expected,
+            couch_util:get_value(<<"version">>, Props, undefined)),
+        ?assert(is_list(couch_util:get_value(<<"features">>, Props, undefined)))
+    end).
+
+
+% Toggling a feature flag must be reflected immediately in the features
+% list returned by the welcome endpoint.
+should_have_features(Url) ->
+    ?_test(begin
+        % Enable a dummy feature and expect it to be advertised.
+        config:enable_feature(snek),
+        {ok, 200, _, BodyOn} = test_request:get(Url, [?CONTENT_JSON, ?AUTH]),
+        {PropsOn} = ?JSON_DECODE(BodyOn),
+        FeaturesOn = couch_util:get_value(<<"features">>, PropsOn, undefined),
+        ?assert(is_list(FeaturesOn)),
+        ?assert(lists:member(<<"snek">>, FeaturesOn)),
+        % Disable it again and expect it to disappear from the list.
+        config:disable_feature(snek),
+        {ok, 200, _, BodyOff} = test_request:get(Url, [?CONTENT_JSON, ?AUTH]),
+        {PropsOff} = ?JSON_DECODE(BodyOff),
+        FeaturesOff = couch_util:get_value(<<"features">>, PropsOff, undefined),
+        ?assert(is_list(FeaturesOff)),
+        ?assertNot(lists:member(<<"snek">>, FeaturesOff))
+    end).
diff --git a/src/chttpd/test/eunit/chttpd_xframe_test.erl b/src/chttpd/test/eunit/chttpd_xframe_test.erl
new file mode 100644
index 000000000..1272c198c
--- /dev/null
+++ b/src/chttpd/test/eunit/chttpd_xframe_test.erl
@@ -0,0 +1,84 @@
+-module(chttpd_xframe_test).
+
+
+-include_lib("couch/include/couch_db.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+% Stub the config server with meck: every config:get/3 lookup returns the
+% header name "X-Forwarded-Host", which the xframe code uses to resolve
+% the request host.
+setup() ->
+ ok = meck:new(config),
+ ok = meck:expect(config, get, fun(_, _, _) -> "X-Forwarded-Host" end),
+ ok.
+
+% Remove the config mock installed by setup/0.
+teardown(_) ->
+ meck:unload(config).
+
+% Build a minimal #httpd{} request for host "examples.com". No socket is
+% attached (nil) since the code under test only inspects the headers.
+mock_request() ->
+    Hdrs = mochiweb_headers:make([{"Host", "examples.com"}]),
+    Req = mochiweb_request:new(nil, 'GET', '/', {1, 1}, Hdrs),
+    #httpd{mochi_req = Req}.
+
+% X-Frame-Options support switched off entirely.
+config_disabled() ->
+    [{enabled, false}].
+
+% Enabled in SAMEORIGIN mode.
+config_sameorigin() ->
+    [{enabled, true}, {same_origin, true}].
+
+% Enabled with a wildcard allow-list: any host is permitted.
+config_wildcard() ->
+    [{enabled, true}, {same_origin, false}, {hosts, ["*"]}].
+
+% Enabled with an explicit allow-list that includes the mocked request
+% host (examples.com).
+config_specific_hosts() ->
+    [{enabled, true}, {same_origin, false},
+        {hosts, ["http://couchdb.org", "http://examples.com"]}].
+
+% Allow-list that does NOT contain the mocked request host, so the
+% request should be denied.
+% NOTE(review): "diffent" is a typo for "different"; kept as-is because
+% deny_with_different_host/1 calls the function by this name.
+config_diffent_specific_hosts() ->
+    [{enabled, true}, {same_origin, false}, {hosts, ["http://couchdb.org"]}].
+
+% With xframe support disabled, no X-Frame-Options header is emitted.
+no_header_if_xframe_disabled_test() ->
+    Headers = chttpd_xframe_options:header(mock_request(), [], config_disabled()),
+    % Expected value first, actual second, per the eunit convention —
+    % keeps failure reports correct and matches the ?_assertEqual calls
+    % later in this file.
+    ?assertEqual([], Headers).
+
+% SAMEORIGIN mode emits the corresponding X-Frame-Options header.
+enabled_with_same_origin_test() ->
+    Headers = chttpd_xframe_options:header(mock_request(), [], config_sameorigin()),
+    % Expected value first, actual second, per the eunit convention.
+    ?assertEqual([{"X-Frame-Options", "SAMEORIGIN"}], Headers).
+
+
+% EUnit generator for host-matching behaviour. Each case runs inside the
+% meck fixture (setup/0 mocks config, teardown/1 unloads it), because the
+% host-resolution path reads config:get/3.
+xframe_host_test_() ->
+ {
+ "xframe host tests",
+ {
+ foreach, fun setup/0, fun teardown/1,
+ [
+ fun allow_with_wildcard_host/1,
+ fun allow_with_specific_host/1,
+ fun deny_with_different_host/1
+ ]
+ }
+ }.
+
+% A wildcard allow-list admits the mocked examples.com host.
+allow_with_wildcard_host(_) ->
+    Hdrs = chttpd_xframe_options:header(mock_request(), [], config_wildcard()),
+    ?_assertEqual([{"X-Frame-Options", "ALLOW-FROM http://examples.com"}], Hdrs).
+
+% A host present in the explicit allow-list is granted ALLOW-FROM.
+allow_with_specific_host(_) ->
+    Hdrs = chttpd_xframe_options:header(mock_request(), [], config_specific_hosts()),
+    ?_assertEqual([{"X-Frame-Options", "ALLOW-FROM http://examples.com"}], Hdrs).
+
+% A host absent from the allow-list is denied outright.
+deny_with_different_host(_) ->
+    Hdrs = chttpd_xframe_options:header(mock_request(), [], config_diffent_specific_hosts()),
+    ?_assertEqual([{"X-Frame-Options", "DENY"}], Hdrs).