http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/94ae4be2/test/couchdb_attachments_tests.erl ---------------------------------------------------------------------- diff --git a/test/couchdb_attachments_tests.erl b/test/couchdb_attachments_tests.erl index b203a82..f1d7776 100644 --- a/test/couchdb_attachments_tests.erl +++ b/test/couchdb_attachments_tests.erl @@ -26,603 +26,603 @@ -define(i2l(I), integer_to_list(I)). -start() -> - ok = test_util:start_couch(), - % ensure in default compression settings for attachments_compression_tests - config:set("attachments", "compression_level", - ?i2l(?COMPRESSION_LEVEL), false), - config:set("attachments", "compressible_types", "text/*", false), - ok. - -setup() -> - DbName = ?tempdb(), - {ok, Db} = couch_db:create(DbName, []), - ok = couch_db:close(Db), - Addr = config:get("httpd", "bind_address", any), - Port = mochiweb_socket_server:get(couch_httpd, port), - Host = Addr ++ ":" ++ ?i2l(Port), - {Host, ?b2l(DbName)}. - -setup({binary, standalone}) -> - {Host, DbName} = setup(), - setup_att(fun create_standalone_png_att/2, Host, DbName, ?FIXTURE_PNG); -setup({text, standalone}) -> - {Host, DbName} = setup(), - setup_att(fun create_standalone_text_att/2, Host, DbName, ?FIXTURE_TXT); -setup({binary, inline}) -> - {Host, DbName} = setup(), - setup_att(fun create_inline_png_att/2, Host, DbName, ?FIXTURE_PNG); -setup({text, inline}) -> - {Host, DbName} = setup(), - setup_att(fun create_inline_text_att/2, Host, DbName, ?FIXTURE_TXT); -setup(compressed) -> - {Host, DbName} = setup(), - setup_att(fun create_already_compressed_att/2, Host, DbName, ?FIXTURE_TXT). -setup_att(Fun, Host, DbName, File) -> - HttpHost = "http://" ++ Host, - AttUrl = Fun(HttpHost, DbName), - {ok, Data} = file:read_file(File), - DocUrl = string:join([HttpHost, DbName, "doc"], "/"), - Helpers = {DbName, DocUrl, AttUrl}, - {Data, Helpers}. - -teardown(_, {_, {DbName, _, _}}) -> - teardown(DbName). - -teardown({_, DbName}) -> - teardown(DbName); -teardown(DbName) -> - ok = couch_server:delete(?l2b(DbName), []), - ok. - - -attachments_test_() -> - { - "Attachments tests", - { - setup, - fun start/0, fun test_util:stop_couch/1, - [ - attachments_md5_tests(), - attachments_compression_tests() - ] - } - }. - -attachments_md5_tests() -> - { - "Attachments MD5 tests", - { - foreach, - fun setup/0, fun teardown/1, - [ - fun should_upload_attachment_without_md5/1, - fun should_upload_attachment_by_chunks_without_md5/1, - fun should_upload_attachment_with_valid_md5_header/1, - fun should_upload_attachment_by_chunks_with_valid_md5_header/1, - fun should_upload_attachment_by_chunks_with_valid_md5_trailer/1, - fun should_reject_attachment_with_invalid_md5/1, - fun should_reject_chunked_attachment_with_invalid_md5/1, - fun should_reject_chunked_attachment_with_invalid_md5_trailer/1 - ] - } - }. 
- -attachments_compression_tests() -> - Funs = [ - fun should_get_att_without_accept_gzip_encoding/2, - fun should_get_att_with_accept_gzip_encoding/2, - fun should_get_att_with_accept_deflate_encoding/2, - fun should_return_406_response_on_unsupported_encoding/2, - fun should_get_doc_with_att_data/2, - fun should_get_doc_with_att_data_stub/2 - ], - { - "Attachments compression tests", - [ - { - "Created via Attachments API", - created_attachments_compression_tests(standalone, Funs) - }, - { - "Created inline via Document API", - created_attachments_compression_tests(inline, Funs) - }, - { - "Created already been compressed via Attachments API", - { - foreachx, - fun setup/1, fun teardown/2, - [{compressed, Fun} || Fun <- Funs] - } - }, - { - foreach, - fun setup/0, fun teardown/1, - [ - fun should_not_create_compressed_att_with_deflate_encoding/1, - fun should_not_create_compressed_att_with_compress_encoding/1, - fun should_create_compressible_att_with_ctype_params/1 - ] - } - ] - }. - -created_attachments_compression_tests(Mod, Funs) -> - [ - { - "Compressiable attachments", - { - foreachx, - fun setup/1, fun teardown/2, - [{{text, Mod}, Fun} || Fun <- Funs] - } - }, - { - "Uncompressiable attachments", - { - foreachx, - fun setup/1, fun teardown/2, - [{{binary, Mod}, Fun} || Fun <- Funs] - } - } - ]. - - - -should_upload_attachment_without_md5({Host, DbName}) -> - ?_test(begin - AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), - Body = "We all live in a yellow submarine!", - Headers = [ - {"Content-Length", "34"}, - {"Content-Type", "text/plain"}, - {"Host", Host} - ], - {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), - ?assertEqual(201, Code), - ?assertEqual(true, get_json(Json, [<<"ok">>])) - end). - -should_upload_attachment_by_chunks_without_md5({Host, DbName}) -> - ?_test(begin - AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), - AttData = <<"We all live in a yellow submarine!">>, - <<Part1:21/binary, Part2:13/binary>> = AttData, - Body = chunked_body([Part1, Part2]), - Headers = [ - {"Content-Type", "text/plain"}, - {"Transfer-Encoding", "chunked"}, - {"Host", Host} - ], - {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), - ?assertEqual(201, Code), - ?assertEqual(true, get_json(Json, [<<"ok">>])) - end). - -should_upload_attachment_with_valid_md5_header({Host, DbName}) -> - ?_test(begin - AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), - Body = "We all live in a yellow submarine!", - Headers = [ - {"Content-Length", "34"}, - {"Content-Type", "text/plain"}, - {"Content-MD5", ?b2l(base64:encode(couch_util:md5(Body)))}, - {"Host", Host} - ], - {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), - ?assertEqual(201, Code), - ?assertEqual(true, get_json(Json, [<<"ok">>])) - end). - -should_upload_attachment_by_chunks_with_valid_md5_header({Host, DbName}) -> - ?_test(begin - AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), - AttData = <<"We all live in a yellow submarine!">>, - <<Part1:21/binary, Part2:13/binary>> = AttData, - Body = chunked_body([Part1, Part2]), - Headers = [ - {"Content-Type", "text/plain"}, - {"Content-MD5", ?b2l(base64:encode(couch_util:md5(AttData)))}, - {"Host", Host}, - {"Transfer-Encoding", "chunked"} - ], - {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), - ?assertEqual(201, Code), - ?assertEqual(true, get_json(Json, [<<"ok">>])) - end). 
- -should_upload_attachment_by_chunks_with_valid_md5_trailer({Host, DbName}) -> - ?_test(begin - AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), - AttData = <<"We all live in a yellow submarine!">>, - <<Part1:21/binary, Part2:13/binary>> = AttData, - Body = [chunked_body([Part1, Part2]), - "Content-MD5: ", base64:encode(couch_util:md5(AttData)), - "\r\n"], - Headers = [ - {"Content-Type", "text/plain"}, - {"Host", Host}, - {"Trailer", "Content-MD5"}, - {"Transfer-Encoding", "chunked"} - ], - {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), - ?assertEqual(201, Code), - ?assertEqual(true, get_json(Json, [<<"ok">>])) - end). - -should_reject_attachment_with_invalid_md5({Host, DbName}) -> - ?_test(begin - AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), - Body = "We all live in a yellow submarine!", - Headers = [ - {"Content-Length", "34"}, - {"Content-Type", "text/plain"}, - {"Content-MD5", ?b2l(base64:encode(<<"foobar!">>))}, - {"Host", Host} - ], - {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), - ?assertEqual(400, Code), - ?assertEqual(<<"content_md5_mismatch">>, - get_json(Json, [<<"error">>])) - end). - - -should_reject_chunked_attachment_with_invalid_md5({Host, DbName}) -> - ?_test(begin - AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), - AttData = <<"We all live in a yellow submarine!">>, - <<Part1:21/binary, Part2:13/binary>> = AttData, - Body = chunked_body([Part1, Part2]), - Headers = [ - {"Content-Type", "text/plain"}, - {"Content-MD5", ?b2l(base64:encode(<<"foobar!">>))}, - {"Host", Host}, - {"Transfer-Encoding", "chunked"} - ], - {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), - ?assertEqual(400, Code), - ?assertEqual(<<"content_md5_mismatch">>, - get_json(Json, [<<"error">>])) - end). - -should_reject_chunked_attachment_with_invalid_md5_trailer({Host, DbName}) -> - ?_test(begin - AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), - AttData = <<"We all live in a yellow submarine!">>, - <<Part1:21/binary, Part2:13/binary>> = AttData, - Body = [chunked_body([Part1, Part2]), - "Content-MD5: ", base64:encode(<<"foobar!">>), - "\r\n"], - Headers = [ - {"Content-Type", "text/plain"}, - {"Host", Host}, - {"Trailer", "Content-MD5"}, - {"Transfer-Encoding", "chunked"} - ], - {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), - ?assertEqual(400, Code), - ?assertEqual(<<"content_md5_mismatch">>, get_json(Json, [<<"error">>])) - end). - -should_get_att_without_accept_gzip_encoding(_, {Data, {_, _, AttUrl}}) -> - ?_test(begin - {ok, Code, Headers, Body} = test_request:get(AttUrl), - ?assertEqual(200, Code), - ?assertNot(lists:member({"Content-Encoding", "gzip"}, Headers)), - ?assertEqual(Data, iolist_to_binary(Body)) - end). 
- -should_get_att_with_accept_gzip_encoding(compressed, {Data, {_, _, AttUrl}}) -> - ?_test(begin - {ok, Code, Headers, Body} = test_request:get( - AttUrl, [{"Accept-Encoding", "gzip"}]), - ?assertEqual(200, Code), - ?assert(lists:member({"Content-Encoding", "gzip"}, Headers)), - ?assertEqual(Data, zlib:gunzip(iolist_to_binary(Body))) - end); -should_get_att_with_accept_gzip_encoding({text, _}, {Data, {_, _, AttUrl}}) -> - ?_test(begin - {ok, Code, Headers, Body} = test_request:get( - AttUrl, [{"Accept-Encoding", "gzip"}]), - ?assertEqual(200, Code), - ?assert(lists:member({"Content-Encoding", "gzip"}, Headers)), - ?assertEqual(Data, zlib:gunzip(iolist_to_binary(Body))) - end); -should_get_att_with_accept_gzip_encoding({binary, _}, {Data, {_, _, AttUrl}}) -> - ?_test(begin - {ok, Code, Headers, Body} = test_request:get( - AttUrl, [{"Accept-Encoding", "gzip"}]), - ?assertEqual(200, Code), - ?assertEqual(undefined, - couch_util:get_value("Content-Encoding", Headers)), - ?assertEqual(Data, iolist_to_binary(Body)) - end). - -should_get_att_with_accept_deflate_encoding(_, {Data, {_, _, AttUrl}}) -> - ?_test(begin - {ok, Code, Headers, Body} = test_request:get( - AttUrl, [{"Accept-Encoding", "deflate"}]), - ?assertEqual(200, Code), - ?assertEqual(undefined, - couch_util:get_value("Content-Encoding", Headers)), - ?assertEqual(Data, iolist_to_binary(Body)) - end). - -should_return_406_response_on_unsupported_encoding(_, {_, {_, _, AttUrl}}) -> - ?_assertEqual(406, - begin - {ok, Code, _, _} = test_request:get( - AttUrl, [{"Accept-Encoding", "deflate, *;q=0"}]), - Code - end). - -should_get_doc_with_att_data(compressed, {Data, {_, DocUrl, _}}) -> - ?_test(begin - Url = DocUrl ++ "?attachments=true", - {ok, Code, _, Body} = test_request:get( - Url, [{"Accept", "application/json"}]), - ?assertEqual(200, Code), - Json = ejson:decode(Body), - AttJson = couch_util:get_nested_json_value( - Json, [<<"_attachments">>, ?ATT_TXT_NAME]), - AttData = couch_util:get_nested_json_value( - AttJson, [<<"data">>]), - ?assertEqual( - <<"text/plain">>, - couch_util:get_nested_json_value(AttJson,[<<"content_type">>])), - ?assertEqual(Data, base64:decode(AttData)) - end); -should_get_doc_with_att_data({text, _}, {Data, {_, DocUrl, _}}) -> - ?_test(begin - Url = DocUrl ++ "?attachments=true", - {ok, Code, _, Body} = test_request:get( - Url, [{"Accept", "application/json"}]), - ?assertEqual(200, Code), - Json = ejson:decode(Body), - AttJson = couch_util:get_nested_json_value( - Json, [<<"_attachments">>, ?ATT_TXT_NAME]), - AttData = couch_util:get_nested_json_value( - AttJson, [<<"data">>]), - ?assertEqual( - <<"text/plain">>, - couch_util:get_nested_json_value(AttJson,[<<"content_type">>])), - ?assertEqual(Data, base64:decode(AttData)) - end); -should_get_doc_with_att_data({binary, _}, {Data, {_, DocUrl, _}}) -> - ?_test(begin - Url = DocUrl ++ "?attachments=true", - {ok, Code, _, Body} = test_request:get( - Url, [{"Accept", "application/json"}]), - ?assertEqual(200, Code), - Json = ejson:decode(Body), - AttJson = couch_util:get_nested_json_value( - Json, [<<"_attachments">>, ?ATT_BIN_NAME]), - AttData = couch_util:get_nested_json_value( - AttJson, [<<"data">>]), - ?assertEqual( - <<"image/png">>, - couch_util:get_nested_json_value(AttJson,[<<"content_type">>])), - ?assertEqual(Data, base64:decode(AttData)) - end). 
- -should_get_doc_with_att_data_stub(compressed, {Data, {_, DocUrl, _}}) -> - ?_test(begin - Url = DocUrl ++ "?att_encoding_info=true", - {ok, Code, _, Body} = test_request:get( - Url, [{"Accept", "application/json"}]), - ?assertEqual(200, Code), - Json = ejson:decode(Body), - {AttJson} = couch_util:get_nested_json_value( - Json, [<<"_attachments">>, ?ATT_TXT_NAME]), - ?assertEqual(<<"gzip">>, - couch_util:get_value(<<"encoding">>, AttJson)), - AttLength = couch_util:get_value(<<"length">>, AttJson), - EncLength = couch_util:get_value(<<"encoded_length">>, AttJson), - ?assertEqual(AttLength, EncLength), - ?assertEqual(iolist_size(zlib:gzip(Data)), AttLength) - end); -should_get_doc_with_att_data_stub({text, _}, {Data, {_, DocUrl, _}}) -> - ?_test(begin - Url = DocUrl ++ "?att_encoding_info=true", - {ok, Code, _, Body} = test_request:get( - Url, [{"Accept", "application/json"}]), - ?assertEqual(200, Code), - Json = ejson:decode(Body), - {AttJson} = couch_util:get_nested_json_value( - Json, [<<"_attachments">>, ?ATT_TXT_NAME]), - ?assertEqual(<<"gzip">>, - couch_util:get_value(<<"encoding">>, AttJson)), - AttEncLength = iolist_size(gzip(Data)), - ?assertEqual(AttEncLength, - couch_util:get_value(<<"encoded_length">>, AttJson)), - ?assertEqual(byte_size(Data), - couch_util:get_value(<<"length">>, AttJson)) - end); -should_get_doc_with_att_data_stub({binary, _}, {Data, {_, DocUrl, _}}) -> - ?_test(begin - Url = DocUrl ++ "?att_encoding_info=true", - {ok, Code, _, Body} = test_request:get( - Url, [{"Accept", "application/json"}]), - ?assertEqual(200, Code), - Json = ejson:decode(Body), - {AttJson} = couch_util:get_nested_json_value( - Json, [<<"_attachments">>, ?ATT_BIN_NAME]), - ?assertEqual(undefined, - couch_util:get_value(<<"encoding">>, AttJson)), - ?assertEqual(undefined, - couch_util:get_value(<<"encoded_length">>, AttJson)), - ?assertEqual(byte_size(Data), - couch_util:get_value(<<"length">>, AttJson)) - end). - -should_not_create_compressed_att_with_deflate_encoding({Host, DbName}) -> - ?_assertEqual(415, - begin - HttpHost = "http://" ++ Host, - AttUrl = string:join([HttpHost, DbName, ?docid(), "file.txt"], "/"), - {ok, Data} = file:read_file(?FIXTURE_TXT), - Body = zlib:compress(Data), - Headers = [ - {"Content-Encoding", "deflate"}, - {"Content-Type", "text/plain"} - ], - {ok, Code, _, _} = test_request:put(AttUrl, Headers, Body), - Code - end). - -should_not_create_compressed_att_with_compress_encoding({Host, DbName}) -> - % Note: As of OTP R13B04, it seems there's no LZW compression - % (i.e. UNIX compress utility implementation) lib in OTP. - % However there's a simple working Erlang implementation at: - % http://scienceblogs.com/goodmath/2008/01/simple_lempelziv_compression_i.php - ?_assertEqual(415, - begin - HttpHost = "http://" ++ Host, - AttUrl = string:join([HttpHost, DbName, ?docid(), "file.txt"], "/"), - {ok, Data} = file:read_file(?FIXTURE_TXT), - Headers = [ - {"Content-Encoding", "compress"}, - {"Content-Type", "text/plain"} - ], - {ok, Code, _, _} = test_request:put(AttUrl, Headers, Data), - Code - end). 
- -should_create_compressible_att_with_ctype_params({Host, DbName}) -> - {timeout, ?TIMEOUT_EUNIT, ?_test(begin - HttpHost = "http://" ++ Host, - DocUrl = string:join([HttpHost, DbName, ?docid()], "/"), - AttUrl = string:join([DocUrl, ?b2l(?ATT_TXT_NAME)], "/"), - {ok, Data} = file:read_file(?FIXTURE_TXT), - Headers = [{"Content-Type", "text/plain; charset=UTF-8"}], - {ok, Code0, _, _} = test_request:put(AttUrl, Headers, Data), - ?assertEqual(201, Code0), - - {ok, Code1, _, Body} = test_request:get( - DocUrl ++ "?att_encoding_info=true"), - ?assertEqual(200, Code1), - Json = ejson:decode(Body), - {AttJson} = couch_util:get_nested_json_value( - Json, [<<"_attachments">>, ?ATT_TXT_NAME]), - ?assertEqual(<<"gzip">>, - couch_util:get_value(<<"encoding">>, AttJson)), - AttEncLength = iolist_size(gzip(Data)), - ?assertEqual(AttEncLength, - couch_util:get_value(<<"encoded_length">>, AttJson)), - ?assertEqual(byte_size(Data), - couch_util:get_value(<<"length">>, AttJson)) - end)}. - - -get_json(Json, Path) -> - couch_util:get_nested_json_value(Json, Path). - -to_hex(Val) -> - to_hex(Val, []). - -to_hex(0, Acc) -> - Acc; -to_hex(Val, Acc) -> - to_hex(Val div 16, [hex_char(Val rem 16) | Acc]). - -hex_char(V) when V < 10 -> $0 + V; -hex_char(V) -> $A + V - 10. - -chunked_body(Chunks) -> - chunked_body(Chunks, []). - -chunked_body([], Acc) -> - iolist_to_binary(lists:reverse(Acc, "0\r\n")); -chunked_body([Chunk | Rest], Acc) -> - Size = to_hex(size(Chunk)), - chunked_body(Rest, ["\r\n", Chunk, "\r\n", Size | Acc]). - -get_socket() -> - Options = [binary, {packet, 0}, {active, false}], - Addr = config:get("httpd", "bind_address", any), - Port = mochiweb_socket_server:get(couch_httpd, port), - {ok, Sock} = gen_tcp:connect(Addr, Port, Options), - Sock. - -request(Method, Url, Headers, Body) -> - RequestHead = [Method, " ", Url, " HTTP/1.1"], - RequestHeaders = [[string:join([Key, Value], ": "), "\r\n"] - || {Key, Value} <- Headers], - Request = [RequestHead, "\r\n", RequestHeaders, "\r\n", Body, "\r\n"], - Sock = get_socket(), - gen_tcp:send(Sock, list_to_binary(lists:flatten(Request))), - timer:sleep(?TIMEWAIT), % must wait to receive complete response - {ok, R} = gen_tcp:recv(Sock, 0), - gen_tcp:close(Sock), - [Header, Body1] = re:split(R, "\r\n\r\n", [{return, binary}]), - {ok, {http_response, _, Code, _}, _} = - erlang:decode_packet(http, Header, []), - Json = ejson:decode(Body1), - {ok, Code, Json}. - -create_standalone_text_att(Host, DbName) -> - {ok, Data} = file:read_file(?FIXTURE_TXT), - Url = string:join([Host, DbName, "doc", ?b2l(?ATT_TXT_NAME)], "/"), - {ok, Code, _Headers, _Body} = test_request:put( - Url, [{"Content-Type", "text/plain"}], Data), - ?assertEqual(201, Code), - Url. - -create_standalone_png_att(Host, DbName) -> - {ok, Data} = file:read_file(?FIXTURE_PNG), - Url = string:join([Host, DbName, "doc", ?b2l(?ATT_BIN_NAME)], "/"), - {ok, Code, _Headers, _Body} = test_request:put( - Url, [{"Content-Type", "image/png"}], Data), - ?assertEqual(201, Code), - Url. - -create_inline_text_att(Host, DbName) -> - {ok, Data} = file:read_file(?FIXTURE_TXT), - Url = string:join([Host, DbName, "doc"], "/"), - Doc = {[ - {<<"_attachments">>, {[ - {?ATT_TXT_NAME, {[ - {<<"content_type">>, <<"text/plain">>}, - {<<"data">>, base64:encode(Data)} - ]} - }]}} - ]}, - {ok, Code, _Headers, _Body} = test_request:put( - Url, [{"Content-Type", "application/json"}], ejson:encode(Doc)), - ?assertEqual(201, Code), - string:join([Url, ?b2l(?ATT_TXT_NAME)], "/"). 
- -create_inline_png_att(Host, DbName) -> - {ok, Data} = file:read_file(?FIXTURE_PNG), - Url = string:join([Host, DbName, "doc"], "/"), - Doc = {[ - {<<"_attachments">>, {[ - {?ATT_BIN_NAME, {[ - {<<"content_type">>, <<"image/png">>}, - {<<"data">>, base64:encode(Data)} - ]} - }]}} - ]}, - {ok, Code, _Headers, _Body} = test_request:put( - Url, [{"Content-Type", "application/json"}], ejson:encode(Doc)), - ?assertEqual(201, Code), - string:join([Url, ?b2l(?ATT_BIN_NAME)], "/"). - -create_already_compressed_att(Host, DbName) -> - {ok, Data} = file:read_file(?FIXTURE_TXT), - Url = string:join([Host, DbName, "doc", ?b2l(?ATT_TXT_NAME)], "/"), - {ok, Code, _Headers, _Body} = test_request:put( - Url, [{"Content-Type", "text/plain"}, {"Content-Encoding", "gzip"}], - zlib:gzip(Data)), - ?assertEqual(201, Code), - Url. - -gzip(Data) -> - Z = zlib:open(), - ok = zlib:deflateInit(Z, ?COMPRESSION_LEVEL, deflated, 16 + 15, 8, default), - zlib:deflate(Z, Data), - Last = zlib:deflate(Z, [], finish), - ok = zlib:deflateEnd(Z), - ok = zlib:close(Z), - Last. +%% start() -> +%% ok = test_util:start_couch(), +%% % ensure in default compression settings for attachments_compression_tests +%% config:set("attachments", "compression_level", +%% ?i2l(?COMPRESSION_LEVEL), false), +%% config:set("attachments", "compressible_types", "text/*", false), +%% ok. + +%% setup() -> +%% DbName = ?tempdb(), +%% {ok, Db} = couch_db:create(DbName, []), +%% ok = couch_db:close(Db), +%% Addr = config:get("httpd", "bind_address", any), +%% Port = mochiweb_socket_server:get(couch_httpd, port), +%% Host = Addr ++ ":" ++ ?i2l(Port), +%% {Host, ?b2l(DbName)}. + +%% setup({binary, standalone}) -> +%% {Host, DbName} = setup(), +%% setup_att(fun create_standalone_png_att/2, Host, DbName, ?FIXTURE_PNG); +%% setup({text, standalone}) -> +%% {Host, DbName} = setup(), +%% setup_att(fun create_standalone_text_att/2, Host, DbName, ?FIXTURE_TXT); +%% setup({binary, inline}) -> +%% {Host, DbName} = setup(), +%% setup_att(fun create_inline_png_att/2, Host, DbName, ?FIXTURE_PNG); +%% setup({text, inline}) -> +%% {Host, DbName} = setup(), +%% setup_att(fun create_inline_text_att/2, Host, DbName, ?FIXTURE_TXT); +%% setup(compressed) -> +%% {Host, DbName} = setup(), +%% setup_att(fun create_already_compressed_att/2, Host, DbName, ?FIXTURE_TXT). +%% setup_att(Fun, Host, DbName, File) -> +%% HttpHost = "http://" ++ Host, +%% AttUrl = Fun(HttpHost, DbName), +%% {ok, Data} = file:read_file(File), +%% DocUrl = string:join([HttpHost, DbName, "doc"], "/"), +%% Helpers = {DbName, DocUrl, AttUrl}, +%% {Data, Helpers}. + +%% teardown(_, {_, {DbName, _, _}}) -> +%% teardown(DbName). + +%% teardown({_, DbName}) -> +%% teardown(DbName); +%% teardown(DbName) -> +%% ok = couch_server:delete(?l2b(DbName), []), +%% ok. + + +%% attachments_test_() -> +%% { +%% "Attachments tests", +%% { +%% setup, +%% fun start/0, fun test_util:stop_couch/1, +%% [ +%% attachments_md5_tests(), +%% attachments_compression_tests() +%% ] +%% } +%% }. 
+ +%% attachments_md5_tests() -> +%% { +%% "Attachments MD5 tests", +%% { +%% foreach, +%% fun setup/0, fun teardown/1, +%% [ +%% fun should_upload_attachment_without_md5/1, +%% fun should_upload_attachment_by_chunks_without_md5/1, +%% fun should_upload_attachment_with_valid_md5_header/1, +%% fun should_upload_attachment_by_chunks_with_valid_md5_header/1, +%% fun should_upload_attachment_by_chunks_with_valid_md5_trailer/1, +%% fun should_reject_attachment_with_invalid_md5/1, +%% fun should_reject_chunked_attachment_with_invalid_md5/1, +%% fun should_reject_chunked_attachment_with_invalid_md5_trailer/1 +%% ] +%% } +%% }. + +%% attachments_compression_tests() -> +%% Funs = [ +%% fun should_get_att_without_accept_gzip_encoding/2, +%% fun should_get_att_with_accept_gzip_encoding/2, +%% fun should_get_att_with_accept_deflate_encoding/2, +%% fun should_return_406_response_on_unsupported_encoding/2, +%% fun should_get_doc_with_att_data/2, +%% fun should_get_doc_with_att_data_stub/2 +%% ], +%% { +%% "Attachments compression tests", +%% [ +%% { +%% "Created via Attachments API", +%% created_attachments_compression_tests(standalone, Funs) +%% }, +%% { +%% "Created inline via Document API", +%% created_attachments_compression_tests(inline, Funs) +%% }, +%% { +%% "Created already been compressed via Attachments API", +%% { +%% foreachx, +%% fun setup/1, fun teardown/2, +%% [{compressed, Fun} || Fun <- Funs] +%% } +%% }, +%% { +%% foreach, +%% fun setup/0, fun teardown/1, +%% [ +%% fun should_not_create_compressed_att_with_deflate_encoding/1, +%% fun should_not_create_compressed_att_with_compress_encoding/1, +%% fun should_create_compressible_att_with_ctype_params/1 +%% ] +%% } +%% ] +%% }. + +%% created_attachments_compression_tests(Mod, Funs) -> +%% [ +%% { +%% "Compressiable attachments", +%% { +%% foreachx, +%% fun setup/1, fun teardown/2, +%% [{{text, Mod}, Fun} || Fun <- Funs] +%% } +%% }, +%% { +%% "Uncompressiable attachments", +%% { +%% foreachx, +%% fun setup/1, fun teardown/2, +%% [{{binary, Mod}, Fun} || Fun <- Funs] +%% } +%% } +%% ]. + + + +%% should_upload_attachment_without_md5({Host, DbName}) -> +%% ?_test(begin +%% AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), +%% Body = "We all live in a yellow submarine!", +%% Headers = [ +%% {"Content-Length", "34"}, +%% {"Content-Type", "text/plain"}, +%% {"Host", Host} +%% ], +%% {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), +%% ?assertEqual(201, Code), +%% ?assertEqual(true, get_json(Json, [<<"ok">>])) +%% end). + +%% should_upload_attachment_by_chunks_without_md5({Host, DbName}) -> +%% ?_test(begin +%% AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), +%% AttData = <<"We all live in a yellow submarine!">>, +%% <<Part1:21/binary, Part2:13/binary>> = AttData, +%% Body = chunked_body([Part1, Part2]), +%% Headers = [ +%% {"Content-Type", "text/plain"}, +%% {"Transfer-Encoding", "chunked"}, +%% {"Host", Host} +%% ], +%% {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), +%% ?assertEqual(201, Code), +%% ?assertEqual(true, get_json(Json, [<<"ok">>])) +%% end). 
+ +%% should_upload_attachment_with_valid_md5_header({Host, DbName}) -> +%% ?_test(begin +%% AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), +%% Body = "We all live in a yellow submarine!", +%% Headers = [ +%% {"Content-Length", "34"}, +%% {"Content-Type", "text/plain"}, +%% {"Content-MD5", ?b2l(base64:encode(couch_util:md5(Body)))}, +%% {"Host", Host} +%% ], +%% {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), +%% ?assertEqual(201, Code), +%% ?assertEqual(true, get_json(Json, [<<"ok">>])) +%% end). + +%% should_upload_attachment_by_chunks_with_valid_md5_header({Host, DbName}) -> +%% ?_test(begin +%% AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), +%% AttData = <<"We all live in a yellow submarine!">>, +%% <<Part1:21/binary, Part2:13/binary>> = AttData, +%% Body = chunked_body([Part1, Part2]), +%% Headers = [ +%% {"Content-Type", "text/plain"}, +%% {"Content-MD5", ?b2l(base64:encode(couch_util:md5(AttData)))}, +%% {"Host", Host}, +%% {"Transfer-Encoding", "chunked"} +%% ], +%% {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), +%% ?assertEqual(201, Code), +%% ?assertEqual(true, get_json(Json, [<<"ok">>])) +%% end). + +%% should_upload_attachment_by_chunks_with_valid_md5_trailer({Host, DbName}) -> +%% ?_test(begin +%% AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), +%% AttData = <<"We all live in a yellow submarine!">>, +%% <<Part1:21/binary, Part2:13/binary>> = AttData, +%% Body = [chunked_body([Part1, Part2]), +%% "Content-MD5: ", base64:encode(couch_util:md5(AttData)), +%% "\r\n"], +%% Headers = [ +%% {"Content-Type", "text/plain"}, +%% {"Host", Host}, +%% {"Trailer", "Content-MD5"}, +%% {"Transfer-Encoding", "chunked"} +%% ], +%% {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), +%% ?assertEqual(201, Code), +%% ?assertEqual(true, get_json(Json, [<<"ok">>])) +%% end). + +%% should_reject_attachment_with_invalid_md5({Host, DbName}) -> +%% ?_test(begin +%% AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), +%% Body = "We all live in a yellow submarine!", +%% Headers = [ +%% {"Content-Length", "34"}, +%% {"Content-Type", "text/plain"}, +%% {"Content-MD5", ?b2l(base64:encode(<<"foobar!">>))}, +%% {"Host", Host} +%% ], +%% {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), +%% ?assertEqual(400, Code), +%% ?assertEqual(<<"content_md5_mismatch">>, +%% get_json(Json, [<<"error">>])) +%% end). + + +%% should_reject_chunked_attachment_with_invalid_md5({Host, DbName}) -> +%% ?_test(begin +%% AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), +%% AttData = <<"We all live in a yellow submarine!">>, +%% <<Part1:21/binary, Part2:13/binary>> = AttData, +%% Body = chunked_body([Part1, Part2]), +%% Headers = [ +%% {"Content-Type", "text/plain"}, +%% {"Content-MD5", ?b2l(base64:encode(<<"foobar!">>))}, +%% {"Host", Host}, +%% {"Transfer-Encoding", "chunked"} +%% ], +%% {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), +%% ?assertEqual(400, Code), +%% ?assertEqual(<<"content_md5_mismatch">>, +%% get_json(Json, [<<"error">>])) +%% end). 
+ +%% should_reject_chunked_attachment_with_invalid_md5_trailer({Host, DbName}) -> +%% ?_test(begin +%% AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"), +%% AttData = <<"We all live in a yellow submarine!">>, +%% <<Part1:21/binary, Part2:13/binary>> = AttData, +%% Body = [chunked_body([Part1, Part2]), +%% "Content-MD5: ", base64:encode(<<"foobar!">>), +%% "\r\n"], +%% Headers = [ +%% {"Content-Type", "text/plain"}, +%% {"Host", Host}, +%% {"Trailer", "Content-MD5"}, +%% {"Transfer-Encoding", "chunked"} +%% ], +%% {ok, Code, Json} = request("PUT", AttUrl, Headers, Body), +%% ?assertEqual(400, Code), +%% ?assertEqual(<<"content_md5_mismatch">>, get_json(Json, [<<"error">>])) +%% end). + +%% should_get_att_without_accept_gzip_encoding(_, {Data, {_, _, AttUrl}}) -> +%% ?_test(begin +%% {ok, Code, Headers, Body} = test_request:get(AttUrl), +%% ?assertEqual(200, Code), +%% ?assertNot(lists:member({"Content-Encoding", "gzip"}, Headers)), +%% ?assertEqual(Data, iolist_to_binary(Body)) +%% end). + +%% should_get_att_with_accept_gzip_encoding(compressed, {Data, {_, _, AttUrl}}) -> +%% ?_test(begin +%% {ok, Code, Headers, Body} = test_request:get( +%% AttUrl, [{"Accept-Encoding", "gzip"}]), +%% ?assertEqual(200, Code), +%% ?assert(lists:member({"Content-Encoding", "gzip"}, Headers)), +%% ?assertEqual(Data, zlib:gunzip(iolist_to_binary(Body))) +%% end); +%% should_get_att_with_accept_gzip_encoding({text, _}, {Data, {_, _, AttUrl}}) -> +%% ?_test(begin +%% {ok, Code, Headers, Body} = test_request:get( +%% AttUrl, [{"Accept-Encoding", "gzip"}]), +%% ?assertEqual(200, Code), +%% ?assert(lists:member({"Content-Encoding", "gzip"}, Headers)), +%% ?assertEqual(Data, zlib:gunzip(iolist_to_binary(Body))) +%% end); +%% should_get_att_with_accept_gzip_encoding({binary, _}, {Data, {_, _, AttUrl}}) -> +%% ?_test(begin +%% {ok, Code, Headers, Body} = test_request:get( +%% AttUrl, [{"Accept-Encoding", "gzip"}]), +%% ?assertEqual(200, Code), +%% ?assertEqual(undefined, +%% couch_util:get_value("Content-Encoding", Headers)), +%% ?assertEqual(Data, iolist_to_binary(Body)) +%% end). + +%% should_get_att_with_accept_deflate_encoding(_, {Data, {_, _, AttUrl}}) -> +%% ?_test(begin +%% {ok, Code, Headers, Body} = test_request:get( +%% AttUrl, [{"Accept-Encoding", "deflate"}]), +%% ?assertEqual(200, Code), +%% ?assertEqual(undefined, +%% couch_util:get_value("Content-Encoding", Headers)), +%% ?assertEqual(Data, iolist_to_binary(Body)) +%% end). + +%% should_return_406_response_on_unsupported_encoding(_, {_, {_, _, AttUrl}}) -> +%% ?_assertEqual(406, +%% begin +%% {ok, Code, _, _} = test_request:get( +%% AttUrl, [{"Accept-Encoding", "deflate, *;q=0"}]), +%% Code +%% end). 
+ +%% should_get_doc_with_att_data(compressed, {Data, {_, DocUrl, _}}) -> +%% ?_test(begin +%% Url = DocUrl ++ "?attachments=true", +%% {ok, Code, _, Body} = test_request:get( +%% Url, [{"Accept", "application/json"}]), +%% ?assertEqual(200, Code), +%% Json = ejson:decode(Body), +%% AttJson = couch_util:get_nested_json_value( +%% Json, [<<"_attachments">>, ?ATT_TXT_NAME]), +%% AttData = couch_util:get_nested_json_value( +%% AttJson, [<<"data">>]), +%% ?assertEqual( +%% <<"text/plain">>, +%% couch_util:get_nested_json_value(AttJson,[<<"content_type">>])), +%% ?assertEqual(Data, base64:decode(AttData)) +%% end); +%% should_get_doc_with_att_data({text, _}, {Data, {_, DocUrl, _}}) -> +%% ?_test(begin +%% Url = DocUrl ++ "?attachments=true", +%% {ok, Code, _, Body} = test_request:get( +%% Url, [{"Accept", "application/json"}]), +%% ?assertEqual(200, Code), +%% Json = ejson:decode(Body), +%% AttJson = couch_util:get_nested_json_value( +%% Json, [<<"_attachments">>, ?ATT_TXT_NAME]), +%% AttData = couch_util:get_nested_json_value( +%% AttJson, [<<"data">>]), +%% ?assertEqual( +%% <<"text/plain">>, +%% couch_util:get_nested_json_value(AttJson,[<<"content_type">>])), +%% ?assertEqual(Data, base64:decode(AttData)) +%% end); +%% should_get_doc_with_att_data({binary, _}, {Data, {_, DocUrl, _}}) -> +%% ?_test(begin +%% Url = DocUrl ++ "?attachments=true", +%% {ok, Code, _, Body} = test_request:get( +%% Url, [{"Accept", "application/json"}]), +%% ?assertEqual(200, Code), +%% Json = ejson:decode(Body), +%% AttJson = couch_util:get_nested_json_value( +%% Json, [<<"_attachments">>, ?ATT_BIN_NAME]), +%% AttData = couch_util:get_nested_json_value( +%% AttJson, [<<"data">>]), +%% ?assertEqual( +%% <<"image/png">>, +%% couch_util:get_nested_json_value(AttJson,[<<"content_type">>])), +%% ?assertEqual(Data, base64:decode(AttData)) +%% end). 
+ +%% should_get_doc_with_att_data_stub(compressed, {Data, {_, DocUrl, _}}) -> +%% ?_test(begin +%% Url = DocUrl ++ "?att_encoding_info=true", +%% {ok, Code, _, Body} = test_request:get( +%% Url, [{"Accept", "application/json"}]), +%% ?assertEqual(200, Code), +%% Json = ejson:decode(Body), +%% {AttJson} = couch_util:get_nested_json_value( +%% Json, [<<"_attachments">>, ?ATT_TXT_NAME]), +%% ?assertEqual(<<"gzip">>, +%% couch_util:get_value(<<"encoding">>, AttJson)), +%% AttLength = couch_util:get_value(<<"length">>, AttJson), +%% EncLength = couch_util:get_value(<<"encoded_length">>, AttJson), +%% ?assertEqual(AttLength, EncLength), +%% ?assertEqual(iolist_size(zlib:gzip(Data)), AttLength) +%% end); +%% should_get_doc_with_att_data_stub({text, _}, {Data, {_, DocUrl, _}}) -> +%% ?_test(begin +%% Url = DocUrl ++ "?att_encoding_info=true", +%% {ok, Code, _, Body} = test_request:get( +%% Url, [{"Accept", "application/json"}]), +%% ?assertEqual(200, Code), +%% Json = ejson:decode(Body), +%% {AttJson} = couch_util:get_nested_json_value( +%% Json, [<<"_attachments">>, ?ATT_TXT_NAME]), +%% ?assertEqual(<<"gzip">>, +%% couch_util:get_value(<<"encoding">>, AttJson)), +%% AttEncLength = iolist_size(gzip(Data)), +%% ?assertEqual(AttEncLength, +%% couch_util:get_value(<<"encoded_length">>, AttJson)), +%% ?assertEqual(byte_size(Data), +%% couch_util:get_value(<<"length">>, AttJson)) +%% end); +%% should_get_doc_with_att_data_stub({binary, _}, {Data, {_, DocUrl, _}}) -> +%% ?_test(begin +%% Url = DocUrl ++ "?att_encoding_info=true", +%% {ok, Code, _, Body} = test_request:get( +%% Url, [{"Accept", "application/json"}]), +%% ?assertEqual(200, Code), +%% Json = ejson:decode(Body), +%% {AttJson} = couch_util:get_nested_json_value( +%% Json, [<<"_attachments">>, ?ATT_BIN_NAME]), +%% ?assertEqual(undefined, +%% couch_util:get_value(<<"encoding">>, AttJson)), +%% ?assertEqual(undefined, +%% couch_util:get_value(<<"encoded_length">>, AttJson)), +%% ?assertEqual(byte_size(Data), +%% couch_util:get_value(<<"length">>, AttJson)) +%% end). + +%% should_not_create_compressed_att_with_deflate_encoding({Host, DbName}) -> +%% ?_assertEqual(415, +%% begin +%% HttpHost = "http://" ++ Host, +%% AttUrl = string:join([HttpHost, DbName, ?docid(), "file.txt"], "/"), +%% {ok, Data} = file:read_file(?FIXTURE_TXT), +%% Body = zlib:compress(Data), +%% Headers = [ +%% {"Content-Encoding", "deflate"}, +%% {"Content-Type", "text/plain"} +%% ], +%% {ok, Code, _, _} = test_request:put(AttUrl, Headers, Body), +%% Code +%% end). + +%% should_not_create_compressed_att_with_compress_encoding({Host, DbName}) -> +%% % Note: As of OTP R13B04, it seems there's no LZW compression +%% % (i.e. UNIX compress utility implementation) lib in OTP. +%% % However there's a simple working Erlang implementation at: +%% % http://scienceblogs.com/goodmath/2008/01/simple_lempelziv_compression_i.php +%% ?_assertEqual(415, +%% begin +%% HttpHost = "http://" ++ Host, +%% AttUrl = string:join([HttpHost, DbName, ?docid(), "file.txt"], "/"), +%% {ok, Data} = file:read_file(?FIXTURE_TXT), +%% Headers = [ +%% {"Content-Encoding", "compress"}, +%% {"Content-Type", "text/plain"} +%% ], +%% {ok, Code, _, _} = test_request:put(AttUrl, Headers, Data), +%% Code +%% end). 
+ +%% should_create_compressible_att_with_ctype_params({Host, DbName}) -> +%% {timeout, ?TIMEOUT_EUNIT, ?_test(begin +%% HttpHost = "http://" ++ Host, +%% DocUrl = string:join([HttpHost, DbName, ?docid()], "/"), +%% AttUrl = string:join([DocUrl, ?b2l(?ATT_TXT_NAME)], "/"), +%% {ok, Data} = file:read_file(?FIXTURE_TXT), +%% Headers = [{"Content-Type", "text/plain; charset=UTF-8"}], +%% {ok, Code0, _, _} = test_request:put(AttUrl, Headers, Data), +%% ?assertEqual(201, Code0), + +%% {ok, Code1, _, Body} = test_request:get( +%% DocUrl ++ "?att_encoding_info=true"), +%% ?assertEqual(200, Code1), +%% Json = ejson:decode(Body), +%% {AttJson} = couch_util:get_nested_json_value( +%% Json, [<<"_attachments">>, ?ATT_TXT_NAME]), +%% ?assertEqual(<<"gzip">>, +%% couch_util:get_value(<<"encoding">>, AttJson)), +%% AttEncLength = iolist_size(gzip(Data)), +%% ?assertEqual(AttEncLength, +%% couch_util:get_value(<<"encoded_length">>, AttJson)), +%% ?assertEqual(byte_size(Data), +%% couch_util:get_value(<<"length">>, AttJson)) +%% end)}. + + +%% get_json(Json, Path) -> +%% couch_util:get_nested_json_value(Json, Path). + +%% to_hex(Val) -> +%% to_hex(Val, []). + +%% to_hex(0, Acc) -> +%% Acc; +%% to_hex(Val, Acc) -> +%% to_hex(Val div 16, [hex_char(Val rem 16) | Acc]). + +%% hex_char(V) when V < 10 -> $0 + V; +%% hex_char(V) -> $A + V - 10. + +%% chunked_body(Chunks) -> +%% chunked_body(Chunks, []). + +%% chunked_body([], Acc) -> +%% iolist_to_binary(lists:reverse(Acc, "0\r\n")); +%% chunked_body([Chunk | Rest], Acc) -> +%% Size = to_hex(size(Chunk)), +%% chunked_body(Rest, ["\r\n", Chunk, "\r\n", Size | Acc]). + +%% get_socket() -> +%% Options = [binary, {packet, 0}, {active, false}], +%% Addr = config:get("httpd", "bind_address", any), +%% Port = mochiweb_socket_server:get(couch_httpd, port), +%% {ok, Sock} = gen_tcp:connect(Addr, Port, Options), +%% Sock. + +%% request(Method, Url, Headers, Body) -> +%% RequestHead = [Method, " ", Url, " HTTP/1.1"], +%% RequestHeaders = [[string:join([Key, Value], ": "), "\r\n"] +%% || {Key, Value} <- Headers], +%% Request = [RequestHead, "\r\n", RequestHeaders, "\r\n", Body, "\r\n"], +%% Sock = get_socket(), +%% gen_tcp:send(Sock, list_to_binary(lists:flatten(Request))), +%% timer:sleep(?TIMEWAIT), % must wait to receive complete response +%% {ok, R} = gen_tcp:recv(Sock, 0), +%% gen_tcp:close(Sock), +%% [Header, Body1] = re:split(R, "\r\n\r\n", [{return, binary}]), +%% {ok, {http_response, _, Code, _}, _} = +%% erlang:decode_packet(http, Header, []), +%% Json = ejson:decode(Body1), +%% {ok, Code, Json}. + +%% create_standalone_text_att(Host, DbName) -> +%% {ok, Data} = file:read_file(?FIXTURE_TXT), +%% Url = string:join([Host, DbName, "doc", ?b2l(?ATT_TXT_NAME)], "/"), +%% {ok, Code, _Headers, _Body} = test_request:put( +%% Url, [{"Content-Type", "text/plain"}], Data), +%% ?assertEqual(201, Code), +%% Url. + +%% create_standalone_png_att(Host, DbName) -> +%% {ok, Data} = file:read_file(?FIXTURE_PNG), +%% Url = string:join([Host, DbName, "doc", ?b2l(?ATT_BIN_NAME)], "/"), +%% {ok, Code, _Headers, _Body} = test_request:put( +%% Url, [{"Content-Type", "image/png"}], Data), +%% ?assertEqual(201, Code), +%% Url. 
+ +%% create_inline_text_att(Host, DbName) -> +%% {ok, Data} = file:read_file(?FIXTURE_TXT), +%% Url = string:join([Host, DbName, "doc"], "/"), +%% Doc = {[ +%% {<<"_attachments">>, {[ +%% {?ATT_TXT_NAME, {[ +%% {<<"content_type">>, <<"text/plain">>}, +%% {<<"data">>, base64:encode(Data)} +%% ]} +%% }]}} +%% ]}, +%% {ok, Code, _Headers, _Body} = test_request:put( +%% Url, [{"Content-Type", "application/json"}], ejson:encode(Doc)), +%% ?assertEqual(201, Code), +%% string:join([Url, ?b2l(?ATT_TXT_NAME)], "/"). + +%% create_inline_png_att(Host, DbName) -> +%% {ok, Data} = file:read_file(?FIXTURE_PNG), +%% Url = string:join([Host, DbName, "doc"], "/"), +%% Doc = {[ +%% {<<"_attachments">>, {[ +%% {?ATT_BIN_NAME, {[ +%% {<<"content_type">>, <<"image/png">>}, +%% {<<"data">>, base64:encode(Data)} +%% ]} +%% }]}} +%% ]}, +%% {ok, Code, _Headers, _Body} = test_request:put( +%% Url, [{"Content-Type", "application/json"}], ejson:encode(Doc)), +%% ?assertEqual(201, Code), +%% string:join([Url, ?b2l(?ATT_BIN_NAME)], "/"). + +%% create_already_compressed_att(Host, DbName) -> +%% {ok, Data} = file:read_file(?FIXTURE_TXT), +%% Url = string:join([Host, DbName, "doc", ?b2l(?ATT_TXT_NAME)], "/"), +%% {ok, Code, _Headers, _Body} = test_request:put( +%% Url, [{"Content-Type", "text/plain"}, {"Content-Encoding", "gzip"}], +%% zlib:gzip(Data)), +%% ?assertEqual(201, Code), +%% Url. + +%% gzip(Data) -> +%% Z = zlib:open(), +%% ok = zlib:deflateInit(Z, ?COMPRESSION_LEVEL, deflated, 16 + 15, 8, default), +%% zlib:deflate(Z, Data), +%% Last = zlib:deflate(Z, [], finish), +%% ok = zlib:deflateEnd(Z), +%% ok = zlib:close(Z), +%% Last.
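
The MD5-trailer tests in the module above assemble raw HTTP/1.1 requests by hand through the chunked_body/1, to_hex/1 and request/4 helpers. The following sketch is not part of the diff (the module name and demo data are illustrative only); it shows the chunked transfer-encoding framing those helpers produce, with the Content-MD5 field sent as a trailer after the terminating zero-size chunk:

    %% Minimal sketch of a chunked body that carries Content-MD5 as a trailer,
    %% mirroring chunked_body/1 from the commented-out module above.
    -module(chunked_md5_demo).
    -export([body_with_md5_trailer/1]).

    body_with_md5_trailer(Chunks) ->
        Data = iolist_to_binary(Chunks),
        %% Each chunk is framed as "<hex size>\r\n<data>\r\n"; a zero-size
        %% chunk terminates the stream, then the trailer field and a final
        %% blank line follow.
        Framed = [[integer_to_list(byte_size(C), 16), "\r\n", C, "\r\n"]
                  || C <- Chunks],
        iolist_to_binary([Framed, "0\r\n",
                          "Content-MD5: ", base64:encode(erlang:md5(Data)),
                          "\r\n\r\n"]).

Calling chunked_md5_demo:body_with_md5_trailer([<<"We all live in a yellow ">>, <<"submarine!">>]) yields the same kind of framing that should_upload_attachment_by_chunks_with_valid_md5_trailer/1 pushes through the raw socket in request/4. On the compression side, the gzip/1 helper passes WindowBits = 16 + 15 to zlib:deflateInit/6; the extra 16 selects gzip (rather than raw zlib) framing, which is why its output can be compared against the encoded_length the server reports for compressible attachments at ?COMPRESSION_LEVEL.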
http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/94ae4be2/test/couchdb_compaction_daemon.erl ---------------------------------------------------------------------- diff --git a/test/couchdb_compaction_daemon.erl b/test/couchdb_compaction_daemon.erl index 24c5d7b..7db24fe 100644 --- a/test/couchdb_compaction_daemon.erl +++ b/test/couchdb_compaction_daemon.erl @@ -21,201 +21,201 @@ -define(TIMEOUT_S, ?TIMEOUT div 1000). -start() -> - ok = test_util:start_couch(), - config:set("compaction_daemon", "check_interval", "3", false), - config:set("compaction_daemon", "min_file_size", "100000", false), - ok. - -setup() -> - DbName = ?tempdb(), - {ok, Db} = couch_db:create(DbName, [?ADMIN_USER]), - create_design_doc(Db), - ok = couch_db:close(Db), - DbName. - -teardown(DbName) -> - Configs = config:get("compactions"), - lists:foreach( - fun({Key, _}) -> - ok = config:delete("compactions", Key, false) - end, - Configs), - couch_server:delete(DbName, [?ADMIN_USER]), - ok. - - -compaction_daemon_test_() -> - { - "Compaction daemon tests", - { - setup, - fun start/0, fun test_util:stop_couch/1, - { - foreach, - fun setup/0, fun teardown/1, - [ - fun should_compact_by_default_rule/1, - fun should_compact_by_dbname_rule/1 - ] - } - } - }. - - -should_compact_by_default_rule(DbName) -> - {timeout, ?TIMEOUT_S, ?_test(begin - {ok, Db} = couch_db:open_int(DbName, []), - populate(DbName, 70, 70, 200 * 1024), - - {_, DbFileSize} = get_db_frag(DbName), - {_, ViewFileSize} = get_view_frag(DbName), - - ok = config:set("compactions", "_default", - "[{db_fragmentation, \"70%\"}, {view_fragmentation, \"70%\"}]", - false), - - ok = timer:sleep(4000), % something >= check_interval - wait_compaction_finished(DbName), - ok = config:delete("compactions", "_default", false), - - {DbFrag2, DbFileSize2} = get_db_frag(DbName), - {ViewFrag2, ViewFileSize2} = get_view_frag(DbName), - - ?assert(DbFrag2 < 70), - ?assert(ViewFrag2 < 70), - - ?assert(DbFileSize > DbFileSize2), - ?assert(ViewFileSize > ViewFileSize2), - - ?assert(couch_db:is_idle(Db)), - ok = couch_db:close(Db) - end)}. - -should_compact_by_dbname_rule(DbName) -> - {timeout, ?TIMEOUT_S, ?_test(begin - {ok, Db} = couch_db:open_int(DbName, []), - populate(DbName, 70, 70, 200 * 1024), - - {_, DbFileSize} = get_db_frag(DbName), - {_, ViewFileSize} = get_view_frag(DbName), - - ok = config:set("compactions", ?b2l(DbName), - "[{db_fragmentation, \"70%\"}, {view_fragmentation, \"70%\"}]", - false), - - ok = timer:sleep(4000), % something >= check_interval - wait_compaction_finished(DbName), - ok = config:delete("compactions", ?b2l(DbName), false), - - {DbFrag2, DbFileSize2} = get_db_frag(DbName), - {ViewFrag2, ViewFileSize2} = get_view_frag(DbName), - - ?assert(DbFrag2 < 70), - ?assert(ViewFrag2 < 70), - - ?assert(DbFileSize > DbFileSize2), - ?assert(ViewFileSize > ViewFileSize2), - - ?assert(couch_db:is_idle(Db)), - ok = couch_db:close(Db) - end)}. - - -create_design_doc(Db) -> - DDoc = couch_doc:from_json_obj({[ - {<<"_id">>, <<"_design/foo">>}, - {<<"language">>, <<"javascript">>}, - {<<"views">>, {[ - {<<"foo">>, {[ - {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>} - ]}}, - {<<"foo2">>, {[ - {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>} - ]}}, - {<<"foo3">>, {[ - {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>} - ]}} - ]}} - ]}), - {ok, _} = couch_db:update_docs(Db, [DDoc]), - {ok, _} = couch_db:ensure_full_commit(Db), - ok. 
- -populate(DbName, DbFrag, ViewFrag, MinFileSize) -> - {CurDbFrag, DbFileSize} = get_db_frag(DbName), - {CurViewFrag, ViewFileSize} = get_view_frag(DbName), - populate(DbName, DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag, - lists:min([DbFileSize, ViewFileSize])). - -populate(_Db, DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag, FileSize) - when CurDbFrag >= DbFrag, CurViewFrag >= ViewFrag, FileSize >= MinFileSize -> - ok; -populate(DbName, DbFrag, ViewFrag, MinFileSize, _, _, _) -> - update(DbName), - {CurDbFrag, DbFileSize} = get_db_frag(DbName), - {CurViewFrag, ViewFileSize} = get_view_frag(DbName), - populate(DbName, DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag, - lists:min([DbFileSize, ViewFileSize])). - -update(DbName) -> - {ok, Db} = couch_db:open_int(DbName, []), - lists:foreach(fun(_) -> - Doc = couch_doc:from_json_obj({[{<<"_id">>, couch_uuids:new()}]}), - {ok, _} = couch_db:update_docs(Db, [Doc]), - query_view(Db#db.name) - end, lists:seq(1, 200)), - couch_db:close(Db). - -db_url(DbName) -> - Addr = config:get("httpd", "bind_address", "127.0.0.1"), - Port = integer_to_list(mochiweb_socket_server:get(couch_httpd, port)), - "http://" ++ Addr ++ ":" ++ Port ++ "/" ++ ?b2l(DbName). - -query_view(DbName) -> - {ok, Code, _Headers, _Body} = test_request:get( - db_url(DbName) ++ "/_design/foo/_view/foo"), - ?assertEqual(200, Code). - -get_db_frag(DbName) -> - {ok, Db} = couch_db:open_int(DbName, []), - {ok, Info} = couch_db:get_db_info(Db), - couch_db:close(Db), - FileSize = couch_util:get_value(disk_size, Info), - DataSize = couch_util:get_value(data_size, Info), - {round((FileSize - DataSize) / FileSize * 100), FileSize}. - -get_view_frag(DbName) -> - {ok, Db} = couch_db:open_int(DbName, []), - {ok, Info} = couch_mrview:get_info(Db, <<"_design/foo">>), - couch_db:close(Db), - FileSize = couch_util:get_value(disk_size, Info), - DataSize = couch_util:get_value(data_size, Info), - {round((FileSize - DataSize) / FileSize * 100), FileSize}. - -wait_compaction_finished(DbName) -> - Parent = self(), - Loop = spawn_link(fun() -> wait_loop(DbName, Parent) end), - receive - {done, Loop} -> - ok - after ?TIMEOUT -> - erlang:error( - {assertion_failed, - [{module, ?MODULE}, {line, ?LINE}, - {reason, "Compaction timeout"}]}) - end. - -wait_loop(DbName, Parent) -> - {ok, Db} = couch_db:open_int(DbName, []), - {ok, DbInfo} = couch_db:get_db_info(Db), - {ok, ViewInfo} = couch_mrview:get_info(Db, <<"_design/foo">>), - couch_db:close(Db), - case (couch_util:get_value(compact_running, ViewInfo) =:= true) orelse - (couch_util:get_value(compact_running, DbInfo) =:= true) of - false -> - Parent ! {done, self()}; - true -> - ok = timer:sleep(?DELAY), - wait_loop(DbName, Parent) - end. +%% start() -> +%% ok = test_util:start_couch(), +%% config:set("compaction_daemon", "check_interval", "3", false), +%% config:set("compaction_daemon", "min_file_size", "100000", false), +%% ok. + +%% setup() -> +%% DbName = ?tempdb(), +%% {ok, Db} = couch_db:create(DbName, [?ADMIN_USER]), +%% create_design_doc(Db), +%% ok = couch_db:close(Db), +%% DbName. + +%% teardown(DbName) -> +%% Configs = config:get("compactions"), +%% lists:foreach( +%% fun({Key, _}) -> +%% ok = config:delete("compactions", Key, false) +%% end, +%% Configs), +%% couch_server:delete(DbName, [?ADMIN_USER]), +%% ok. 
+ + +%% compaction_daemon_test_() -> +%% { +%% "Compaction daemon tests", +%% { +%% setup, +%% fun start/0, fun test_util:stop_couch/1, +%% { +%% foreach, +%% fun setup/0, fun teardown/1, +%% [ +%% fun should_compact_by_default_rule/1, +%% fun should_compact_by_dbname_rule/1 +%% ] +%% } +%% } +%% }. + + +%% should_compact_by_default_rule(DbName) -> +%% {timeout, ?TIMEOUT_S, ?_test(begin +%% {ok, Db} = couch_db:open_int(DbName, []), +%% populate(DbName, 70, 70, 200 * 1024), + +%% {_, DbFileSize} = get_db_frag(DbName), +%% {_, ViewFileSize} = get_view_frag(DbName), + +%% ok = config:set("compactions", "_default", +%% "[{db_fragmentation, \"70%\"}, {view_fragmentation, \"70%\"}]", +%% false), + +%% ok = timer:sleep(4000), % something >= check_interval +%% wait_compaction_finished(DbName), +%% ok = config:delete("compactions", "_default", false), + +%% {DbFrag2, DbFileSize2} = get_db_frag(DbName), +%% {ViewFrag2, ViewFileSize2} = get_view_frag(DbName), + +%% ?assert(DbFrag2 < 70), +%% ?assert(ViewFrag2 < 70), + +%% ?assert(DbFileSize > DbFileSize2), +%% ?assert(ViewFileSize > ViewFileSize2), + +%% ?assert(couch_db:is_idle(Db)), +%% ok = couch_db:close(Db) +%% end)}. + +%% should_compact_by_dbname_rule(DbName) -> +%% {timeout, ?TIMEOUT_S, ?_test(begin +%% {ok, Db} = couch_db:open_int(DbName, []), +%% populate(DbName, 70, 70, 200 * 1024), + +%% {_, DbFileSize} = get_db_frag(DbName), +%% {_, ViewFileSize} = get_view_frag(DbName), + +%% ok = config:set("compactions", ?b2l(DbName), +%% "[{db_fragmentation, \"70%\"}, {view_fragmentation, \"70%\"}]", +%% false), + +%% ok = timer:sleep(4000), % something >= check_interval +%% wait_compaction_finished(DbName), +%% ok = config:delete("compactions", ?b2l(DbName), false), + +%% {DbFrag2, DbFileSize2} = get_db_frag(DbName), +%% {ViewFrag2, ViewFileSize2} = get_view_frag(DbName), + +%% ?assert(DbFrag2 < 70), +%% ?assert(ViewFrag2 < 70), + +%% ?assert(DbFileSize > DbFileSize2), +%% ?assert(ViewFileSize > ViewFileSize2), + +%% ?assert(couch_db:is_idle(Db)), +%% ok = couch_db:close(Db) +%% end)}. + + +%% create_design_doc(Db) -> +%% DDoc = couch_doc:from_json_obj({[ +%% {<<"_id">>, <<"_design/foo">>}, +%% {<<"language">>, <<"javascript">>}, +%% {<<"views">>, {[ +%% {<<"foo">>, {[ +%% {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>} +%% ]}}, +%% {<<"foo2">>, {[ +%% {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>} +%% ]}}, +%% {<<"foo3">>, {[ +%% {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>} +%% ]}} +%% ]}} +%% ]}), +%% {ok, _} = couch_db:update_docs(Db, [DDoc]), +%% {ok, _} = couch_db:ensure_full_commit(Db), +%% ok. + +%% populate(DbName, DbFrag, ViewFrag, MinFileSize) -> +%% {CurDbFrag, DbFileSize} = get_db_frag(DbName), +%% {CurViewFrag, ViewFileSize} = get_view_frag(DbName), +%% populate(DbName, DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag, +%% lists:min([DbFileSize, ViewFileSize])). + +%% populate(_Db, DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag, FileSize) +%% when CurDbFrag >= DbFrag, CurViewFrag >= ViewFrag, FileSize >= MinFileSize -> +%% ok; +%% populate(DbName, DbFrag, ViewFrag, MinFileSize, _, _, _) -> +%% update(DbName), +%% {CurDbFrag, DbFileSize} = get_db_frag(DbName), +%% {CurViewFrag, ViewFileSize} = get_view_frag(DbName), +%% populate(DbName, DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag, +%% lists:min([DbFileSize, ViewFileSize])). 
+ +%% update(DbName) -> +%% {ok, Db} = couch_db:open_int(DbName, []), +%% lists:foreach(fun(_) -> +%% Doc = couch_doc:from_json_obj({[{<<"_id">>, couch_uuids:new()}]}), +%% {ok, _} = couch_db:update_docs(Db, [Doc]), +%% query_view(Db#db.name) +%% end, lists:seq(1, 200)), +%% couch_db:close(Db). + +%% db_url(DbName) -> +%% Addr = config:get("httpd", "bind_address", "127.0.0.1"), +%% Port = integer_to_list(mochiweb_socket_server:get(couch_httpd, port)), +%% "http://" ++ Addr ++ ":" ++ Port ++ "/" ++ ?b2l(DbName). + +%% query_view(DbName) -> +%% {ok, Code, _Headers, _Body} = test_request:get( +%% db_url(DbName) ++ "/_design/foo/_view/foo"), +%% ?assertEqual(200, Code). + +%% get_db_frag(DbName) -> +%% {ok, Db} = couch_db:open_int(DbName, []), +%% {ok, Info} = couch_db:get_db_info(Db), +%% couch_db:close(Db), +%% FileSize = couch_util:get_value(disk_size, Info), +%% DataSize = couch_util:get_value(data_size, Info), +%% {round((FileSize - DataSize) / FileSize * 100), FileSize}. + +%% get_view_frag(DbName) -> +%% {ok, Db} = couch_db:open_int(DbName, []), +%% {ok, Info} = couch_mrview:get_info(Db, <<"_design/foo">>), +%% couch_db:close(Db), +%% FileSize = couch_util:get_value(disk_size, Info), +%% DataSize = couch_util:get_value(data_size, Info), +%% {round((FileSize - DataSize) / FileSize * 100), FileSize}. + +%% wait_compaction_finished(DbName) -> +%% Parent = self(), +%% Loop = spawn_link(fun() -> wait_loop(DbName, Parent) end), +%% receive +%% {done, Loop} -> +%% ok +%% after ?TIMEOUT -> +%% erlang:error( +%% {assertion_failed, +%% [{module, ?MODULE}, {line, ?LINE}, +%% {reason, "Compaction timeout"}]}) +%% end. + +%% wait_loop(DbName, Parent) -> +%% {ok, Db} = couch_db:open_int(DbName, []), +%% {ok, DbInfo} = couch_db:get_db_info(Db), +%% {ok, ViewInfo} = couch_mrview:get_info(Db, <<"_design/foo">>), +%% couch_db:close(Db), +%% case (couch_util:get_value(compact_running, ViewInfo) =:= true) orelse +%% (couch_util:get_value(compact_running, DbInfo) =:= true) of +%% false -> +%% Parent ! {done, self()}; +%% true -> +%% ok = timer:sleep(?DELAY), +%% wait_loop(DbName, Parent) +%% end. http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/94ae4be2/test/couchdb_file_compression_tests.erl ---------------------------------------------------------------------- diff --git a/test/couchdb_file_compression_tests.erl b/test/couchdb_file_compression_tests.erl index 5b12882..d43b2c0 100644 --- a/test/couchdb_file_compression_tests.erl +++ b/test/couchdb_file_compression_tests.erl @@ -21,205 +21,205 @@ -define(TIMEOUT, 30000). -setup() -> - config:set("couchdb", "file_compression", "none", false), - DbName = ?tempdb(), - {ok, Db} = couch_db:create(DbName, [?ADMIN_USER]), - ok = populate_db(Db, ?DOCS_COUNT), - DDoc = couch_doc:from_json_obj({[ - {<<"_id">>, ?DDOC_ID}, - {<<"language">>, <<"javascript">>}, - {<<"views">>, {[ - {<<"by_id">>, {[ - {<<"map">>, <<"function(doc){emit(doc._id, doc.string);}">>} - ]}} - ]} - } - ]}), - {ok, _} = couch_db:update_doc(Db, DDoc, []), - refresh_index(DbName), - ok = couch_db:close(Db), - DbName. - -teardown(DbName) -> - ok = couch_server:delete(DbName, [?ADMIN_USER]), - ok. - - -couch_auth_cache_test_() -> - { - "CouchDB file compression tests", - { - setup, - fun test_util:start_couch/0, fun test_util:stop_couch/1, - { - foreach, - fun setup/0, fun teardown/1, - [ - fun should_use_none/1, - fun should_use_deflate_1/1, - fun should_use_deflate_9/1, - fun should_use_snappy/1, - fun should_compare_compression_methods/1 - ] - } - } - }. 
- - -should_use_none(DbName) -> - config:set("couchdb", "file_compression", "none", false), - { - "Use no compression", - [ - {"compact database", ?_test(compact_db(DbName))}, - {"compact view", ?_test(compact_view(DbName))} - ] - }. - -should_use_deflate_1(DbName) -> - config:set("couchdb", "file_compression", "deflate_1", false), - { - "Use deflate compression at level 1", - [ - {"compact database", ?_test(compact_db(DbName))}, - {"compact view", ?_test(compact_view(DbName))} - ] - }. - -should_use_deflate_9(DbName) -> - config:set("couchdb", "file_compression", "deflate_9", false), - { - "Use deflate compression at level 9", - [ - {"compact database", ?_test(compact_db(DbName))}, - {"compact view", ?_test(compact_view(DbName))} - ] - }. - -should_use_snappy(DbName) -> - config:set("couchdb", "file_compression", "snappy", false), - { - "Use snappy compression", - [ - {"compact database", ?_test(compact_db(DbName))}, - {"compact view", ?_test(compact_view(DbName))} - ] - }. - -should_compare_compression_methods(DbName) -> - {"none > snappy > deflate_1 > deflate_9", - {timeout, ?TIMEOUT div 1000, ?_test(compare_compression_methods(DbName))}}. - -compare_compression_methods(DbName) -> - config:set("couchdb", "file_compression", "none", false), - compact_db(DbName), - compact_view(DbName), - DbSizeNone = db_disk_size(DbName), - ViewSizeNone = view_disk_size(DbName), - - config:set("couchdb", "file_compression", "snappy", false), - compact_db(DbName), - compact_view(DbName), - DbSizeSnappy = db_disk_size(DbName), - ViewSizeSnappy = view_disk_size(DbName), - - ?assert(DbSizeNone > DbSizeSnappy), - ?assert(ViewSizeNone > ViewSizeSnappy), - - config:set("couchdb", "file_compression", "deflate_1", false), - compact_db(DbName), - compact_view(DbName), - DbSizeDeflate1 = db_disk_size(DbName), - ViewSizeDeflate1 = view_disk_size(DbName), - - ?assert(DbSizeSnappy > DbSizeDeflate1), - ?assert(ViewSizeSnappy > ViewSizeDeflate1), - - config:set("couchdb", "file_compression", "deflate_9", false), - compact_db(DbName), - compact_view(DbName), - DbSizeDeflate9 = db_disk_size(DbName), - ViewSizeDeflate9 = view_disk_size(DbName), - - ?assert(DbSizeDeflate1 > DbSizeDeflate9), - ?assert(ViewSizeDeflate1 > ViewSizeDeflate9). - - -populate_db(_Db, NumDocs) when NumDocs =< 0 -> - ok; -populate_db(Db, NumDocs) -> - Docs = lists:map( - fun(_) -> - couch_doc:from_json_obj({[ - {<<"_id">>, couch_uuids:random()}, - {<<"string">>, ?l2b(lists:duplicate(1000, $X))} - ]}) - end, - lists:seq(1, 500)), - {ok, _} = couch_db:update_docs(Db, Docs, []), - populate_db(Db, NumDocs - 500). - -refresh_index(DbName) -> - {ok, Db} = couch_db:open_int(DbName, []), - {ok, DDoc} = couch_db:open_doc(Db, ?DDOC_ID, [ejson_body]), - couch_mrview:query_view(Db, DDoc, <<"by_id">>, [{stale, false}]), - ok = couch_db:close(Db). 
-
-compact_db(DbName) ->
-    DiskSizeBefore = db_disk_size(DbName),
-    {ok, Db} = couch_db:open_int(DbName, []),
-    {ok, CompactPid} = couch_db:start_compact(Db),
-    MonRef = erlang:monitor(process, CompactPid),
-    receive
-        {'DOWN', MonRef, process, CompactPid, normal} ->
-            ok;
-        {'DOWN', MonRef, process, CompactPid, Reason} ->
-            erlang:error({assertion_failed,
-                          [{module, ?MODULE},
-                           {line, ?LINE},
-                           {reason, "Error compacting database: "
-                                    ++ couch_util:to_list(Reason)}]})
-    after ?TIMEOUT ->
-        erlang:error({assertion_failed,
-                      [{module, ?MODULE},
-                       {line, ?LINE},
-                       {reason, "Timeout waiting for database compaction"}]})
-    end,
-    ok = couch_db:close(Db),
-    DiskSizeAfter = db_disk_size(DbName),
-    ?assert(DiskSizeBefore > DiskSizeAfter).
-
-compact_view(DbName) ->
-    DiskSizeBefore = view_disk_size(DbName),
-    {ok, MonRef} = couch_mrview:compact(DbName, ?DDOC_ID, [monitor]),
-    receive
-        {'DOWN', MonRef, process, _CompactPid, normal} ->
-            ok;
-        {'DOWN', MonRef, process, _CompactPid, Reason} ->
-            erlang:error({assertion_failed,
-                          [{module, ?MODULE},
-                           {line, ?LINE},
-                           {reason, "Error compacting view group: "
-                                    ++ couch_util:to_list(Reason)}]})
-    after ?TIMEOUT ->
-        erlang:error({assertion_failed,
-                      [{module, ?MODULE},
-                       {line, ?LINE},
-                       {reason, "Timeout waiting for view group compaction"}]})
-    end,
-    DiskSizeAfter = view_disk_size(DbName),
-    ?assert(DiskSizeBefore > DiskSizeAfter).
-
-db_disk_size(DbName) ->
-    {ok, Db} = couch_db:open_int(DbName, []),
-    {ok, Info} = couch_db:get_db_info(Db),
-    ok = couch_db:close(Db),
-    couch_util:get_value(disk_size, Info).
-
-view_disk_size(DbName) ->
-    {ok, Db} = couch_db:open_int(DbName, []),
-    {ok, DDoc} = couch_db:open_doc(Db, ?DDOC_ID, [ejson_body]),
-    {ok, Info} = couch_mrview:get_info(Db, DDoc),
-    ok = couch_db:close(Db),
-    couch_util:get_value(disk_size, Info).
+%% setup() ->
+%%     config:set("couchdb", "file_compression", "none", false),
+%%     DbName = ?tempdb(),
+%%     {ok, Db} = couch_db:create(DbName, [?ADMIN_USER]),
+%%     ok = populate_db(Db, ?DOCS_COUNT),
+%%     DDoc = couch_doc:from_json_obj({[
+%%         {<<"_id">>, ?DDOC_ID},
+%%         {<<"language">>, <<"javascript">>},
+%%         {<<"views">>, {[
+%%             {<<"by_id">>, {[
+%%                 {<<"map">>, <<"function(doc){emit(doc._id, doc.string);}">>}
+%%             ]}}
+%%         ]}
+%%         }
+%%     ]}),
+%%     {ok, _} = couch_db:update_doc(Db, DDoc, []),
+%%     refresh_index(DbName),
+%%     ok = couch_db:close(Db),
+%%     DbName.
+
+%% teardown(DbName) ->
+%%     ok = couch_server:delete(DbName, [?ADMIN_USER]),
+%%     ok.
+
+
+%% couch_auth_cache_test_() ->
+%%     {
+%%         "CouchDB file compression tests",
+%%         {
+%%             setup,
+%%             fun test_util:start_couch/0, fun test_util:stop_couch/1,
+%%             {
+%%                 foreach,
+%%                 fun setup/0, fun teardown/1,
+%%                 [
+%%                     fun should_use_none/1,
+%%                     fun should_use_deflate_1/1,
+%%                     fun should_use_deflate_9/1,
+%%                     fun should_use_snappy/1,
+%%                     fun should_compare_compression_methods/1
+%%                 ]
+%%             }
+%%         }
+%%     }.
+
+
+%% should_use_none(DbName) ->
+%%     config:set("couchdb", "file_compression", "none", false),
+%%     {
+%%         "Use no compression",
+%%         [
+%%             {"compact database", ?_test(compact_db(DbName))},
+%%             {"compact view", ?_test(compact_view(DbName))}
+%%         ]
+%%     }.
+
+%% should_use_deflate_1(DbName) ->
+%%     config:set("couchdb", "file_compression", "deflate_1", false),
+%%     {
+%%         "Use deflate compression at level 1",
+%%         [
+%%             {"compact database", ?_test(compact_db(DbName))},
+%%             {"compact view", ?_test(compact_view(DbName))}
+%%         ]
+%%     }.
+
+%% should_use_deflate_9(DbName) ->
+%%     config:set("couchdb", "file_compression", "deflate_9", false),
+%%     {
+%%         "Use deflate compression at level 9",
+%%         [
+%%             {"compact database", ?_test(compact_db(DbName))},
+%%             {"compact view", ?_test(compact_view(DbName))}
+%%         ]
+%%     }.
+
+%% should_use_snappy(DbName) ->
+%%     config:set("couchdb", "file_compression", "snappy", false),
+%%     {
+%%         "Use snappy compression",
+%%         [
+%%             {"compact database", ?_test(compact_db(DbName))},
+%%             {"compact view", ?_test(compact_view(DbName))}
+%%         ]
+%%     }.
+
+%% should_compare_compression_methods(DbName) ->
+%%     {"none > snappy > deflate_1 > deflate_9",
+%%      {timeout, ?TIMEOUT div 1000, ?_test(compare_compression_methods(DbName))}}.
+
+%% compare_compression_methods(DbName) ->
+%%     config:set("couchdb", "file_compression", "none", false),
+%%     compact_db(DbName),
+%%     compact_view(DbName),
+%%     DbSizeNone = db_disk_size(DbName),
+%%     ViewSizeNone = view_disk_size(DbName),
+
+%%     config:set("couchdb", "file_compression", "snappy", false),
+%%     compact_db(DbName),
+%%     compact_view(DbName),
+%%     DbSizeSnappy = db_disk_size(DbName),
+%%     ViewSizeSnappy = view_disk_size(DbName),
+
+%%     ?assert(DbSizeNone > DbSizeSnappy),
+%%     ?assert(ViewSizeNone > ViewSizeSnappy),
+
+%%     config:set("couchdb", "file_compression", "deflate_1", false),
+%%     compact_db(DbName),
+%%     compact_view(DbName),
+%%     DbSizeDeflate1 = db_disk_size(DbName),
+%%     ViewSizeDeflate1 = view_disk_size(DbName),
+
+%%     ?assert(DbSizeSnappy > DbSizeDeflate1),
+%%     ?assert(ViewSizeSnappy > ViewSizeDeflate1),
+
+%%     config:set("couchdb", "file_compression", "deflate_9", false),
+%%     compact_db(DbName),
+%%     compact_view(DbName),
+%%     DbSizeDeflate9 = db_disk_size(DbName),
+%%     ViewSizeDeflate9 = view_disk_size(DbName),
+
+%%     ?assert(DbSizeDeflate1 > DbSizeDeflate9),
+%%     ?assert(ViewSizeDeflate1 > ViewSizeDeflate9).
+
+
+%% populate_db(_Db, NumDocs) when NumDocs =< 0 ->
+%%     ok;
+%% populate_db(Db, NumDocs) ->
+%%     Docs = lists:map(
+%%         fun(_) ->
+%%             couch_doc:from_json_obj({[
+%%                 {<<"_id">>, couch_uuids:random()},
+%%                 {<<"string">>, ?l2b(lists:duplicate(1000, $X))}
+%%             ]})
+%%         end,
+%%         lists:seq(1, 500)),
+%%     {ok, _} = couch_db:update_docs(Db, Docs, []),
+%%     populate_db(Db, NumDocs - 500).
+
+%% refresh_index(DbName) ->
+%%     {ok, Db} = couch_db:open_int(DbName, []),
+%%     {ok, DDoc} = couch_db:open_doc(Db, ?DDOC_ID, [ejson_body]),
+%%     couch_mrview:query_view(Db, DDoc, <<"by_id">>, [{stale, false}]),
+%%     ok = couch_db:close(Db).
+
+%% compact_db(DbName) ->
+%%     DiskSizeBefore = db_disk_size(DbName),
+%%     {ok, Db} = couch_db:open_int(DbName, []),
+%%     {ok, CompactPid} = couch_db:start_compact(Db),
+%%     MonRef = erlang:monitor(process, CompactPid),
+%%     receive
+%%         {'DOWN', MonRef, process, CompactPid, normal} ->
+%%             ok;
+%%         {'DOWN', MonRef, process, CompactPid, Reason} ->
+%%             erlang:error({assertion_failed,
+%%                           [{module, ?MODULE},
+%%                            {line, ?LINE},
+%%                            {reason, "Error compacting database: "
+%%                                     ++ couch_util:to_list(Reason)}]})
+%%     after ?TIMEOUT ->
+%%         erlang:error({assertion_failed,
+%%                       [{module, ?MODULE},
+%%                        {line, ?LINE},
+%%                        {reason, "Timeout waiting for database compaction"}]})
+%%     end,
+%%     ok = couch_db:close(Db),
+%%     DiskSizeAfter = db_disk_size(DbName),
+%%     ?assert(DiskSizeBefore > DiskSizeAfter).
+
+%% compact_view(DbName) ->
+%%     DiskSizeBefore = view_disk_size(DbName),
+%%     {ok, MonRef} = couch_mrview:compact(DbName, ?DDOC_ID, [monitor]),
+%%     receive
+%%         {'DOWN', MonRef, process, _CompactPid, normal} ->
+%%             ok;
+%%         {'DOWN', MonRef, process, _CompactPid, Reason} ->
+%%             erlang:error({assertion_failed,
+%%                           [{module, ?MODULE},
+%%                            {line, ?LINE},
+%%                            {reason, "Error compacting view group: "
+%%                                     ++ couch_util:to_list(Reason)}]})
+%%     after ?TIMEOUT ->
+%%         erlang:error({assertion_failed,
+%%                       [{module, ?MODULE},
+%%                        {line, ?LINE},
+%%                        {reason, "Timeout waiting for view group compaction"}]})
+%%     end,
+%%     DiskSizeAfter = view_disk_size(DbName),
+%%     ?assert(DiskSizeBefore > DiskSizeAfter).
+
+%% db_disk_size(DbName) ->
+%%     {ok, Db} = couch_db:open_int(DbName, []),
+%%     {ok, Info} = couch_db:get_db_info(Db),
+%%     ok = couch_db:close(Db),
+%%     couch_util:get_value(disk_size, Info).
+
+%% view_disk_size(DbName) ->
+%%     {ok, Db} = couch_db:open_int(DbName, []),
+%%     {ok, DDoc} = couch_db:open_doc(Db, ?DDOC_ID, [ejson_body]),
+%%     {ok, Info} = couch_mrview:get_info(Db, DDoc),
+%%     ok = couch_db:close(Db),
+%%     couch_util:get_value(disk_size, Info).
