diff --git a/.asf.yaml b/.asf.yaml new file mode 100644 index 00000000000..a3b51fffa6b --- /dev/null +++ b/.asf.yaml @@ -0,0 +1,32 @@ +# see https://cwiki.apache.org/confluence/display/INFRA/git+-+.asf.yaml+features#git.asf.yamlfeatures-GitHubsettings + +github: + description: "Seamless multi-master syncing database with an intuitive HTTP/JSON API, designed for reliability" + homepage: https://couchdb.apache.org/ + labels: + - database + - content + - network-server + - http + - cloud + - erlang + - javascript + - couchdb + - big-data + - network-client + features: + issues: true + projects: true + enabled_merge_buttons: + squash: true + rebase: true + merge: false + +notifications: + commits: commits@couchdb.apache.org + issues: notifications@couchdb.apache.org + pullrequests: notifications@couchdb.apache.org + # This would send new/closed PR notifications to dev@ + #pullrequests_status: dev@couchdb.apache.org + # This would send individual PR comments/reviews to notifications@ + #pullrequests_comment: notifications@couchdb.apache.org diff --git a/.credo.exs b/.credo.exs index c2ffd19d009..64d281e5e26 100644 --- a/.credo.exs +++ b/.credo.exs @@ -110,6 +110,7 @@ {Credo.Check.Readability.StringSigils, []}, {Credo.Check.Readability.TrailingBlankLine, []}, {Credo.Check.Readability.TrailingWhiteSpace, []}, + {Credo.Check.Readability.UnnecessaryAliasExpansion, []}, {Credo.Check.Readability.VariableNames, []}, # @@ -119,7 +120,7 @@ {Credo.Check.Refactor.CyclomaticComplexity, false}, {Credo.Check.Refactor.FunctionArity, []}, {Credo.Check.Refactor.LongQuoteBlocks, false}, - {Credo.Check.Refactor.MapInto, []}, + {Credo.Check.Refactor.MapInto, false}, # Disabled since not compatible with Elixir > 1.9 {Credo.Check.Refactor.MatchInCondition, []}, {Credo.Check.Refactor.NegatedConditionsInUnless, []}, {Credo.Check.Refactor.NegatedConditionsWithElse, []}, @@ -130,6 +131,7 @@ excluded_functions: [] ]}, {Credo.Check.Refactor.UnlessWithElse, []}, + {Credo.Check.Refactor.WithClauses, 
[]}, # ## Warnings @@ -138,7 +140,8 @@ {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []}, {Credo.Check.Warning.IExPry, []}, {Credo.Check.Warning.IoInspect, []}, - {Credo.Check.Warning.LazyLogging, []}, + {Credo.Check.Warning.LazyLogging, false}, # Disabled since not compatible with Elixir > 1.9 + {Credo.Check.Warning.MixEnv, []}, {Credo.Check.Warning.OperationOnSameValues, []}, {Credo.Check.Warning.OperationWithConstantResult, []}, {Credo.Check.Warning.RaiseInsideRescue, []}, @@ -150,10 +153,12 @@ {Credo.Check.Warning.UnusedRegexOperation, []}, {Credo.Check.Warning.UnusedStringOperation, []}, {Credo.Check.Warning.UnusedTupleOperation, []}, + {Credo.Check.Warning.UnsafeExec, []}, # # Controversial and experimental checks (opt-in, just remove `, false`) # + {Credo.Check.Readability.StrictModuleLayout, false}, {Credo.Check.Consistency.MultiAliasImportRequireUse, false}, {Credo.Check.Design.DuplicatedCode, false}, {Credo.Check.Readability.Specs, false}, diff --git a/.gitignore b/.gitignore index 60e6d145ae7..6223d732245 100644 --- a/.gitignore +++ b/.gitignore @@ -13,6 +13,8 @@ .rebar/ .eunit/ cover/ +core +debian/ log apache-couchdb-*/ bin/ @@ -45,6 +47,7 @@ src/couch/priv/couch_js/**/*.d src/couch/priv/icu_driver/couch_icu_driver.d src/mango/src/mango_cursor_text.nocompile src/docs/ +src/emilio/ src/ets_lru/ src/excoveralls/ src/fauxton/ @@ -115,6 +118,7 @@ src/mango/ebin/ src/mango/test/*.pyc src/mango/nosetests.xml src/mango/venv/ +src/jwtf/.rebar3/ test/javascript/junit.xml /_build/ diff --git a/INSTALL.Unix.md b/INSTALL.Unix.md index 1934e9be94b..cb45e9ad43e 100644 --- a/INSTALL.Unix.md +++ b/INSTALL.Unix.md @@ -90,7 +90,7 @@ You can install the dependencies by running: sudo yum install autoconf autoconf-archive automake \ curl-devel erlang-asn1 erlang-erts erlang-eunit \ - erlang-os_mon erlang-xmerl help2man \ + erlang-xmerl help2man \ js-devel-1.8.5 libicu-devel libtool perl-Test-Harness You can install the Node.JS dependencies via 
[NodeSource](https://github.com/nodesource/distributions#rpminstall). diff --git a/LICENSE b/LICENSE index 048ee41a581..e578d365440 100644 --- a/LICENSE +++ b/LICENSE @@ -187,7 +187,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2020 The Apache Foundation + Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/Makefile b/Makefile index e229ee55b38..390aa4685c3 100644 --- a/Makefile +++ b/Makefile @@ -147,8 +147,8 @@ fauxton: share/www .PHONY: check # target: check - Test everything check: all python-black + @$(MAKE) emilio @$(MAKE) eunit - @$(MAKE) javascript @$(MAKE) mango-test @$(MAKE) elixir @@ -198,6 +198,9 @@ soak-eunit: couch @$(REBAR) setup_eunit 2> /dev/null while [ $$? -eq 0 ] ; do $(REBAR) -r eunit $(EUNIT_OPTS) ; done +emilio: + @bin/emilio -c emilio.config src/ | bin/warnings_in_scope -s 3 + .venv/bin/black: @python3 -m venv .venv @.venv/bin/pip3 install black || touch .venv/bin/black @@ -209,7 +212,7 @@ python-black: .venv/bin/black @python3 -c "import sys; exit(1 if sys.version_info >= (3,6) else 0)" || \ LC_ALL=C.UTF-8 LANG=C.UTF-8 .venv/bin/black --check \ --exclude="build/|buck-out/|dist/|_build/|\.git/|\.hg/|\.mypy_cache/|\.nox/|\.tox/|\.venv/|src/rebar/pr2relnotes.py|src/fauxton" \ - . dev/run test/javascript/run src/mango src/docs + build-aux/*.py dev/run test/javascript/run src/mango/test/*.py src/docs/src/conf.py src/docs/ext/*.py . 
python-black-update: .venv/bin/black @python3 -c "import sys; exit(1 if sys.version_info < (3,6) else 0)" || \ @@ -217,13 +220,16 @@ python-black-update: .venv/bin/black @python3 -c "import sys; exit(1 if sys.version_info >= (3,6) else 0)" || \ LC_ALL=C.UTF-8 LANG=C.UTF-8 .venv/bin/black \ --exclude="build/|buck-out/|dist/|_build/|\.git/|\.hg/|\.mypy_cache/|\.nox/|\.tox/|\.venv/|src/rebar/pr2relnotes.py|src/fauxton" \ - . dev/run test/javascript/run src/mango src/docs + build-aux/*.py dev/run test/javascript/run src/mango/test/*.py src/docs/src/conf.py src/docs/ext/*.py . .PHONY: elixir elixir: export MIX_ENV=integration elixir: export COUCHDB_TEST_ADMIN_PARTY_OVERRIDE=1 elixir: elixir-init elixir-check-formatted elixir-credo devclean - @dev/run "$(TEST_OPTS)" -a adm:pass -n 1 --enable-erlang-views --no-eval 'mix test --trace --exclude without_quorum_test --exclude with_quorum_test $(EXUNIT_OPTS)' + @dev/run "$(TEST_OPTS)" -a adm:pass -n 1 \ + --enable-erlang-views \ + --locald-config test/elixir/test/config/test-config.ini \ + --no-eval 'mix test --trace --exclude without_quorum_test --exclude with_quorum_test $(EXUNIT_OPTS)' .PHONY: elixir-init elixir-init: MIX_ENV=test @@ -257,7 +263,9 @@ elixir-credo: elixir-init .PHONY: javascript # target: javascript - Run JavaScript test suites or specific ones defined by suites option javascript: export COUCHDB_TEST_ADMIN_PARTY_OVERRIDE=1 -javascript: devclean +javascript: + + @$(MAKE) devclean @mkdir -p share/www/script/test ifeq ($(IN_RELEASE), true) @cp test/javascript/tests/lorem*.txt share/www/script/test/ @@ -477,7 +485,7 @@ endif # target: devclean - Remove dev cluster artifacts devclean: @rm -rf dev/lib/*/data - + @rm -rf dev/lib/*/etc ################################################################################ # Misc diff --git a/Makefile.win b/Makefile.win index 30ebe0ee32f..2e0dcaadebb 100644 --- a/Makefile.win +++ b/Makefile.win @@ -134,8 +134,8 @@ fauxton: share\www .PHONY: check # target: check - Test 
everything check: all python-black + @$(MAKE) emilio @$(MAKE) eunit - @$(MAKE) javascript @$(MAKE) mango-test @$(MAKE) elixir @@ -175,6 +175,9 @@ just-eunit: export ERL_AFLAGS = "-config $(shell echo %cd%)/rel/files/eunit.conf just-eunit: @$(REBAR) -r eunit $(EUNIT_OPTS) +emilio: + @bin\emilio -c emilio.config src\ | python.exe bin\warnings_in_scope -s 3 + .venv/bin/black: @python.exe -m venv .venv @.venv\Scripts\pip3.exe install black || copy /b .venv\Scripts\black.exe +,, @@ -186,7 +189,7 @@ python-black: .venv/bin/black @python.exe -c "import sys; exit(1 if sys.version_info >= (3,6) else 0)" || \ .venv\Scripts\black.exe --check \ --exclude="build/|buck-out/|dist/|_build/|\.git/|\.hg/|\.mypy_cache/|\.nox/|\.tox/|\.venv/|src/rebar/pr2relnotes.py|src/fauxton" \ - . dev\run test\javascript\run src\mango src\docs + build-aux dev\run test\javascript\run src\mango\test src\docs\src\conf.py src\docs\ext . python-black-update: .venv/bin/black @python.exe -c "import sys; exit(1 if sys.version_info < (3,6) else 0)" || \ @@ -194,13 +197,15 @@ python-black-update: .venv/bin/black @python.exe -c "import sys; exit(1 if sys.version_info >= (3,6) else 0)" || \ .venv\Scripts\black.exe \ --exclude="build/|buck-out/|dist/|_build/|\.git/|\.hg/|\.mypy_cache/|\.nox/|\.tox/|\.venv/|src/rebar/pr2relnotes.py|src/fauxton" \ - . dev\run test\javascript\run src\mango src\docs + build-aux dev\run test\javascript\run src\mango\test src\docs\src\conf.py src\docs\ext . 
.PHONY: elixir elixir: export MIX_ENV=integration elixir: export COUCHDB_TEST_ADMIN_PARTY_OVERRIDE=1 elixir: elixir-init elixir-check-formatted elixir-credo devclean - @dev\run $(TEST_OPTS) -a adm:pass -n 1 --enable-erlang-views --no-eval 'mix test --trace --exclude without_quorum_test --exclude with_quorum_test $(EXUNIT_OPTS)' + @dev\run $(TEST_OPTS) -a adm:pass -n 1 --enable-erlang-views \ + --locald-config test/elixir/test/config/test-config.ini \ + --no-eval 'mix test --trace --exclude without_quorum_test --exclude with_quorum_test $(EXUNIT_OPTS)' .PHONY: elixir-init elixir-init: MIX_ENV=test @@ -235,7 +240,8 @@ elixir-credo: elixir-init .PHONY: javascript # target: javascript - Run JavaScript test suites or specific ones defined by suites option javascript: export COUCHDB_TEST_ADMIN_PARTY_OVERRIDE=1 -javascript: devclean +javascript: + @$(MAKE) devclean -@mkdir share\www\script\test ifeq ($(IN_RELEASE), true) @copy test\javascript\tests\lorem*.txt share\www\script\test @@ -356,7 +362,7 @@ install: release @echo . @echo To install CouchDB into your system, copy the rel\couchdb @echo to your desired installation location. For example: - @echo xcopy /E rel\couchdb C:\CouchDB\ + @echo xcopy /E rel\couchdb C:\CouchDB\ @echo . ################################################################################ @@ -405,6 +411,9 @@ devclean: -@rmdir /s/q dev\lib\node1\data -@rmdir /s/q dev\lib\node2\data -@rmdir /s/q dev\lib\node3\data + -@rmdir /s/q dev\lib\node1\etc + -@rmdir /s/q dev\lib\node2\etc + -@rmdir /s/q dev\lib\node3\etc ################################################################################ diff --git a/README.rst b/README.rst index 47ce32e19b2..c527913b519 100644 --- a/README.rst +++ b/README.rst @@ -60,7 +60,9 @@ Run a basic test suite for CouchDB by browsing here: Getting started with developing ------------------------------- -For more detail, read the README-DEV.rst file in this directory. 
+For more detail, read the README-DEV.rst_ file in this directory. + +.. _README-DEV.rst: https://github.com/apache/couchdb/blob/master/README-DEV.rst Basically you just have to install the needed dependencies which are documented in the install docs and then run ``./configure && make``. @@ -72,9 +74,11 @@ layer in front of this cluster by running ``./dev/run --with-haproxy listening on port 5984. For Fauxton developers fixing the admin-party does not work via the button in -Fauxton. To fix the admin party you have to run ``./dev/run`` with the ``admin`` -flag, e.g. ``./dev/run --admin=username:password``. If you want to have an -admin-party, just omit the flag. +Fauxton. If you run ``./dev/run``, an admin user ``root`` with a random password +is generated (see the output of the script). If you want to set an admin user, +start with the admin flag, e.g. ``./dev/run --admin=username:password``. If you +want to have an admin-party, run ``./dev/run --with-admin-party-please``. To see +all available options, please check ``./dev/run --help``. 
Contributing to CouchDB ----------------------- diff --git a/bin/warnings_in_scope b/bin/warnings_in_scope new file mode 100755 index 00000000000..2a854211a2b --- /dev/null +++ b/bin/warnings_in_scope @@ -0,0 +1,125 @@ +#!/usr/bin/env python3 +import os +import subprocess +from pathlib import Path +import optparse +import sys +import re + +def run(command, cwd=None): + try: + return subprocess.Popen( + command, shell=True, cwd=cwd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + except OSError as err: + raise OSError("Error in command '{0}': {1}".format(command, err)) + +def parse_location(line): + # take substring between @@ + # take second part of it + location = line.split(b'@@')[1].strip().split(b' ')[1] + tokens = location.split(b',') + if len(tokens) == 1: + return (int(tokens[0][1:]), 1) + elif len(tokens) == 2: + return (int(tokens[0][1:]), int(tokens[1])) + +def changed_files(directory, scope): + result = {} + proc = run('git diff --no-prefix --unified={0}'.format(scope), cwd=str(directory)) + file_path = None + for line in iter(proc.stdout.readline, b''): + if line.startswith(b'diff --git '): + # this would be problematic if directory has space in the name + file_name = line.split(b' ')[3].strip() + file_path = str(directory.joinpath(str(file_name, 'utf-8'))) + result[file_path] = set() + continue + if line.startswith(b'@@'): + start_pos, number_of_lines = parse_location(line) + for line_number in range(start_pos, start_pos + number_of_lines): + result[file_path].add(line_number) + return result + +def print_changed(file_name, line_number): + print('{0}:{1}'.format(str(file_name), str(line_number))) + +def changes(dirs, scope): + result = {} + for directory in dirs: + result.update(changed_files(directory, scope)) + return result + +def repositories(root): + for directory in Path(root).rglob('.git'): + if not directory.is_dir(): + continue + yield directory.parent + +def setup_argparse(): + parser = optparse.OptionParser(description="Filter output 
to remove unrelated warning") + parser.add_option( + "-r", + "--regexp", + dest="regexp", + default='(?P[^:]+):(?P\d+).*', + help="Regexp used to extract file_name and line number", + ) + parser.add_option( + "-s", + "--scope", + dest="scope", + default=0, + help="Number of lines surrounding the change we consider relevant", + ) + parser.add_option( + "-p", + "--print-only", + action="store_true", + dest="print_only", + default=False, + help="Print changed lines only", + ) + return parser.parse_args() + +def filter_stdin(regexp, changes): + any_matches = False + for line in iter(sys.stdin.readline, ''): + matches = re.match(regexp, line) + if matches: + file_name = matches.group('file_name') + line_number = int(matches.group('line')) + if file_name in changes and line_number in changes[file_name]: + print(line, end='') + any_matches = True + return any_matches + +def validate_regexp(regexp): + index = regexp.groupindex + if 'file_name' in index and 'line' in index: + return True + else: + raise TypeError("Regexp must define following groups:\n - file_name\n - line") + +def main(): + opts, args = setup_argparse() + if opts.print_only: + for file_name, changed_lines in changes(repositories('.'), opts.scope).items(): + for line_number in changed_lines: + print_changed(file_name, line_number) + return 0 + else: + regexp = re.compile(opts.regexp) + validate_regexp(regexp) + if filter_stdin(regexp, changes(repositories('.'), opts.scope)): + return 1 + else: + return 0 + +if __name__ == "__main__": + try: + sys.exit(main()) + except KeyboardInterrupt: + pass + diff --git a/build-aux/Jenkinsfile.full b/build-aux/Jenkinsfile.full index b1d46e846a4..d7a7657b4f1 100644 --- a/build-aux/Jenkinsfile.full +++ b/build-aux/Jenkinsfile.full @@ -33,7 +33,7 @@ mkdir couchdb cp ${WORKSPACE}/apache-couchdb-*.tar.gz couchdb tar -xf ${WORKSPACE}/apache-couchdb-*.tar.gz -C couchdb cd couchdb-pkg -make ${platform} PLATFORM=${platform} +make ''' cleanup_and_save = ''' @@ -158,6 +158,43 @@ 
pipeline { } // post } // stage FreeBSD + stage('macOS') { + agent { + label 'macos' + } + steps { + // deleteDir is OK here because we're not inside of a Docker container! + deleteDir() + unstash 'tarball' + withEnv(['HOME='+pwd()]) { + sh ''' + PATH=/usr/local/bin:$PATH + export PATH + mkdir -p $COUCHDB_IO_LOG_DIR + + # Build CouchDB from tarball & test + mkdir build + cd build + tar -xzf $WORKSPACE/apache-couchdb-*.tar.gz + cd apache-couchdb-* + ./configure --with-curl --spidermonkey-version 60 + make check || (build-aux/logfile-uploader.py && false) + + # No package build for macOS at this time + ''' + } // withEnv + } // steps + post { + always { + junit '**/.eunit/*.xml, **/_build/*/lib/couchdbtest/*.xml, **/src/mango/nosetests.xml, **/test/javascript/junit.xml' + } + cleanup { + sh 'killall -9 beam.smp || true' + sh 'rm -rf ${WORKSPACE}/* ${COUCHDB_IO_LOG_DIR} || true' + } + } // post + } // stage macOS + stage('CentOS 6') { agent { docker { @@ -258,7 +295,7 @@ pipeline { } environment { platform = 'centos8' - sm_ver = '1.8.5' + sm_ver = '60' } stages { stage('Build from tarball & test') { @@ -380,18 +417,18 @@ pipeline { } // post } // stage - stage('Debian Stretch') { + stage('Ubuntu Focal') { agent { docker { - image 'couchdbdev/debian-stretch-erlang-20.3.8.25-1:latest' + image 'couchdbdev/ubuntu-focal-erlang-20.3.8.25-1:latest' label 'docker' alwaysPull true args "${DOCKER_ARGS}" } } environment { - platform = 'stretch' - sm_ver = '1.8.5' + platform = 'focal' + sm_ver = '68' } stages { stage('Build from tarball & test') { @@ -424,18 +461,18 @@ pipeline { } // post } // stage - stage('Debian Buster amd64') { + stage('Debian Stretch') { agent { docker { - image 'couchdbdev/debian-buster-erlang-20.3.8.25-1:latest' + image 'couchdbdev/debian-stretch-erlang-20.3.8.25-1:latest' label 'docker' alwaysPull true args "${DOCKER_ARGS}" } } environment { - platform = 'buster' - sm_ver = '60' + platform = 'stretch' + sm_ver = '1.8.5' } stages { stage('Build from 
tarball & test') { @@ -468,18 +505,18 @@ pipeline { } // post } // stage - stage('Debian Buster arm64v8') { + stage('Debian Buster amd64') { agent { docker { - image 'couchdbdev/arm64v8-debian-buster-erlang-20.3.8.25-1:latest' - label 'arm64v8' + image 'couchdbdev/debian-buster-erlang-20.3.8.25-1:latest' + label 'docker' alwaysPull true args "${DOCKER_ARGS}" } } environment { platform = 'buster' - sm_ver = '1.8.5' + sm_ver = '60' } stages { stage('Build from tarball & test') { @@ -512,18 +549,18 @@ pipeline { } // post } // stage - stage('Debian Buster ppc64le') { + stage('Debian Buster arm64v8') { agent { docker { - image 'couchdbdev/ppc64le-debian-buster-erlang-20.3.8.25-1:latest' - label 'ppc64le' + image 'couchdbdev/arm64v8-debian-buster-erlang-20.3.8.25-1:latest' + label 'arm64v8' alwaysPull true args "${DOCKER_ARGS}" } } environment { platform = 'buster' - sm_ver = '60' + sm_ver = '1.8.5' } stages { stage('Build from tarball & test') { @@ -556,6 +593,54 @@ pipeline { } // post } // stage +/* + - Removed 2020.09.15 - VMs are offline +*/ + +// stage('Debian Buster ppc64le') { +// agent { +// docker { +// image 'couchdbdev/ppc64le-debian-buster-erlang-20.3.8.25-1:latest' +// label 'ppc64le' +// alwaysPull true +// args "${DOCKER_ARGS}" +// } +// } +// environment { +// platform = 'buster' +// sm_ver = '60' +// } +// stages { +// stage('Build from tarball & test') { +// steps { +// unstash 'tarball' +// sh( script: build_and_test ) +// } +// post { +// always { +// junit '**/.eunit/*.xml, **/_build/*/lib/couchdbtest/*.xml, **/src/mango/nosetests.xml, **/test/javascript/junit.xml' +// } +// } +// } +// stage('Build CouchDB packages') { +// steps { +// sh( script: make_packages ) +// sh( script: cleanup_and_save ) +// } +// post { +// success { +// archiveArtifacts artifacts: 'pkgs/**', fingerprint: true +// } +// } +// } +// } // stages +// post { +// cleanup { +// sh 'rm -rf ${WORKSPACE}/*' +// } +// } // post +// } // stage + /* * Example of how to do a 
qemu-based run, please leave here */ @@ -646,12 +731,12 @@ pipeline { unstash 'tarball' unarchive mapping: ['pkgs/' : '.'] - echo 'Retrieving & cleaning current couchdb-vm2 tree...' + echo 'Retrieving & cleaning current repo-nightly tree...' sh ''' - rsync -avz -e "ssh -o StrictHostKeyChecking=no -i $KEY" jenkins@couchdb-vm2.apache.org:/var/www/html/$BRANCH_NAME . || mkdir -p $BRANCH_NAME + rsync -avz -e "ssh -o StrictHostKeyChecking=no -i $KEY" jenkins@repo-nightly.couchdb.org:/var/www/html/$BRANCH_NAME . || mkdir -p $BRANCH_NAME rm -rf $BRANCH_NAME/debian/* $BRANCH_NAME/el6/* $BRANCH_NAME/el7/* $BRANCH_NAME/el8/* mkdir -p $BRANCH_NAME/debian $BRANCH_NAME/el6 $BRANCH_NAME/el7 $BRANCH_NAME/el8 $BRANCH_NAME/source - rsync -avz -e "ssh -o StrictHostKeyChecking=no -i $KEY" jenkins@couchdb-vm2.apache.org:/var/www/html/js . + rsync -avz -e "ssh -o StrictHostKeyChecking=no -i $KEY" jenkins@repo-nightly.couchdb.org:/var/www/html/js . ''' echo 'Building Debian repo...' @@ -660,11 +745,12 @@ pipeline { cp js/debian-stretch/*.deb pkgs/stretch reprepro -b couchdb-pkg/repo includedeb stretch pkgs/stretch/*.deb cp js/debian-buster/*.deb pkgs/stretch - reprepro -b couchdb-pkg/repo includedeb stretch pkgs/buster/*.deb + reprepro -b couchdb-pkg/repo includedeb buster pkgs/buster/*.deb cp js/ubuntu-xenial/*.deb pkgs/xenial reprepro -b couchdb-pkg/repo includedeb xenial pkgs/xenial/*.deb cp js/ubuntu-bionic/*.deb pkgs/bionic reprepro -b couchdb-pkg/repo includedeb bionic pkgs/bionic/*.deb + reprepro -b couchdb-pkg/repo includedeb focal pkgs/focal/*.deb ''' echo 'Building CentOS repos...' @@ -690,9 +776,9 @@ pipeline { cd ../.. ''' - echo 'rsyncing tree to couchdb-vm2...' + echo 'rsyncing tree to repo-nightly...' 
sh ''' - rsync -avz --delete -e "ssh -o StrictHostKeyChecking=no -i $KEY" $BRANCH_NAME jenkins@couchdb-vm2.apache.org:/var/www/html + rsync -avz --delete -e "ssh -o StrictHostKeyChecking=no -i $KEY" $BRANCH_NAME jenkins@repo-nightly.couchdb.org:/var/www/html rm -rf $BRANCH_NAME couchdb-pkg *.tar.gz ''' } // withCredentials diff --git a/build-aux/logfile-uploader.py b/build-aux/logfile-uploader.py index 148704ceefe..3df9e6c8153 100755 --- a/build-aux/logfile-uploader.py +++ b/build-aux/logfile-uploader.py @@ -22,7 +22,7 @@ import requests -COUCH_URL = "https://couchdb-vm2.apache.org/ci_errorlogs" +COUCH_URL = "https://logs.couchdb.org/ci_errorlogs" TARFILE = "couchlog.tar.gz" diff --git a/configure b/configure index 38e62e31744..854366c8a93 100755 --- a/configure +++ b/configure @@ -255,12 +255,25 @@ install_local_rebar() { fi } +install_local_emilio() { + if [ ! -x "${rootdir}/bin/emilio" ]; then + if [ ! -d "${rootdir}/src/emilio" ]; then + git clone --depth 1 https://github.com/cloudant-labs/emilio ${rootdir}/src/emilio + fi + cd ${rootdir}/src/emilio && ${REBAR} compile escriptize; cd ${rootdir} + mv ${rootdir}/src/emilio/emilio ${rootdir}/bin/emilio + chmod +x ${rootdir}/bin/emilio + cd ${rootdir}/src/emilio && ${REBAR} clean; cd ${rootdir} + fi +} if [ -z "${REBAR}" ]; then install_local_rebar REBAR=${rootdir}/bin/rebar fi +install_local_emilio + # only update dependencies, when we are not in a release tarball if [ -d .git -a $SKIP_DEPS -ne 1 ]; then echo "==> updating dependencies" diff --git a/configure.ps1 b/configure.ps1 index c74fbcf41fe..65f8517d65a 100644 --- a/configure.ps1 +++ b/configure.ps1 @@ -205,6 +205,20 @@ if ((Get-Command "rebar.cmd" -ErrorAction SilentlyContinue) -eq $null) $env:Path += ";$rootdir\bin" } +# check for emilio; if not found, get it and build it +if ((Get-Command "emilio.cmd" -ErrorAction SilentlyContinue) -eq $null) +{ + Write-Verbose "==> emilio.cmd not found; bootstrapping..." 
+ if (-Not (Test-Path "src\emilio")) + { + git clone --depth 1 https://github.com/wohali/emilio $rootdir\src\emilio + } + cmd /c "cd $rootdir\src\emilio && rebar compile escriptize; cd $rootdir" + cp $rootdir\src\emilio\emilio $rootdir\bin\emilio + cp $rootdir\src\emilio\bin\emilio.cmd $rootdir\bin\emilio.cmd + cmd /c "cd $rootdir\src\emilio && rebar clean; cd $rootdir" +} + # only update dependencies, when we are not in a release tarball if ( (Test-Path .git -PathType Container) -and (-not $SkipDeps) ) { Write-Verbose "==> updating dependencies" diff --git a/dev/run b/dev/run index a96817d833b..6d8bc5201d9 100755 --- a/dev/run +++ b/dev/run @@ -211,6 +211,14 @@ def get_args_parser(): default=None, help="Extra arguments to pass to beam process", ) + parser.add_option( + "-l", + "--locald-config", + dest="locald_configs", + action="append", + default=[], + help="Path to config to place in 'local.d'. Can be repeated", + ) return parser @@ -238,6 +246,7 @@ def setup_context(opts, args): "reset_logs": True, "procs": [], "auto_ports": opts.auto_ports, + "locald_configs": opts.locald_configs, } @@ -279,9 +288,24 @@ def setup_configs(ctx): "_default": "", } write_config(ctx, node, env) + write_locald_configs(ctx, node, env) generate_haproxy_config(ctx) +def write_locald_configs(ctx, node, env): + for locald_config in ctx["locald_configs"]: + config_src = os.path.join(ctx["rootdir"], locald_config) + if os.path.exists(config_src): + config_filename = os.path.basename(config_src) + config_tgt = os.path.join( + ctx["devdir"], "lib", node, "etc", "local.d", config_filename + ) + with open(config_src) as handle: + content = handle.read() + with open(config_tgt, "w") as handle: + handle.write(content) + + def generate_haproxy_config(ctx): haproxy_config = os.path.join(ctx["devdir"], "lib", "haproxy.cfg") template = os.path.join(ctx["rootdir"], "rel", "haproxy.cfg") @@ -382,6 +406,8 @@ def write_config(ctx, node, env): with open(tgt, "w") as handle: handle.write(content) + 
ensure_dir_exists(etc_tgt, "local.d") + def boot_haproxy(ctx): if not ctx["with_haproxy"]: @@ -401,7 +427,10 @@ def boot_haproxy(ctx): def hack_default_ini(ctx, node, contents): contents = re.sub( - "^\[httpd\]$", "[httpd]\nenable = true", contents, flags=re.MULTILINE, + "^\[httpd\]$", + "[httpd]\nenable = true", + contents, + flags=re.MULTILINE, ) if ctx["enable_erlang_views"]: @@ -580,6 +609,7 @@ def boot_node(ctx, node): "-couch_ini", os.path.join(node_etcdir, "default.ini"), os.path.join(node_etcdir, "local.ini"), + os.path.join(node_etcdir, "local.d"), "-reltool_config", os.path.join(reldir, "reltool.config"), "-parent_pid", diff --git a/emilio.config b/emilio.config new file mode 100644 index 00000000000..0dad9389898 --- /dev/null +++ b/emilio.config @@ -0,0 +1,20 @@ +{ignore, [ + "src[\/]bear[\/]*", + "src[\/]b64url[\/]*", + "src[\/]docs[\/]*", + "src[\/]*[\/].eunit[\/]*", + "src[\/]fauxton[\/]*", + "src[\/]rebar[\/]*", + "src[\/]emilio[\/]*", + "src[\/]folsom[\/]*", + "src[\/]mochiweb[\/]*", + "src[\/]snappy[\/]*", + "src[\/]ssl_verify_fun[\/]*", + "src[\/]ibrowse[\/]*", + "src[\/]jiffy[\/]*", + "src[\/]meck[\/]*", + "src[\/]proper[\/]*", + "src[\/]recon[\/]*", + "src[\/]hyper[\/]*", + "src[\/]triq[\/]*" +]}. 
diff --git a/mix.exs b/mix.exs index d717e4b4aa0..ae42af5d6e5 100644 --- a/mix.exs +++ b/mix.exs @@ -65,10 +65,12 @@ defmodule CouchDBTest.Mixfile do {:junit_formatter, "~> 3.0", only: [:dev, :test, :integration]}, {:httpotion, ">= 3.1.3", only: [:dev, :test, :integration], runtime: false}, {:excoveralls, "~> 0.12", only: :test}, + {:b64url, path: Path.expand("src/b64url", __DIR__)}, {:jiffy, path: Path.expand("src/jiffy", __DIR__)}, + {:jwtf, path: Path.expand("src/jwtf", __DIR__)}, {:ibrowse, path: Path.expand("src/ibrowse", __DIR__), override: true, compile: false}, - {:credo, "~> 1.0.0", only: [:dev, :test, :integration], runtime: false} + {:credo, "~> 1.4.0", only: [:dev, :test, :integration], runtime: false} ] end diff --git a/mix.lock b/mix.lock index 30134f20f02..8b6489f0ca0 100644 --- a/mix.lock +++ b/mix.lock @@ -1,13 +1,13 @@ %{ "bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"}, "certifi": {:hex, :certifi, "2.5.1", "867ce347f7c7d78563450a18a6a28a8090331e77fa02380b4a21962a65d36ee5", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm", "805abd97539caf89ec6d4732c91e62ba9da0cda51ac462380bbd28ee697a8c42"}, - "credo": {:hex, :credo, "1.0.5", "fdea745579f8845315fe6a3b43e2f9f8866839cfbc8562bb72778e9fdaa94214", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "16105fac37c5c4b3f6e1f70ba0784511fec4275cd8bb979386e3c739cf4e6455"}, + "credo": {:hex, :credo, "1.4.0", "92339d4cbadd1e88b5ee43d427b639b68a11071b6f73854e33638e30a0ea11f5", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "1fd3b70dce216574ce3c18bdf510b57e7c4c85c2ec9cad4bff854abaf7e58658"}, "excoveralls": {:hex, 
:excoveralls, "0.12.1", "a553c59f6850d0aff3770e4729515762ba7c8e41eedde03208182a8dc9d0ce07", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "5c1f717066a299b1b732249e736c5da96bb4120d1e55dc2e6f442d251e18a812"}, "hackney": {:hex, :hackney, "1.15.2", "07e33c794f8f8964ee86cebec1a8ed88db5070e52e904b8f12209773c1036085", [:rebar3], [{:certifi, "2.5.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.5", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "e0100f8ef7d1124222c11ad362c857d3df7cb5f4204054f9f0f4a728666591fc"}, "httpotion": {:hex, :httpotion, "3.1.3", "fdaf1e16b9318dcb722de57e75ac368c93d4c6e3c9125f93e960f953a750fb77", [:mix], [{:ibrowse, "== 4.4.0", [hex: :ibrowse, repo: "hexpm", optional: false]}], "hexpm", "e420172ef697a0f1f4dc40f89a319d5a3aad90ec51fa424f08c115f04192ae43"}, "ibrowse": {:hex, :ibrowse, "4.4.0", "2d923325efe0d2cb09b9c6a047b2835a5eda69d8a47ed6ff8bc03628b764e991", [:rebar3], [], "hexpm"}, "idna": {:hex, :idna, "6.0.0", "689c46cbcdf3524c44d5f3dde8001f364cd7608a99556d8fbd8239a5798d4c10", [:rebar3], [{:unicode_util_compat, "0.4.1", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "4bdd305eb64e18b0273864920695cb18d7a2021f31a11b9c5fbcd9a253f936e2"}, - "jason": {:hex, :jason, "1.1.2", "b03dedea67a99223a2eaf9f1264ce37154564de899fd3d8b9a21b1a6fd64afe7", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fdf843bca858203ae1de16da2ee206f53416bbda5dc8c9e78f43243de4bc3afe"}, + "jason": {:hex, :jason, "1.2.1", "12b22825e22f468c02eb3e4b9985f3d0cb8dc40b9bd704730efa11abd2708c44", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], 
"hexpm", "b659b8571deedf60f79c5a608e15414085fa141344e2716fbd6988a084b5f993"}, "jiffy": {:hex, :jiffy, "0.15.2", "de266c390111fd4ea28b9302f0bc3d7472468f3b8e0aceabfbefa26d08cd73b7", [:rebar3], [], "hexpm"}, "junit_formatter": {:hex, :junit_formatter, "3.0.0", "13950d944dbd295da7d8cc4798b8faee808a8bb9b637c88069954eac078ac9da", [:mix], [], "hexpm", "d77b7b9a1601185b18dfe7682b27c46d5d12721f12fdc75180a6fc573b4e64b1"}, "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"}, diff --git a/rebar.config.script b/rebar.config.script index 2f7de3dc23e..d8afc10e3c0 100644 --- a/rebar.config.script +++ b/rebar.config.script @@ -132,13 +132,14 @@ SubDirs = [ "src/fabric", "src/global_changes", "src/ioq", + "src/jwtf", "src/ken", "src/mango", "src/rexi", "src/setup", "src/smoosh", "rel" -], +]. DepDescs = [ %% Independent Apps @@ -150,29 +151,29 @@ DepDescs = [ %% Non-Erlang deps {docs, {url, "https://github.com/apache/couchdb-documentation"}, - {tag, "3.0.0"}, [raw]}, + {tag, "3.1.0-RC1"}, [raw]}, {fauxton, {url, "https://github.com/apache/couchdb-fauxton"}, - {tag, "v1.2.2"}, [raw]}, + {tag, "v1.2.4"}, [raw]}, %% Third party deps {folsom, "folsom", {tag, "CouchDB-0.8.3"}}, {hyper, "hyper", {tag, "CouchDB-2.2.0-6"}}, {ibrowse, "ibrowse", {tag, "CouchDB-4.0.1-1"}}, -{jiffy, "jiffy", {tag, "CouchDB-1.0.3-1"}}, +{jiffy, "jiffy", {tag, "CouchDB-1.0.4-1"}}, {mochiweb, "mochiweb", {tag, "v2.20.0"}}, {meck, "meck", {tag, "0.8.8"}}, {recon, "recon", {tag, "2.5.0"}} -], +]. -WithProper = lists:keyfind(with_proper, 1, CouchConfig) == {with_proper, true}, +WithProper = lists:keyfind(with_proper, 1, CouchConfig) == {with_proper, true}. OptionalDeps = case WithProper of true -> [{proper, {url, "https://github.com/proper-testing/proper"}, {tag, "v1.3"}}]; false -> [] -end, +end. 
-BaseUrl = "https://github.com/apache/", +BaseUrl = "https://github.com/apache/". MakeDep = fun ({AppName, {url, Url}, Version}) -> @@ -185,12 +186,12 @@ MakeDep = fun ({AppName, RepoName, Version, Options}) -> Url = BaseUrl ++ "couchdb-" ++ RepoName ++ ".git", {AppName, ".*", {git, Url, Version}, Options} -end, +end. ErlOpts = case os:getenv("ERL_OPTS") of false -> []; Opts -> [list_to_atom(O) || O <- string:tokens(Opts, ",")] -end, +end. AddConfig = [ {require_otp_vsn, "19|20|21|22"}, @@ -205,11 +206,11 @@ AddConfig = [ {plt_location, local}, {plt_location, COUCHDB_ROOT}, {plt_extra_apps, [ - asn1, compiler, crypto, inets, kernel, os_mon, runtime_tools, + asn1, compiler, crypto, inets, kernel, runtime_tools, sasl, setup, ssl, stdlib, syntax_tools, xmerl]}, {warnings, [unmatched_returns, error_handling, race_conditions]}]}, {post_hooks, [{compile, "escript support/build_js.escript"}]} -], +]. C = lists:foldl(fun({K, V}, CfgAcc) -> lists:keystore(K, 1, CfgAcc, {K, V}) diff --git a/rel/files/couchdb.cmd.in b/rel/files/couchdb.cmd.in index 2504f8c60d2..244803bc80f 100644 --- a/rel/files/couchdb.cmd.in +++ b/rel/files/couchdb.cmd.in @@ -23,7 +23,7 @@ FOR /F "tokens=2" %%G IN ("%START_ERL%") DO SET APP_VSN=%%G set BINDIR=%ROOTDIR%/erts-%ERTS_VSN%/bin set EMU=beam set PROGNAME=%~n0 -set PATH=%PATH%;%COUCHDB_BIN_DIR% +set PATH=%COUCHDB_BIN_DIR%;%SystemRoot%\system32;%SystemRoot%;%SystemRoot%\System32\Wbem;%SYSTEMROOT%\System32\WindowsPowerShell\v1.0\ IF NOT DEFINED COUCHDB_QUERY_SERVER_JAVASCRIPT SET COUCHDB_QUERY_SERVER_JAVASCRIPT={{prefix}}/bin/couchjs {{prefix}}/share/server/main.js IF NOT DEFINED COUCHDB_QUERY_SERVER_COFFEESCRIPT SET COUCHDB_QUERY_SERVER_COFFEESCRIPT={{prefix}}/bin/couchjs {{prefix}}/share/server/main-coffee.js @@ -31,6 +31,7 @@ IF NOT DEFINED COUCHDB_FAUXTON_DOCROOT SET COUCHDB_FAUXTON_DOCROOT={{fauxton_roo "%BINDIR%\erl" -boot "%ROOTDIR%\releases\%APP_VSN%\couchdb" ^ -args_file "%ROOTDIR%\etc\vm.args" ^ +-epmd "%BINDIR%\epmd.exe" ^ -config 
"%ROOTDIR%\releases\%APP_VSN%\sys.config" %* :: EXIT /B diff --git a/rel/overlay/bin/remsh b/rel/overlay/bin/remsh index c5e932a8d5f..d1fcdd95f87 100755 --- a/rel/overlay/bin/remsh +++ b/rel/overlay/bin/remsh @@ -32,17 +32,28 @@ BINDIR=$ROOTDIR/erts-$ERTS_VSN/bin PROGNAME=${0##*/} VERBOSE="" -NODE="couchdb@127.0.0.1" +DEFAULT_NODE="couchdb@127.0.0.1" LHOST=127.0.0.1 -VM_ARGS=$COUCHDB_BIN_DIR/../etc/vm.args + +ARGS_FILE="${COUCHDB_ARGS_FILE:-$ROOTDIR/etc/vm.args}" + +# If present, extract cookie from ERL_FLAGS +# This is used by the CouchDB Dockerfile and Helm chart +NODE=$(echo "$ERL_FLAGS" | sed 's/^.*name \([^ ][^ ]*\).*$/\1/g') +if test -f "$ARGS_FILE"; then +# else attempt to extract from vm.args + ARGS_FILE_COOKIE=$(awk '$1=="-name"{print $2}' "$ARGS_FILE") + NODE="${NODE:-$ARGS_FILE_COOKIE}" +fi +NODE="${NODE:-$DEFAULT_NODE}" # If present, extract cookie from ERL_FLAGS # This is used by the CouchDB Dockerfile and Helm chart COOKIE=$(echo "$ERL_FLAGS" | sed 's/^.*setcookie \([^ ][^ ]*\).*$/\1/g') -if test -f "$VM_ARGS"; then +if test -f "$ARGS_FILE"; then # else attempt to extract from vm.args - VM_ARGS_COOKIE=$(awk '$1=="-setcookie"{print $2}' "$VM_ARGS") - COOKIE="${COOKIE:-$VM_ARGS_COOKIE}" + ARGS_FILE_COOKIE=$(awk '$1=="-setcookie"{print $2}' "$ARGS_FILE") + COOKIE="${COOKIE:-$ARGS_FILE_COOKIE}" fi COOKIE="${COOKIE:-monster}" diff --git a/rel/overlay/etc/default.ini b/rel/overlay/etc/default.ini index 1829d0d74ad..16d568fa949 100644 --- a/rel/overlay/etc/default.ini +++ b/rel/overlay/etc/default.ini @@ -73,6 +73,9 @@ default_engine = couch ; on startup if not present. ;single_node = false +; Allow edits on the _security object in the user db. By default, it's disabled. +users_db_security_editable = false + [purge] ; Allowed maximum number of documents in one purge request ;max_document_id_number = 100 @@ -84,9 +87,6 @@ default_engine = couch ; document. Default is 24 hours. 
;index_lag_warn_seconds = 86400 -; Allow edits on the _security object in the user db. By default, it's disabled. -users_db_security_editable = false - [couchdb_engines] ; The keys in this section are the filename extension that ; the specified engine module will use. This is important so @@ -130,13 +130,40 @@ prefer_minimal = Cache-Control, Content-Length, Content-Range, Content-Type, ETa ; _dbs_info in a request max_db_number_for_dbs_info_req = 100 +; set to true to delay the start of a response until the end has been calculated +;buffer_response = false + ; authentication handlers ; authentication_handlers = {chttpd_auth, cookie_authentication_handler}, {chttpd_auth, default_authentication_handler} ; uncomment the next line to enable proxy authentication ; authentication_handlers = {chttpd_auth, proxy_authentication_handler}, {chttpd_auth, cookie_authentication_handler}, {chttpd_auth, default_authentication_handler} +; uncomment the next line to enable JWT authentication +; authentication_handlers = {chttpd_auth, jwt_authentication_handler}, {chttpd_auth, cookie_authentication_handler}, {chttpd_auth, default_authentication_handler} ; prevent non-admins from accessing /_all_dbs -;admin_only_all_dbs = false +; admin_only_all_dbs = true + +;[jwt_auth] +; List of claims to validate +; can be the name of a claim like "exp" or a tuple if the claim requires +; a parameter +; required_claims = exp, {iss, "IssuerNameHere"} +; roles_claim_name = https://example.com/roles +; +; [jwt_keys] +; Configure at least one key here if using the JWT auth handler. +; If your JWT tokens do not include a "kid" attribute, use "_default" +; as the config key, otherwise use the kid as the config key. +; Examples +; hmac:_default = aGVsbG8= +; hmac:foo = aGVsbG8= +; The config values can represent symmetric and asymmetrics keys. 
+; For symmetrics keys, the value is base64 encoded; +; hmac:_default = aGVsbG8= # base64-encoded form of "hello" +; For asymmetric keys, the value is the PEM encoding of the public +; key with newlines replaced with the escape sequence \n. +; rsa:foo = -----BEGIN PUBLIC KEY-----\nMIIBIjAN...IDAQAB\n-----END PUBLIC KEY-----\n +; ec:bar = -----BEGIN PUBLIC KEY-----\nMHYwEAYHK...AzztRs\n-----END PUBLIC KEY-----\n [couch_peruser] ; If enabled, couch_peruser ensures that a private per-user database diff --git a/rel/reltool.config b/rel/reltool.config index 5285504ba67..6acba378bd8 100644 --- a/rel/reltool.config +++ b/rel/reltool.config @@ -19,7 +19,6 @@ crypto, inets, kernel, - os_mon, runtime_tools, sasl, ssl, @@ -51,6 +50,7 @@ ibrowse, ioq, jiffy, + jwtf, ken, khash, mango, @@ -76,7 +76,6 @@ {app, crypto, [{incl_cond, include}]}, {app, inets, [{incl_cond, include}]}, {app, kernel, [{incl_cond, include}]}, - {app, os_mon, [{incl_cond, include}]}, {app, public_key, [{incl_cond, include}]}, {app, runtime_tools, [{incl_cond, include}]}, {app, sasl, [{incl_cond, include}]}, @@ -110,6 +109,7 @@ {app, ibrowse, [{incl_cond, include}]}, {app, ioq, [{incl_cond, include}]}, {app, jiffy, [{incl_cond, include}]}, + {app, jwtf, [{incl_cond, include}]}, {app, ken, [{incl_cond, include}]}, {app, khash, [{incl_cond, include}]}, {app, mango, [{incl_cond, include}]}, diff --git a/src/chttpd/src/chttpd.erl b/src/chttpd/src/chttpd.erl index adde0730f69..3962c8601c7 100644 --- a/src/chttpd/src/chttpd.erl +++ b/src/chttpd/src/chttpd.erl @@ -52,8 +52,9 @@ req, code, headers, - first_chunk, - resp=nil + chunks, + resp=nil, + buffer_response=false }). start_link() -> @@ -780,40 +781,54 @@ start_json_response(Req, Code, Headers0) -> end_json_response(Resp) -> couch_httpd:end_json_response(Resp). + start_delayed_json_response(Req, Code) -> start_delayed_json_response(Req, Code, []). + start_delayed_json_response(Req, Code, Headers) -> start_delayed_json_response(Req, Code, Headers, ""). 
+ start_delayed_json_response(Req, Code, Headers, FirstChunk) -> {ok, #delayed_resp{ start_fun = fun start_json_response/3, req = Req, code = Code, headers = Headers, - first_chunk = FirstChunk}}. + chunks = [FirstChunk], + buffer_response = buffer_response(Req)}}. + start_delayed_chunked_response(Req, Code, Headers) -> start_delayed_chunked_response(Req, Code, Headers, ""). + start_delayed_chunked_response(Req, Code, Headers, FirstChunk) -> {ok, #delayed_resp{ start_fun = fun start_chunked_response/3, req = Req, code = Code, headers = Headers, - first_chunk = FirstChunk}}. + chunks = [FirstChunk], + buffer_response = buffer_response(Req)}}. + -send_delayed_chunk(#delayed_resp{}=DelayedResp, Chunk) -> +send_delayed_chunk(#delayed_resp{buffer_response=false}=DelayedResp, Chunk) -> {ok, #delayed_resp{resp=Resp}=DelayedResp1} = start_delayed_response(DelayedResp), {ok, Resp} = send_chunk(Resp, Chunk), - {ok, DelayedResp1}. + {ok, DelayedResp1}; + +send_delayed_chunk(#delayed_resp{buffer_response=true}=DelayedResp, Chunk) -> + #delayed_resp{chunks = Chunks} = DelayedResp, + {ok, DelayedResp#delayed_resp{chunks = [Chunk | Chunks]}}. + send_delayed_last_chunk(Req) -> send_delayed_chunk(Req, []). + send_delayed_error(#delayed_resp{req=Req,resp=nil}=DelayedResp, Reason) -> {Code, ErrorStr, ReasonStr} = error_info(Reason), {ok, Resp} = send_error(Req, Code, ErrorStr, ReasonStr), @@ -823,6 +838,7 @@ send_delayed_error(#delayed_resp{resp=Resp, req=Req}, Reason) -> log_error_with_stack_trace(Reason), throw({http_abort, Resp, Reason}). + close_delayed_json_object(Resp, Buffer, Terminator, 0) -> % Use a separate chunk to close the streamed array to maintain strict % compatibility with earlier versions. See COUCHDB-2724 @@ -831,11 +847,28 @@ close_delayed_json_object(Resp, Buffer, Terminator, 0) -> close_delayed_json_object(Resp, Buffer, Terminator, _Threshold) -> send_delayed_chunk(Resp, [Buffer | Terminator]). 
-end_delayed_json_response(#delayed_resp{}=DelayedResp) -> + +end_delayed_json_response(#delayed_resp{buffer_response=false}=DelayedResp) -> {ok, #delayed_resp{resp=Resp}} = start_delayed_response(DelayedResp), + end_json_response(Resp); + +end_delayed_json_response(#delayed_resp{buffer_response=true}=DelayedResp) -> + #delayed_resp{ + start_fun = StartFun, + req = Req, + code = Code, + headers = Headers, + chunks = Chunks + } = DelayedResp, + {ok, Resp} = StartFun(Req, Code, Headers), + lists:foreach(fun + ([]) -> ok; + (Chunk) -> send_chunk(Resp, Chunk) + end, lists:reverse(Chunks)), end_json_response(Resp). + get_delayed_req(#delayed_resp{req=#httpd{mochi_req=MochiReq}}) -> MochiReq; get_delayed_req(Resp) -> @@ -847,7 +880,7 @@ start_delayed_response(#delayed_resp{resp=nil}=DelayedResp) -> req=Req, code=Code, headers=Headers, - first_chunk=FirstChunk + chunks=[FirstChunk] }=DelayedResp, {ok, Resp} = StartFun(Req, Code, Headers), case FirstChunk of @@ -858,6 +891,18 @@ start_delayed_response(#delayed_resp{resp=nil}=DelayedResp) -> start_delayed_response(#delayed_resp{}=DelayedResp) -> {ok, DelayedResp}. + +buffer_response(Req) -> + case chttpd:qs_value(Req, "buffer_response") of + "false" -> + false; + "true" -> + true; + _ -> + config:get_boolean("chttpd", "buffer_response", false) + end. + + error_info({Error, Reason}) when is_list(Reason) -> error_info({Error, couch_util:to_binary(Reason)}); error_info(bad_request) -> diff --git a/src/chttpd/src/chttpd_auth.erl b/src/chttpd/src/chttpd_auth.erl index 607f09a8a7b..ffae78171b1 100644 --- a/src/chttpd/src/chttpd_auth.erl +++ b/src/chttpd/src/chttpd_auth.erl @@ -18,6 +18,7 @@ -export([default_authentication_handler/1]). -export([cookie_authentication_handler/1]). -export([proxy_authentication_handler/1]). +-export([jwt_authentication_handler/1]). -export([party_mode_handler/1]). -export([handle_session_req/1]). 
@@ -51,22 +52,30 @@ cookie_authentication_handler(Req) -> proxy_authentication_handler(Req) -> couch_httpd_auth:proxy_authentication_handler(Req). +jwt_authentication_handler(Req) -> + couch_httpd_auth:jwt_authentication_handler(Req). + party_mode_handler(#httpd{method='POST', path_parts=[<<"_session">>]} = Req) -> % See #1947 - users should always be able to attempt a login Req#httpd{user_ctx=#user_ctx{}}; +party_mode_handler(#httpd{path_parts=[<<"_up">>]} = Req) -> + RequireValidUser = config:get_boolean("chttpd", "require_valid_user", false), + RequireValidUserExceptUp = config:get_boolean("chttpd", "require_valid_user_except_for_up", false), + require_valid_user(Req, RequireValidUser andalso not RequireValidUserExceptUp); + party_mode_handler(Req) -> RequireValidUser = config:get_boolean("chttpd", "require_valid_user", false), - ExceptUp = config:get_boolean("chttpd", "require_valid_user_except_for_up", true), - case RequireValidUser andalso not ExceptUp of - true -> - throw({unauthorized, <<"Authentication required.">>}); - false -> - case config:get("admins") of + RequireValidUserExceptUp = config:get_boolean("chttpd", "require_valid_user_except_for_up", false), + require_valid_user(Req, RequireValidUser orelse RequireValidUserExceptUp). + +require_valid_user(_Req, true) -> + throw({unauthorized, <<"Authentication required.">>}); +require_valid_user(Req, false) -> + case config:get("admins") of [] -> Req#httpd{user_ctx = ?ADMIN_USER}; _ -> Req#httpd{user_ctx=#user_ctx{}} - end end. 
handle_session_req(Req) -> diff --git a/src/chttpd/src/chttpd_auth_request.erl b/src/chttpd/src/chttpd_auth_request.erl index fa47f5bfa80..8040f91fd1e 100644 --- a/src/chttpd/src/chttpd_auth_request.erl +++ b/src/chttpd/src/chttpd_auth_request.erl @@ -34,7 +34,7 @@ authorize_request_int(#httpd{path_parts=[]}=Req) -> authorize_request_int(#httpd{path_parts=[<<"favicon.ico">>|_]}=Req) -> Req; authorize_request_int(#httpd{path_parts=[<<"_all_dbs">>|_]}=Req) -> - case config:get_boolean("chttpd", "admin_only_all_dbs", false) of + case config:get_boolean("chttpd", "admin_only_all_dbs", true) of true -> require_admin(Req); false -> Req end; diff --git a/src/chttpd/src/chttpd_db.erl b/src/chttpd/src/chttpd_db.erl index 6a3df6defed..b9954603a43 100644 --- a/src/chttpd/src/chttpd_db.erl +++ b/src/chttpd/src/chttpd_db.erl @@ -383,17 +383,10 @@ handle_design_info_req(Req, _Db, _DDoc) -> create_db_req(#httpd{}=Req, DbName) -> couch_httpd:verify_is_server_admin(Req), - N = chttpd:qs_value(Req, "n", config:get("cluster", "n", "3")), - Q = chttpd:qs_value(Req, "q", config:get("cluster", "q", "8")), - P = chttpd:qs_value(Req, "placement", config:get("cluster", "placement")), + ShardsOpt = parse_shards_opt(Req), EngineOpt = parse_engine_opt(Req), DbProps = parse_partitioned_opt(Req), - Options = [ - {n, N}, - {q, Q}, - {placement, P}, - {props, DbProps} - ] ++ EngineOpt, + Options = lists:append([ShardsOpt, [{props, DbProps}], EngineOpt]), DocUrl = absolute_uri(Req, "/" ++ couch_util:url_encode(DbName)), case fabric:create_db(DbName, Options) of ok -> @@ -1115,7 +1108,7 @@ db_doc_req(#httpd{method='COPY', user_ctx=Ctx}=Req, Db, SourceDocId) -> send_json(Req, HttpCode, [{"Location", Loc}, {"ETag", "\"" ++ ?b2l(couch_doc:rev_to_str(NewTargetRev)) ++ "\""}], - {[{ok, true}] ++ PartRes}); + {PartRes}); db_doc_req(Req, _Db, _DocId) -> send_method_not_allowed(Req, "DELETE,GET,HEAD,POST,PUT,COPY"). 
@@ -1702,6 +1695,40 @@ get_md5_header(Req) -> parse_doc_query(Req) -> lists:foldl(fun parse_doc_query/2, #doc_query_args{}, chttpd:qs(Req)). +parse_shards_opt(Req) -> + [ + {n, parse_shards_opt("n", Req, config:get("cluster", "n", "3"))}, + {q, parse_shards_opt("q", Req, config:get("cluster", "q", "8"))}, + {placement, parse_shards_opt( + "placement", Req, config:get("cluster", "placement"))} + ]. + +parse_shards_opt("placement", Req, Default) -> + Err = <<"The `placement` value should be in a format `zone:n`.">>, + case chttpd:qs_value(Req, "placement", Default) of + Default -> Default; + [] -> throw({bad_request, Err}); + Val -> + try + true = lists:all(fun(Rule) -> + [_, N] = string:tokens(Rule, ":"), + couch_util:validate_positive_int(N) + end, string:tokens(Val, ",")), + Val + catch _:_ -> + throw({bad_request, Err}) + end + end; + +parse_shards_opt(Param, Req, Default) -> + Val = chttpd:qs_value(Req, Param, Default), + Err = ?l2b(["The `", Param, "` value should be a positive integer."]), + case couch_util:validate_positive_int(Val) of + true -> Val; + false -> throw({bad_request, Err}) + end. + + parse_engine_opt(Req) -> case chttpd:qs_value(Req, "engine") of undefined -> @@ -2118,8 +2145,26 @@ parse_partitioned_opt_test_() -> ] }. +parse_shards_opt_test_() -> + { + foreach, + fun setup/0, + fun teardown/1, + [ + t_should_allow_valid_q(), + t_should_default_on_missing_q(), + t_should_throw_on_invalid_q(), + t_should_allow_valid_n(), + t_should_default_on_missing_n(), + t_should_throw_on_invalid_n(), + t_should_allow_valid_placement(), + t_should_default_on_missing_placement(), + t_should_throw_on_invalid_placement() + ] + }. setup() -> + meck:expect(config, get, fun(_, _, Default) -> Default end), ok. teardown(_) -> @@ -2158,4 +2203,103 @@ t_returns_empty_array_for_no_partitioned_qs() -> ?assertEqual(parse_partitioned_opt(Req), []) end). 
+t_should_allow_valid_q() -> + ?_test(begin + Req = mock_request("/all-test21?q=1"), + Opts = parse_shards_opt(Req), + ?assertEqual("1", couch_util:get_value(q, Opts)) + end). + +t_should_default_on_missing_q() -> + ?_test(begin + Req = mock_request("/all-test21"), + Opts = parse_shards_opt(Req), + ?assertEqual("8", couch_util:get_value(q, Opts)) + end). + +t_should_throw_on_invalid_q() -> + ?_test(begin + Req = mock_request("/all-test21?q="), + Err = <<"The `q` value should be a positive integer.">>, + ?assertThrow({bad_request, Err}, parse_shards_opt(Req)) + end). + +t_should_allow_valid_n() -> + ?_test(begin + Req = mock_request("/all-test21?n=1"), + Opts = parse_shards_opt(Req), + ?assertEqual("1", couch_util:get_value(n, Opts)) + end). + +t_should_default_on_missing_n() -> + ?_test(begin + Req = mock_request("/all-test21"), + Opts = parse_shards_opt(Req), + ?assertEqual("3", couch_util:get_value(n, Opts)) + end). + +t_should_throw_on_invalid_n() -> + ?_test(begin + Req = mock_request("/all-test21?n="), + Err = <<"The `n` value should be a positive integer.">>, + ?assertThrow({bad_request, Err}, parse_shards_opt(Req)) + end). + +t_should_allow_valid_placement() -> + { + foreach, + fun() -> ok end, + [ + {"single zone", + ?_test(begin + Req = mock_request("/all-test21?placement=az:1"), + Opts = parse_shards_opt(Req), + ?assertEqual("az:1", couch_util:get_value(placement, Opts)) + end)}, + {"multi zone", + ?_test(begin + Req = mock_request("/all-test21?placement=az:1,co:3"), + Opts = parse_shards_opt(Req), + ?assertEqual("az:1,co:3", + couch_util:get_value(placement, Opts)) + end)} + ] + }. + +t_should_default_on_missing_placement() -> + ?_test(begin + Req = mock_request("/all-test21"), + Opts = parse_shards_opt(Req), + ?assertEqual(undefined, couch_util:get_value(placement, Opts)) + end). 
+ +t_should_throw_on_invalid_placement() -> + Err = <<"The `placement` value should be in a format `zone:n`.">>, + { + foreach, + fun() -> ok end, + [ + {"empty placement", + ?_test(begin + Req = mock_request("/all-test21?placement="), + ?assertThrow({bad_request, Err}, parse_shards_opt(Req)) + end)}, + {"invalid format", + ?_test(begin + Req = mock_request("/all-test21?placement=moon"), + ?assertThrow({bad_request, Err}, parse_shards_opt(Req)) + end)}, + {"invalid n", + ?_test(begin + Req = mock_request("/all-test21?placement=moon:eagle"), + ?assertThrow({bad_request, Err}, parse_shards_opt(Req)) + end)}, + {"one invalid zone", + ?_test(begin + Req = mock_request("/all-test21?placement=az:1,co:moon"), + ?assertThrow({bad_request, Err}, parse_shards_opt(Req)) + end)} + ] + }. + -endif. diff --git a/src/chttpd/src/chttpd_misc.erl b/src/chttpd/src/chttpd_misc.erl index ffb5295b5ef..830fea37862 100644 --- a/src/chttpd/src/chttpd_misc.erl +++ b/src/chttpd/src/chttpd_misc.erl @@ -105,7 +105,7 @@ handle_utils_dir_req(Req, _) -> send_method_not_allowed(Req, "GET,HEAD"). 
maybe_add_csp_headers(Headers, "true") -> - DefaultValues = "default-src 'self'; img-src 'self' data:; font-src 'self'; " + DefaultValues = "child-src 'self' data: blob:; default-src 'self'; img-src 'self' data:; font-src 'self'; " "script-src 'self' 'unsafe-eval'; style-src 'self' 'unsafe-inline';", Value = config:get("csp", "header_value", DefaultValues), [{"Content-Security-Policy", Value} | Headers]; diff --git a/src/chttpd/src/chttpd_rewrite.erl b/src/chttpd/src/chttpd_rewrite.erl index 01965137466..1c2c1f33326 100644 --- a/src/chttpd/src/chttpd_rewrite.erl +++ b/src/chttpd/src/chttpd_rewrite.erl @@ -71,8 +71,9 @@ do_rewrite(#httpd{mochi_req=MochiReq}=Req, {Props}=Rewrite) when is_list(Props) undefined -> erlang:get(mochiweb_request_body); B -> B end, + NewMochiReq:cleanup(), case Body of - undefined -> NewMochiReq:cleanup(); + undefined -> []; _ -> erlang:put(mochiweb_request_body, Body) end, couch_log:debug("rewrite to ~p", [Path]), diff --git a/src/chttpd/test/eunit/chttpd_auth_tests.erl b/src/chttpd/test/eunit/chttpd_auth_tests.erl new file mode 100644 index 00000000000..b4a8eabfb95 --- /dev/null +++ b/src/chttpd/test/eunit/chttpd_auth_tests.erl @@ -0,0 +1,129 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(chttpd_auth_tests). + +-include_lib("couch/include/couch_eunit.hrl"). +-include_lib("couch/include/couch_db.hrl"). 
+ + +setup() -> + Addr = config:get("chttpd", "bind_address", "127.0.0.1"), + Port = mochiweb_socket_server:get(chttpd, port), + BaseUrl = lists:concat(["http://", Addr, ":", Port]), + BaseUrl. + +teardown(_Url) -> + ok. + + +require_valid_user_exception_test_() -> + { + "_up", + { + setup, + fun chttpd_test_util:start_couch/0, + fun chttpd_test_util:stop_couch/1, + { + foreach, + fun setup/0, fun teardown/1, + [ + fun should_handle_require_valid_user_except_up_on_up_route/1, + fun should_handle_require_valid_user_except_up_on_non_up_routes/1 + ] + } + } + }. + +set_require_user_false() -> + ok = config:set("chttpd", "require_valid_user", "false", _Persist=false). + +set_require_user_true() -> + ok = config:set("chttpd", "require_valid_user", "true", _Persist=false). + +set_require_user_except_for_up_false() -> + ok = config:set("chttpd", "require_valid_user_except_for_up", "false", _Persist=false). + +set_require_user_except_for_up_true() -> + ok = config:set("chttpd", "require_valid_user_except_for_up", "true", _Persist=false). 
+ +should_handle_require_valid_user_except_up_on_up_route(_Url) -> + ?_test(begin + % require_valid_user | require_valid_user_except_up | up needs auth + % 1 F | F | F + % 2 F | T | F + % 3 T | F | T + % 4 T | T | F + + UpRequest = #httpd{path_parts=[<<"_up">>]}, + % we use ?ADMIN_USER here because these tests run under admin party + % so this is equivalent to an unauthenticated request + ExpectAuth = {unauthorized, <<"Authentication required.">>}, + ExpectNoAuth = #httpd{user_ctx=?ADMIN_USER,path_parts=[<<"_up">>]}, + + % 1 + set_require_user_false(), + set_require_user_except_for_up_false(), + Result1 = chttpd_auth:party_mode_handler(UpRequest), + ?assertEqual(ExpectNoAuth, Result1), + + % 2 + set_require_user_false(), + set_require_user_except_for_up_true(), + Result2 = chttpd_auth:party_mode_handler(UpRequest), + ?assertEqual(ExpectNoAuth, Result2), + + % 3 + set_require_user_true(), + set_require_user_except_for_up_false(), + ?assertThrow(ExpectAuth, chttpd_auth:party_mode_handler(UpRequest)), + + % 4 + set_require_user_true(), + set_require_user_except_for_up_true(), + Result4 = chttpd_auth:party_mode_handler(UpRequest), + ?assertEqual(ExpectNoAuth, Result4) + + end). 
+ +should_handle_require_valid_user_except_up_on_non_up_routes(_Url) -> + ?_test(begin + % require_valid_user | require_valid_user_except_up | everything not _up requires auth + % 5 F | F | F + % 6 F | T | T + % 7 T | F | T + % 8 T | T | T + + NonUpRequest = #httpd{path_parts=[<<"/">>]}, + ExpectAuth = {unauthorized, <<"Authentication required.">>}, + ExpectNoAuth = #httpd{user_ctx=?ADMIN_USER,path_parts=[<<"/">>]}, + % 5 + set_require_user_false(), + set_require_user_except_for_up_false(), + Result5 = chttpd_auth:party_mode_handler(NonUpRequest), + ?assertEqual(ExpectNoAuth, Result5), + + % 6 + set_require_user_false(), + set_require_user_except_for_up_true(), + ?assertThrow(ExpectAuth, chttpd_auth:party_mode_handler(NonUpRequest)), + + % 7 + set_require_user_true(), + set_require_user_except_for_up_false(), + ?assertThrow(ExpectAuth, chttpd_auth:party_mode_handler(NonUpRequest)), + + % 8 + set_require_user_true(), + set_require_user_except_for_up_true(), + ?assertThrow(ExpectAuth, chttpd_auth:party_mode_handler(NonUpRequest)) + end). 
diff --git a/src/chttpd/test/eunit/chttpd_csp_tests.erl b/src/chttpd/test/eunit/chttpd_csp_tests.erl index e8643625458..b80e3fee6c7 100644 --- a/src/chttpd/test/eunit/chttpd_csp_tests.erl +++ b/src/chttpd/test/eunit/chttpd_csp_tests.erl @@ -56,7 +56,7 @@ should_not_return_any_csp_headers_when_disabled(Url) -> should_apply_default_policy(Url) -> ?_assertEqual( - "default-src 'self'; img-src 'self' data:; font-src 'self'; " + "child-src 'self' data: blob:; default-src 'self'; img-src 'self' data:; font-src 'self'; " "script-src 'self' 'unsafe-eval'; style-src 'self' 'unsafe-inline';", begin {ok, _, Headers, _} = test_request:get(Url), diff --git a/src/chttpd/test/eunit/chttpd_db_test.erl b/src/chttpd/test/eunit/chttpd_db_test.erl index 204332d7f99..d844aa5b678 100644 --- a/src/chttpd/test/eunit/chttpd_db_test.erl +++ b/src/chttpd/test/eunit/chttpd_db_test.erl @@ -73,6 +73,7 @@ all_test_() -> fun should_return_update_seq_when_set_on_all_docs/1, fun should_not_return_update_seq_when_unset_on_all_docs/1, fun should_return_correct_id_on_doc_copy/1, + fun should_return_only_one_ok_on_doc_copy/1, fun should_return_400_for_bad_engine/1, fun should_not_change_db_proper_after_rewriting_shardmap/1, fun should_succeed_on_all_docs_with_queries_keys/1, @@ -269,6 +270,17 @@ should_return_correct_id_on_doc_copy(Url) -> ] end)}. +should_return_only_one_ok_on_doc_copy(Url) -> + {timeout, ?TIMEOUT, ?_test(begin + {ok, _, _, _} = create_doc(Url, "testdoc"), + {_, _, _, ResultBody} = test_request:copy(Url ++ "/testdoc", + [?CONTENT_JSON, ?AUTH, ?DESTHEADER1]), + {ResultJson} = jiffy:decode(ResultBody), + NumOks = length(lists:filter(fun({Key, Value}) -> Key == <<"ok">> end, ResultJson)), + [ + ?assertEqual(1, NumOks) + ] + end)}. 
attachment_doc() -> {ok, Data} = file:read_file(?FIXTURE_TXT), diff --git a/src/chttpd/test/eunit/chttpd_delayed_test.erl b/src/chttpd/test/eunit/chttpd_delayed_test.erl new file mode 100644 index 00000000000..63e6cb0e57e --- /dev/null +++ b/src/chttpd/test/eunit/chttpd_delayed_test.erl @@ -0,0 +1,72 @@ +-module(chttpd_delayed_test). + +-include_lib("couch/include/couch_eunit.hrl"). +-include_lib("couch/include/couch_db.hrl"). + +-define(USER, "chttpd_view_test_admin"). +-define(PASS, "pass"). +-define(AUTH, {basic_auth, {?USER, ?PASS}}). +-define(CONTENT_JSON, {"Content-Type", "application/json"}). +-define(DDOC, "{\"_id\": \"_design/bar\", \"views\": {\"baz\": + {\"map\": \"function(doc) {emit(doc._id, doc._id);}\"}}}"). + +-define(FIXTURE_TXT, ?ABS_PATH(?FILE)). +-define(i2l(I), integer_to_list(I)). +-define(TIMEOUT, 60). % seconds + +setup() -> + Hashed = couch_passwords:hash_admin_password(?PASS), + ok = config:set("admins", ?USER, ?b2l(Hashed), _Persist=false), + ok = config:set("chttpd", "buffer_response", "true", _Persist=false), + TmpDb = ?tempdb(), + Addr = config:get("chttpd", "bind_address", "127.0.0.1"), + Port = mochiweb_socket_server:get(chttpd, port), + Url = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(TmpDb)]), + create_db(Url), + Url. + +teardown(Url) -> + delete_db(Url), + ok = config:delete("admins", ?USER, _Persist=false). + +create_db(Url) -> + {ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"), + ?assert(Status =:= 201 orelse Status =:= 202). + + +delete_db(Url) -> + {ok, 200, _, _} = test_request:delete(Url, [?AUTH]). + + +all_test_() -> + { + "chttpd delay tests", + { + setup, + fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1, + { + foreach, + fun setup/0, fun teardown/1, + [ + fun test_buffer_response_all_docs/1, + fun test_buffer_response_changes/1 + ] + } + } + }. + + +test_buffer_response_all_docs(Url) -> + assert_successful_response(Url ++ "/_all_docs"). 
+ + +test_buffer_response_changes(Url) -> + assert_successful_response(Url ++ "/_changes"). + + +assert_successful_response(Url) -> + {timeout, ?TIMEOUT, ?_test(begin + {ok, Code, _Headers, _Body} = test_request:get(Url, [?AUTH]), + ?assertEqual(200, Code) + end)}. + diff --git a/src/couch/include/couch_eunit.hrl b/src/couch/include/couch_eunit.hrl index d3611c88b9c..18852489380 100644 --- a/src/couch/include/couch_eunit.hrl +++ b/src/couch/include/couch_eunit.hrl @@ -49,6 +49,11 @@ Suffix = couch_uuids:random(), iolist_to_binary(["eunit-test-db-", Suffix]) end). +-define(tempshard, + fun() -> + Suffix = couch_uuids:random(), + iolist_to_binary(["shards/80000000-ffffffff/eunit-test-db-", Suffix]) + end). -define(docid, fun() -> integer_to_list(couch_util:unique_monotonic_integer()) diff --git a/src/couch/priv/couch_js/1.8.5/help.h b/src/couch/priv/couch_js/1.8.5/help.h index 678651fd3ed..335935ed01b 100644 --- a/src/couch/priv/couch_js/1.8.5/help.h +++ b/src/couch/priv/couch_js/1.8.5/help.h @@ -16,7 +16,7 @@ #include "config.h" static const char VERSION_TEMPLATE[] = - "%s - %s\n" + "%s - %s (SpiderMonkey 1.8.5)\n" "\n" "Licensed under the Apache License, Version 2.0 (the \"License\"); you may " "not use\n" diff --git a/src/couch/priv/couch_js/60/help.h b/src/couch/priv/couch_js/60/help.h index 678651fd3ed..ffb6eb40ae1 100644 --- a/src/couch/priv/couch_js/60/help.h +++ b/src/couch/priv/couch_js/60/help.h @@ -16,7 +16,7 @@ #include "config.h" static const char VERSION_TEMPLATE[] = - "%s - %s\n" + "%s - %s (SpiderMonkey 60)\n" "\n" "Licensed under the Apache License, Version 2.0 (the \"License\"); you may " "not use\n" diff --git a/src/couch/priv/couch_js/60/http.cpp b/src/couch/priv/couch_js/60/http.cpp index 9ab47b2f099..e1e44d62207 100644 --- a/src/couch/priv/couch_js/60/http.cpp +++ b/src/couch/priv/couch_js/60/http.cpp @@ -18,7 +18,6 @@ #include #include #include "config.h" -#include "utf8.h" #include "util.h" // Soft dependency on cURL bindings because they're 
@@ -100,7 +99,6 @@ http_check_enabled() #ifdef XP_WIN #define strcasecmp _strcmpi #define strncasecmp _strnicmp -#define snprintf _snprintf #endif @@ -109,7 +107,7 @@ typedef struct curl_slist CurlHeaders; typedef struct { int method; - char* url; + std::string url; CurlHeaders* req_headers; int16_t last_status; } HTTPData; @@ -127,21 +125,15 @@ const char* METHODS[] = {"GET", "HEAD", "POST", "PUT", "DELETE", "COPY", "OPTION #define OPTIONS 6 -static bool -go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t blen); - - -static JSString* -str_from_binary(JSContext* cx, char* data, size_t length); +static bool go(JSContext* cx, JSObject* obj, HTTPData* http, std::string& body); bool http_ctor(JSContext* cx, JSObject* req) { - HTTPData* http = NULL; + HTTPData* http = new HTTPData(); bool ret = false; - http = (HTTPData*) malloc(sizeof(HTTPData)); if(!http) { JS_ReportErrorUTF8(cx, "Failed to create CouchHTTP instance."); @@ -149,7 +141,6 @@ http_ctor(JSContext* cx, JSObject* req) } http->method = -1; - http->url = NULL; http->req_headers = NULL; http->last_status = -1; @@ -159,7 +150,7 @@ http_ctor(JSContext* cx, JSObject* req) goto success; error: - if(http) free(http); + if(http) delete http; success: return ret; @@ -171,9 +162,8 @@ http_dtor(JSFreeOp* fop, JSObject* obj) { HTTPData* http = (HTTPData*) JS_GetPrivate(obj); if(http) { - if(http->url) free(http->url); if(http->req_headers) curl_slist_free_all(http->req_headers); - free(http); + delete http; } } @@ -182,56 +172,50 @@ bool http_open(JSContext* cx, JSObject* req, JS::Value mth, JS::Value url, JS::Value snc) { HTTPData* http = (HTTPData*) JS_GetPrivate(req); - char* method = NULL; int methid; - bool ret = false; if(!http) { JS_ReportErrorUTF8(cx, "Invalid CouchHTTP instance."); - goto done; + return false; } - if(mth.isUndefined()) { - JS_ReportErrorUTF8(cx, "You must specify a method."); - goto done; + if(!mth.isString()) { + JS_ReportErrorUTF8(cx, "Method must be a string."); + return 
false; } - method = enc_string(cx, mth, NULL); - if(!method) { + std::string method; + if(!js_to_string(cx, JS::RootedValue(cx, mth), method)) { JS_ReportErrorUTF8(cx, "Failed to encode method."); - goto done; + return false; } for(methid = 0; METHODS[methid] != NULL; methid++) { - if(strcasecmp(METHODS[methid], method) == 0) break; + if(strcasecmp(METHODS[methid], method.c_str()) == 0) break; } if(methid > OPTIONS) { JS_ReportErrorUTF8(cx, "Invalid method specified."); - goto done; + return false; } http->method = methid; - if(url.isUndefined()) { - JS_ReportErrorUTF8(cx, "You must specify a URL."); - goto done; - } - - if(http->url != NULL) { - free(http->url); - http->url = NULL; + if(!url.isString()) { + JS_ReportErrorUTF8(cx, "URL must be a string"); + return false; } - http->url = enc_string(cx, url, NULL); - if(http->url == NULL) { + std::string urlstr; + if(!js_to_string(cx, JS::RootedValue(cx, url), urlstr)) { JS_ReportErrorUTF8(cx, "Failed to encode URL."); - goto done; + return false; } + http->url = urlstr; if(snc.isBoolean() && snc.isTrue()) { JS_ReportErrorUTF8(cx, "Synchronous flag must be false."); - goto done; + return false; } if(http->req_headers) { @@ -242,11 +226,7 @@ http_open(JSContext* cx, JSObject* req, JS::Value mth, JS::Value url, JS::Value // Disable Expect: 100-continue http->req_headers = curl_slist_append(http->req_headers, "Expect:"); - ret = true; - -done: - if(method) free(method); - return ret; + return true; } @@ -254,88 +234,60 @@ bool http_set_hdr(JSContext* cx, JSObject* req, JS::Value name, JS::Value val) { HTTPData* http = (HTTPData*) JS_GetPrivate(req); - char* keystr = NULL; - char* valstr = NULL; - char* hdrbuf = NULL; - size_t hdrlen = -1; - bool ret = false; if(!http) { JS_ReportErrorUTF8(cx, "Invalid CouchHTTP instance."); - goto done; + return false; } - if(name.isUndefined()) + if(!name.isString()) { - JS_ReportErrorUTF8(cx, "You must speciy a header name."); - goto done; + JS_ReportErrorUTF8(cx, "Header names must 
be strings."); + return false; } - keystr = enc_string(cx, name, NULL); - if(!keystr) + std::string keystr; + if(!js_to_string(cx, JS::RootedValue(cx, name), keystr)) { JS_ReportErrorUTF8(cx, "Failed to encode header name."); - goto done; + return false; } - if(val.isUndefined()) + if(!val.isString()) { - JS_ReportErrorUTF8(cx, "You must specify a header value."); - goto done; + JS_ReportErrorUTF8(cx, "Header values must be strings."); + return false; } - valstr = enc_string(cx, val, NULL); - if(!valstr) - { + std::string valstr; + if(!js_to_string(cx, JS::RootedValue(cx, val), valstr)) { JS_ReportErrorUTF8(cx, "Failed to encode header value."); - goto done; - } - - hdrlen = strlen(keystr) + strlen(valstr) + 3; - hdrbuf = (char*) malloc(hdrlen * sizeof(char)); - if(!hdrbuf) { - JS_ReportErrorUTF8(cx, "Failed to allocate header buffer."); - goto done; + return false; } - snprintf(hdrbuf, hdrlen, "%s: %s", keystr, valstr); - http->req_headers = curl_slist_append(http->req_headers, hdrbuf); - - ret = true; + std::string header = keystr + ": " + valstr; + http->req_headers = curl_slist_append(http->req_headers, header.c_str()); -done: - if(keystr) free(keystr); - if(valstr) free(valstr); - if(hdrbuf) free(hdrbuf); - return ret; + return true; } bool http_send(JSContext* cx, JSObject* req, JS::Value body) { HTTPData* http = (HTTPData*) JS_GetPrivate(req); - char* bodystr = NULL; - size_t bodylen = 0; - bool ret = false; if(!http) { JS_ReportErrorUTF8(cx, "Invalid CouchHTTP instance."); - goto done; + return false; } - if(!body.isUndefined()) { - bodystr = enc_string(cx, body, &bodylen); - if(!bodystr) { - JS_ReportErrorUTF8(cx, "Failed to encode body."); - goto done; - } + std::string bodystr; + if(!js_to_string(cx, JS::RootedValue(cx, body), bodystr)) { + JS_ReportErrorUTF8(cx, "Failed to encode body."); + return false; } - ret = go(cx, req, http, bodystr, bodylen); - -done: - if(bodystr) free(bodystr); - return ret; + return go(cx, req, http, bodystr); } int @@ -395,7 
+347,7 @@ typedef struct { HTTPData* http; JSContext* cx; JSObject* resp_headers; - char* sendbuf; + const char* sendbuf; size_t sendlen; size_t sent; int sent_once; @@ -417,10 +369,9 @@ static size_t recv_body(void *ptr, size_t size, size_t nmem, void *data); static size_t recv_header(void *ptr, size_t size, size_t nmem, void *data); static bool -go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t bodylen) +go(JSContext* cx, JSObject* obj, HTTPData* http, std::string& body) { CurlState state; - char* referer; JSString* jsbody; bool ret = false; JS::Value tmp; @@ -431,8 +382,8 @@ go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t bodylen) state.cx = cx; state.http = http; - state.sendbuf = body; - state.sendlen = bodylen; + state.sendbuf = body.c_str(); + state.sendlen = body.size(); state.sent = 0; state.sent_once = 0; @@ -463,13 +414,13 @@ go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t bodylen) tmp = JS_GetReservedSlot(obj, 0); - if(!(referer = enc_string(cx, tmp, NULL))) { + std::string referer; + if(!js_to_string(cx, JS::RootedValue(cx, tmp), referer)) { JS_ReportErrorUTF8(cx, "Failed to encode referer."); if(state.recvbuf) JS_free(cx, state.recvbuf); - return ret; + return ret; } - curl_easy_setopt(HTTP_HANDLE, CURLOPT_REFERER, referer); - free(referer); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_REFERER, referer.c_str()); if(http->method < 0 || http->method > OPTIONS) { JS_ReportErrorUTF8(cx, "INTERNAL: Unknown method."); @@ -490,15 +441,15 @@ go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t bodylen) curl_easy_setopt(HTTP_HANDLE, CURLOPT_FOLLOWLOCATION, 0); } - if(body && bodylen) { - curl_easy_setopt(HTTP_HANDLE, CURLOPT_INFILESIZE, bodylen); + if(body.size() > 0) { + curl_easy_setopt(HTTP_HANDLE, CURLOPT_INFILESIZE, body.size()); } else { curl_easy_setopt(HTTP_HANDLE, CURLOPT_INFILESIZE, 0); } // curl_easy_setopt(HTTP_HANDLE, CURLOPT_VERBOSE, 1); - curl_easy_setopt(HTTP_HANDLE, 
CURLOPT_URL, http->url); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_URL, http->url.c_str()); curl_easy_setopt(HTTP_HANDLE, CURLOPT_HTTPHEADER, http->req_headers); curl_easy_setopt(HTTP_HANDLE, CURLOPT_READDATA, &state); curl_easy_setopt(HTTP_HANDLE, CURLOPT_SEEKDATA, &state); @@ -532,12 +483,13 @@ go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t bodylen) if(state.recvbuf) { state.recvbuf[state.read] = '\0'; - jsbody = dec_string(cx, state.recvbuf, state.read+1); + std::string bodystr(state.recvbuf, state.read); + jsbody = string_to_js(cx, bodystr); if(!jsbody) { // If we can't decode the body as UTF-8 we forcefully // convert it to a string by just forcing each byte // to a char16_t. - jsbody = str_from_binary(cx, state.recvbuf, state.read); + jsbody = JS_NewStringCopyN(cx, state.recvbuf, state.read); if(!jsbody) { if(!JS_IsExceptionPending(cx)) { JS_ReportErrorUTF8(cx, "INTERNAL: Failed to decode body."); @@ -572,7 +524,7 @@ go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t bodylen) static size_t send_body(void *ptr, size_t size, size_t nmem, void *data) { - CurlState* state = (CurlState*) data; + CurlState* state = static_cast(data); size_t length = size * nmem; size_t towrite = state->sendlen - state->sent; @@ -598,19 +550,19 @@ send_body(void *ptr, size_t size, size_t nmem, void *data) static int seek_body(void* ptr, curl_off_t offset, int origin) { - CurlState* state = (CurlState*) ptr; + CurlState* state = static_cast(ptr); if(origin != SEEK_SET) return -1; - state->sent = (size_t) offset; - return (int) state->sent; + state->sent = static_cast(offset); + return static_cast(state->sent); } static size_t recv_header(void *ptr, size_t size, size_t nmem, void *data) { - CurlState* state = (CurlState*) data; + CurlState* state = static_cast(data); char code[4]; - char* header = (char*) ptr; + char* header = static_cast(ptr); size_t length = size * nmem; JSString* hdr = NULL; uint32_t hdrlen; @@ -638,7 +590,8 @@ recv_header(void 
*ptr, size_t size, size_t nmem, void *data) } // Append the new header to our array. - hdr = dec_string(state->cx, header, length); + std::string hdrstr(header, length); + hdr = string_to_js(state->cx, hdrstr); if(!hdr) { return CURLE_WRITE_ERROR; } @@ -659,14 +612,17 @@ recv_header(void *ptr, size_t size, size_t nmem, void *data) static size_t recv_body(void *ptr, size_t size, size_t nmem, void *data) { - CurlState* state = (CurlState*) data; + CurlState* state = static_cast(data); size_t length = size * nmem; char* tmp = NULL; if(!state->recvbuf) { state->recvlen = 4096; state->read = 0; - state->recvbuf = (char *)JS_malloc(state->cx, state->recvlen); + state->recvbuf = static_cast(JS_malloc( + state->cx, + state->recvlen + )); } if(!state->recvbuf) { @@ -676,7 +632,12 @@ recv_body(void *ptr, size_t size, size_t nmem, void *data) // +1 so we can add '\0' back up in the go function. size_t oldlen = state->recvlen; while(length+1 > state->recvlen - state->read) state->recvlen *= 2; - tmp = (char *) JS_realloc(state->cx, state->recvbuf, oldlen, state->recvlen); + tmp = static_cast(JS_realloc( + state->cx, + state->recvbuf, + oldlen, + state->recvlen + )); if(!tmp) return CURLE_WRITE_ERROR; state->recvbuf = tmp; @@ -685,23 +646,4 @@ recv_body(void *ptr, size_t size, size_t nmem, void *data) return length; } -JSString* -str_from_binary(JSContext* cx, char* data, size_t length) -{ - char16_t* conv = (char16_t*) JS_malloc(cx, length * sizeof(char16_t)); - JSString* ret = NULL; - size_t i; - - if(!conv) return NULL; - - for(i = 0; i < length; i++) { - conv[i] = (char16_t) data[i]; - } - - ret = JS_NewUCString(cx, conv, length); - if(!ret) JS_free(cx, conv); - - return ret; -} - #endif /* HAVE_CURL */ diff --git a/src/couch/priv/couch_js/60/main.cpp b/src/couch/priv/couch_js/60/main.cpp index b6157ed850a..828b9dab5c8 100644 --- a/src/couch/priv/couch_js/60/main.cpp +++ b/src/couch/priv/couch_js/60/main.cpp @@ -28,7 +28,6 @@ #include "config.h" #include "http.h" -#include 
"utf8.h" #include "util.h" static bool enableSharedMemory = true; @@ -99,8 +98,9 @@ req_ctor(JSContext* cx, unsigned int argc, JS::Value* vp) static bool req_open(JSContext* cx, unsigned int argc, JS::Value* vp) { - JSObject* obj = JS_THIS_OBJECT(cx, vp); JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + JS::Value vobj = args.computeThis(cx); + JSObject* obj = vobj.toObjectOrNull(); bool ret = false; if(argc == 2) { @@ -119,8 +119,9 @@ req_open(JSContext* cx, unsigned int argc, JS::Value* vp) static bool req_set_hdr(JSContext* cx, unsigned int argc, JS::Value* vp) { - JSObject* obj = JS_THIS_OBJECT(cx, vp); JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + JS::Value vobj = args.computeThis(cx); + JSObject* obj = vobj.toObjectOrNull(); bool ret = false; if(argc == 2) { @@ -137,8 +138,9 @@ req_set_hdr(JSContext* cx, unsigned int argc, JS::Value* vp) static bool req_send(JSContext* cx, unsigned int argc, JS::Value* vp) { - JSObject* obj = JS_THIS_OBJECT(cx, vp); JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + JS::Value vobj = args.computeThis(cx); + JSObject* obj = vobj.toObjectOrNull(); bool ret = false; if(argc == 1) { @@ -155,7 +157,9 @@ static bool req_status(JSContext* cx, unsigned int argc, JS::Value* vp) { JS::CallArgs args = JS::CallArgsFromVp(argc, vp); - JSObject* obj = JS_THIS_OBJECT(cx, vp); + JS::Value vobj = args.computeThis(cx); + JSObject* obj = vobj.toObjectOrNull(); + int status = http_status(cx, obj); if(status < 0) @@ -169,8 +173,10 @@ static bool base_url(JSContext *cx, unsigned int argc, JS::Value* vp) { JS::CallArgs args = JS::CallArgsFromVp(argc, vp); - JSObject* obj = JS_THIS_OBJECT(cx, vp); - couch_args *cargs = (couch_args*)JS_GetContextPrivate(cx); + JS::Value vobj = args.computeThis(cx); + JSObject* obj = vobj.toObjectOrNull(); + + couch_args *cargs = static_cast(JS_GetContextPrivate(cx)); JS::Value uri_val; bool rc = http_uri(cx, obj, cargs, &uri_val); args.rval().set(uri_val); @@ -226,9 +232,15 @@ evalcx(JSContext *cx, unsigned 
int argc, JS::Value* vp) if (!sandbox) return false; } - JS_BeginRequest(cx); + JSAutoRequest ar(cx); + if (!sandbox) { + sandbox = NewSandbox(cx, false); + if (!sandbox) + return false; + } + js::AutoStableStringChars strChars(cx); if (!strChars.initTwoByte(cx, str)) return false; @@ -237,12 +249,6 @@ evalcx(JSContext *cx, unsigned int argc, JS::Value* vp) size_t srclen = chars.length(); const char16_t* src = chars.begin().get(); - if (!sandbox) { - sandbox = NewSandbox(cx, false); - if (!sandbox) - return false; - } - if(srclen == 0) { args.rval().setObject(*sandbox); } else { @@ -283,7 +289,19 @@ static bool print(JSContext* cx, unsigned int argc, JS::Value* vp) { JS::CallArgs args = JS::CallArgsFromVp(argc, vp); - couch_print(cx, argc, args); + + bool use_stderr = false; + if(argc > 1 && args[1].isTrue()) { + use_stderr = true; + } + + if(!args[0].isString()) { + JS_ReportErrorUTF8(cx, "Unable to print non-string value."); + return false; + } + + couch_print(cx, args[0], use_stderr); + args.rval().setUndefined(); return true; } @@ -386,7 +404,7 @@ static JSFunctionSpec global_functions[] = { static bool csp_allows(JSContext* cx) { - couch_args *args = (couch_args*)JS_GetContextPrivate(cx); + couch_args* args = static_cast(JS_GetContextPrivate(cx)); if(args->eval) { return true; } else { @@ -473,10 +491,18 @@ main(int argc, const char* argv[]) // Compile and run JS::CompileOptions options(cx); options.setFileAndLine(args->scripts[i], 1); + options.setUTF8(true); JS::RootedScript script(cx); if(!JS_CompileScript(cx, scriptsrc, slen, options, &script)) { - fprintf(stderr, "Failed to compile script.\n"); + JS::RootedValue exc(cx); + if(!JS_GetPendingException(cx, &exc)) { + fprintf(stderr, "Failed to compile script.\n"); + } else { + JS::RootedObject exc_obj(cx, &exc.toObject()); + JSErrorReport* report = JS_ErrorFromException(cx, exc_obj); + couch_error(cx, report); + } return 1; } @@ -484,7 +510,14 @@ main(int argc, const char* argv[]) JS::RootedValue result(cx); 
if(JS_ExecuteScript(cx, script, &result) != true) { - fprintf(stderr, "Failed to execute script.\n"); + JS::RootedValue exc(cx); + if(!JS_GetPendingException(cx, &exc)) { + fprintf(stderr, "Failed to execute script.\n"); + } else { + JS::RootedObject exc_obj(cx, &exc.toObject()); + JSErrorReport* report = JS_ErrorFromException(cx, exc_obj); + couch_error(cx, report); + } return 1; } diff --git a/src/couch/priv/couch_js/60/utf8.cpp b/src/couch/priv/couch_js/60/utf8.cpp deleted file mode 100644 index 38dfa62245d..00000000000 --- a/src/couch/priv/couch_js/60/utf8.cpp +++ /dev/null @@ -1,301 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); you may not -// use this file except in compliance with the License. You may obtain a copy of -// the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations under -// the License. 
- -#include -#include -#include -#include -#include "config.h" -#include "util.h" - -static int -enc_char(uint8_t *utf8Buffer, uint32_t ucs4Char) -{ - int utf8Length = 1; - - if (ucs4Char < 0x80) - { - *utf8Buffer = (uint8_t)ucs4Char; - } - else - { - int i; - uint32_t a = ucs4Char >> 11; - utf8Length = 2; - while(a) - { - a >>= 5; - utf8Length++; - } - i = utf8Length; - while(--i) - { - utf8Buffer[i] = (uint8_t)((ucs4Char & 0x3F) | 0x80); - ucs4Char >>= 6; - } - *utf8Buffer = (uint8_t)(0x100 - (1 << (8-utf8Length)) + ucs4Char); - } - - return utf8Length; -} - -static bool -enc_charbuf(const char16_t* src, size_t srclen, char* dst, size_t* dstlenp) -{ - size_t i; - size_t utf8Len; - size_t dstlen = *dstlenp; - size_t origDstlen = dstlen; - char16_t c; - char16_t c2; - uint32_t v; - uint8_t utf8buf[6]; - - if(!dst) - { - dstlen = origDstlen = (size_t) -1; - } - - while(srclen) - { - c = *src++; - srclen--; - - if(c <= 0xD7FF || c >= 0xE000) - { - v = (uint32_t) c; - } - else if(c >= 0xD800 && c <= 0xDBFF) - { - if(srclen < 1) goto buffer_too_small; - c2 = *src++; - srclen--; - if(c2 >= 0xDC00 && c2 <= 0xDFFF) - { - v = (uint32_t) (((c - 0xD800) << 10) + (c2 - 0xDC00) + 0x10000); - } - else - { - // Invalid second half of surrogate pair - v = (uint32_t) 0xFFFD; - // Undo our character advancement - src--; - srclen++; - } - } - else - { - // Invalid first half surrogate pair - v = (uint32_t) 0xFFFD; - } - - if(v < 0x0080) - { - /* no encoding necessary - performance hack */ - if(!dstlen) goto buffer_too_small; - if(dst) *dst++ = (char) v; - utf8Len = 1; - } - else - { - utf8Len = enc_char(utf8buf, v); - if(utf8Len > dstlen) goto buffer_too_small; - if(dst) - { - for (i = 0; i < utf8Len; i++) - { - *dst++ = (char) utf8buf[i]; - } - } - } - dstlen -= utf8Len; - } - - *dstlenp = (origDstlen - dstlen); - return true; - -buffer_too_small: - *dstlenp = (origDstlen - dstlen); - return false; -} - -char* -enc_string(JSContext* cx, JS::Value arg, size_t* buflen) -{ - JSString* 
str = NULL; - const char16_t* src = NULL; - char* bytes = NULL; - size_t srclen = 0; - size_t byteslen = 0; - js::AutoStableStringChars rawChars(cx); - - str = arg.toString(); - if(!str) goto error; - - if (!rawChars.initTwoByte(cx, str)) - return NULL; - - src = rawChars.twoByteRange().begin().get(); - srclen = JS_GetStringLength(str); - - if(!enc_charbuf(src, srclen, NULL, &byteslen)) goto error; - - bytes = (char *)JS_malloc(cx, (byteslen) + 1); - bytes[byteslen] = 0; - - if(!enc_charbuf(src, srclen, bytes, &byteslen)) goto error; - - if(buflen) *buflen = byteslen; - goto success; - -error: - if(bytes != NULL) JS_free(cx, bytes); - bytes = NULL; - -success: - return bytes; -} - -static uint32_t -dec_char(const uint8_t *utf8Buffer, int utf8Length) -{ - uint32_t ucs4Char; - uint32_t minucs4Char; - - /* from Unicode 3.1, non-shortest form is illegal */ - static const uint32_t minucs4Table[] = { - 0x00000080, 0x00000800, 0x0001000, 0x0020000, 0x0400000 - }; - - if (utf8Length == 1) - { - ucs4Char = *utf8Buffer; - } - else - { - ucs4Char = *utf8Buffer++ & ((1<<(7-utf8Length))-1); - minucs4Char = minucs4Table[utf8Length-2]; - while(--utf8Length) - { - ucs4Char = ucs4Char<<6 | (*utf8Buffer++ & 0x3F); - } - if(ucs4Char < minucs4Char || ucs4Char == 0xFFFE || ucs4Char == 0xFFFF) - { - ucs4Char = 0xFFFD; - } - } - - return ucs4Char; -} - -static bool -dec_charbuf(const char *src, size_t srclen, char16_t *dst, size_t *dstlenp) -{ - uint32_t v; - size_t offset = 0; - size_t j; - size_t n; - size_t dstlen = *dstlenp; - size_t origDstlen = dstlen; - - if(!dst) dstlen = origDstlen = (size_t) -1; - - while(srclen) - { - v = (uint8_t) *src; - n = 1; - - if(v & 0x80) - { - while(v & (0x80 >> n)) - { - n++; - } - - if(n > srclen) goto buffer_too_small; - if(n == 1 || n > 6) goto bad_character; - - for(j = 1; j < n; j++) - { - if((src[j] & 0xC0) != 0x80) goto bad_character; - } - - v = dec_char((const uint8_t *) src, n); - if(v >= 0x10000) - { - v -= 0x10000; - - if(v > 0xFFFFF || 
dstlen < 2) - { - *dstlenp = (origDstlen - dstlen); - return false; - } - - if(dstlen < 2) goto buffer_too_small; - - if(dst) - { - *dst++ = (char16_t)((v >> 10) + 0xD800); - v = (char16_t)((v & 0x3FF) + 0xDC00); - } - dstlen--; - } - } - - if(!dstlen) goto buffer_too_small; - if(dst) *dst++ = (char16_t) v; - - dstlen--; - offset += n; - src += n; - srclen -= n; - } - - *dstlenp = (origDstlen - dstlen); - return true; - -bad_character: - *dstlenp = (origDstlen - dstlen); - return false; - -buffer_too_small: - *dstlenp = (origDstlen - dstlen); - return false; -} - -JSString* -dec_string(JSContext* cx, const char* bytes, size_t byteslen) -{ - JSString* str = NULL; - char16_t* chars = NULL; - size_t charslen; - - if(!dec_charbuf(bytes, byteslen, NULL, &charslen)) goto error; - - chars = (char16_t *)JS_malloc(cx, (charslen + 1) * sizeof(char16_t)); - if(!chars) return NULL; - chars[charslen] = 0; - - if(!dec_charbuf(bytes, byteslen, chars, &charslen)) goto error; - - str = JS_NewUCString(cx, chars, charslen - 1); - if(!str) goto error; - - goto success; - -error: - if(chars != NULL) JS_free(cx, chars); - str = NULL; - -success: - return str; -} diff --git a/src/couch/priv/couch_js/60/util.cpp b/src/couch/priv/couch_js/60/util.cpp index 92c6cbf4a23..c37c41f2fae 100644 --- a/src/couch/priv/couch_js/60/util.cpp +++ b/src/couch/priv/couch_js/60/util.cpp @@ -13,53 +13,76 @@ #include #include +#include + #include #include +#include #include +#include #include "help.h" #include "util.h" -#include "utf8.h" std::string js_to_string(JSContext* cx, JS::HandleValue val) { + JS::AutoSaveExceptionState exc_state(cx); JS::RootedString sval(cx); sval = val.toString(); JS::UniqueChars chars(JS_EncodeStringToUTF8(cx, sval)); if(!chars) { JS_ClearPendingException(cx); - fprintf(stderr, "Error converting value to string.\n"); - exit(3); + return std::string(); } return chars.get(); } -std::string -js_to_string(JSContext* cx, JSString *str) +bool +js_to_string(JSContext* cx, 
JS::HandleValue val, std::string& str) { - JS::UniqueChars chars(JS_EncodeString(cx, str)); - if(!chars) { - JS_ClearPendingException(cx); - fprintf(stderr, "Error converting to string.\n"); - exit(3); + if(!val.isString()) { + return false; } - return chars.get(); + if(JS_GetStringLength(val.toString()) == 0) { + str = ""; + return true; + } + + std::string conv = js_to_string(cx, val); + if(!conv.size()) { + return false; + } + + str = conv; + return true; } JSString* -string_to_js(JSContext* cx, const std::string& s) +string_to_js(JSContext* cx, const std::string& raw) { - JSString* ret = JS_NewStringCopyN(cx, s.c_str(), s.size()); - if(ret != nullptr) { - return ret; + JS::UTF8Chars utf8(raw.c_str(), raw.size()); + JS::UniqueTwoByteChars utf16; + size_t len; + + utf16.reset(JS::UTF8CharsToNewTwoByteCharsZ(cx, utf8, &len).get()); + if(!utf16) { + return nullptr; + } + + JSString* ret = JS_NewUCString(cx, utf16.get(), len); + + if(ret) { + // JS_NewUCString took ownership on success. We shift + // the resulting pointer into Unused to silence the + // compiler warning. 
+ mozilla::Unused << utf16.release(); } - fprintf(stderr, "Unable to allocate string object.\n"); - exit(3); + return ret; } size_t @@ -84,21 +107,21 @@ couch_readfile(const char* file, char** outbuf_p) while((nread = fread(fbuf, 1, 16384, fp)) > 0) { if(buf == NULL) { - buf = (char*) malloc(nread + 1); + buf = new char[nread + 1]; if(buf == NULL) { fprintf(stderr, "Out of memory.\n"); exit(3); } memcpy(buf, fbuf, nread); } else { - tmp = (char*) malloc(buflen + nread + 1); + tmp = new char[buflen + nread + 1]; if(tmp == NULL) { fprintf(stderr, "Out of memory.\n"); exit(3); } memcpy(tmp, buf, buflen); memcpy(tmp+buflen, fbuf, nread); - free(buf); + delete buf; buf = tmp; } buflen += nread; @@ -114,12 +137,17 @@ couch_parse_args(int argc, const char* argv[]) couch_args* args; int i = 1; - args = (couch_args*) malloc(sizeof(couch_args)); + args = new couch_args(); if(args == NULL) return NULL; - memset(args, '\0', sizeof(couch_args)); + args->eval = 0; + args->use_http = 0; + args->use_test_funs = 0; args->stack_size = 64L * 1024L * 1024L; + args->scripts = nullptr; + args->uri_file = nullptr; + args->uri = nullptr; while(i < argc) { if(strcmp("-h", argv[i]) == 0) { @@ -193,7 +221,7 @@ couch_readline(JSContext* cx, FILE* fp) size_t oldbyteslen = 256; size_t readlen = 0; - bytes = (char *)JS_malloc(cx, byteslen); + bytes = static_cast(JS_malloc(cx, byteslen)); if(bytes == NULL) return NULL; while((readlen = couch_fgets(bytes+used, byteslen-used, fp)) > 0) { @@ -207,7 +235,7 @@ couch_readline(JSContext* cx, FILE* fp) // Double our buffer and read more. 
oldbyteslen = byteslen; byteslen *= 2; - tmp = (char *)JS_realloc(cx, bytes, oldbyteslen, byteslen); + tmp = static_cast(JS_realloc(cx, bytes, oldbyteslen, byteslen)); if(!tmp) { JS_free(cx, bytes); return NULL; @@ -222,8 +250,8 @@ couch_readline(JSContext* cx, FILE* fp) return JS_NewStringCopyZ(cx, nullptr); } - // Shring the buffer to the actual data size - tmp = (char *)JS_realloc(cx, bytes, byteslen, used); + // Shrink the buffer to the actual data size + tmp = static_cast(JS_realloc(cx, bytes, byteslen, used)); if(!tmp) { JS_free(cx, bytes); return NULL; @@ -238,22 +266,16 @@ couch_readline(JSContext* cx, FILE* fp) void -couch_print(JSContext* cx, unsigned int argc, JS::CallArgs argv) +couch_print(JSContext* cx, JS::HandleValue obj, bool use_stderr) { - uint8_t* bytes = nullptr; - FILE *stream = stdout; + FILE* stream = stdout; - if (argc) { - if (argc > 1 && argv[1].isTrue()) { - stream = stderr; - } - JSString* str = JS::ToString(cx, argv.get(0)); - bytes = reinterpret_cast(JS_EncodeString(cx, str)); - fprintf(stream, "%s", bytes); - JS_free(cx, bytes); + if(use_stderr) { + stream = stderr; } - fputc('\n', stream); + std::string val = js_to_string(cx, obj); + fprintf(stream, "%s\n", val.c_str()); fflush(stream); } @@ -261,51 +283,63 @@ couch_print(JSContext* cx, unsigned int argc, JS::CallArgs argv) void couch_error(JSContext* cx, JSErrorReport* report) { - JS::RootedValue v(cx), stack(cx), replace(cx); - char* bytes; - JSObject* regexp; - - if(!report || !JSREPORT_IS_WARNING(report->flags)) - { - fprintf(stderr, "%s\n", report->message().c_str()); - - // Print a stack trace, if available. - if (JSREPORT_IS_EXCEPTION(report->flags) && - JS_GetPendingException(cx, &v)) - { - // Clear the exception before an JS method calls or the result is - // infinite, recursive error report generation. - JS_ClearPendingException(cx); - - // Use JS regexp to indent the stack trace. 
- // If the regexp can't be created, don't JS_ReportErrorUTF8 since it is - // probably not productive to wind up here again. - JS::RootedObject vobj(cx, v.toObjectOrNull()); - - if(JS_GetProperty(cx, vobj, "stack", &stack) && - (regexp = JS_NewRegExpObject( - cx, "^(?=.)", 6, JSREG_GLOB | JSREG_MULTILINE))) - { - // Set up the arguments to ``String.replace()`` - JS::AutoValueVector re_args(cx); - JS::RootedValue arg0(cx, JS::ObjectValue(*regexp)); - auto arg1 = JS::StringValue(string_to_js(cx, "\t")); - - if (re_args.append(arg0) && re_args.append(arg1)) { - // Perform the replacement - JS::RootedObject sobj(cx, stack.toObjectOrNull()); - if(JS_GetProperty(cx, sobj, "replace", &replace) && - JS_CallFunctionValue(cx, sobj, replace, re_args, &v)) - { - // Print the result - bytes = enc_string(cx, v, NULL); - fprintf(stderr, "Stacktrace:\n%s", bytes); - JS_free(cx, bytes); - } - } - } + if(!report) { + return; + } + + if(JSREPORT_IS_WARNING(report->flags)) { + return; + } + + std::ostringstream msg; + msg << "error: " << report->message().c_str(); + + mozilla::Maybe ac; + JS::RootedValue exc(cx); + JS::RootedObject exc_obj(cx); + JS::RootedObject stack_obj(cx); + JS::RootedString stack_str(cx); + JS::RootedValue stack_val(cx); + + if(!JS_GetPendingException(cx, &exc)) { + goto done; + } + + // Clear the exception before an JS method calls or the result is + // infinite, recursive error report generation. 
+ JS_ClearPendingException(cx); + + exc_obj.set(exc.toObjectOrNull()); + stack_obj.set(JS::ExceptionStackOrNull(exc_obj)); + + if(!stack_obj) { + // Compilation errors don't have a stack + + msg << " at "; + + if(report->filename) { + msg << report->filename; + } else { + msg << ""; + } + + if(report->lineno) { + msg << ':' << report->lineno << ':' << report->column; } + + goto done; + } + + if(!JS::BuildStackString(cx, stack_obj, &stack_str, 2)) { + goto done; } + + stack_val.set(JS::StringValue(stack_str)); + msg << std::endl << std::endl << js_to_string(cx, stack_val).c_str(); + +done: + msg << std::endl; + fprintf(stderr, "%s", msg.str().c_str()); } diff --git a/src/couch/priv/couch_js/60/util.h b/src/couch/priv/couch_js/60/util.h index 407e3e60283..4c27f0f668d 100644 --- a/src/couch/priv/couch_js/60/util.h +++ b/src/couch/priv/couch_js/60/util.h @@ -26,14 +26,14 @@ typedef struct { } couch_args; std::string js_to_string(JSContext* cx, JS::HandleValue val); -std::string js_to_string(JSContext* cx, JSString *str); +bool js_to_string(JSContext* cx, JS::HandleValue val, std::string& str); JSString* string_to_js(JSContext* cx, const std::string& s); couch_args* couch_parse_args(int argc, const char* argv[]); int couch_fgets(char* buf, int size, FILE* fp); JSString* couch_readline(JSContext* cx, FILE* fp); size_t couch_readfile(const char* file, char** outbuf_p); -void couch_print(JSContext* cx, unsigned int argc, JS::CallArgs argv); +void couch_print(JSContext* cx, JS::HandleValue str, bool use_stderr); void couch_error(JSContext* cx, JSErrorReport* report); void couch_oom(JSContext* cx, void* data); bool couch_load_funcs(JSContext* cx, JS::HandleObject obj, JSFunctionSpec* funcs); diff --git a/src/couch/priv/couch_js/68/help.h b/src/couch/priv/couch_js/68/help.h new file mode 100644 index 00000000000..c5cb832856a --- /dev/null +++ b/src/couch/priv/couch_js/68/help.h @@ -0,0 +1,86 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); you may not 
+// use this file except in compliance with the License. You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. + +#ifndef COUCHJS_HELP_H +#define COUCHJS_HELP_H + +#include "config.h" + +static const char VERSION_TEMPLATE[] = + "%s - %s (SpiderMonkey 68)\n" + "\n" + "Licensed under the Apache License, Version 2.0 (the \"License\"); you may " + "not use\n" + "this file except in compliance with the License. You may obtain a copy of" + "the\n" + "License at\n" + "\n" + " http://www.apache.org/licenses/LICENSE-2.0\n" + "\n" + "Unless required by applicable law or agreed to in writing, software " + "distributed\n" + "under the License is distributed on an \"AS IS\" BASIS, WITHOUT " + "WARRANTIES OR\n" + "CONDITIONS OF ANY KIND, either express or implied. 
See the License " + "for the\n" + "specific language governing permissions and limitations under the " + "License.\n"; + +static const char USAGE_TEMPLATE[] = + "Usage: %s [FILE]\n" + "\n" + "The %s command runs the %s JavaScript interpreter.\n" + "\n" + "The exit status is 0 for success or 1 for failure.\n" + "\n" + "Options:\n" + "\n" + " -h display a short help message and exit\n" + " -V display version information and exit\n" + " -H enable %s cURL bindings (only avaiable\n" + " if package was built with cURL available)\n" + " -T enable test suite specific functions (these\n" + " should not be enabled for production systems)\n" + " -S SIZE specify that the runtime should allow at\n" + " most SIZE bytes of memory to be allocated\n" + " default is 64 MiB\n" + " -u FILE path to a .uri file containing the address\n" + " (or addresses) of one or more servers\n" + " --eval Enable runtime code evaluation (dangerous!)\n" + "\n" + "Report bugs at <%s>.\n"; + +#define BASENAME COUCHJS_NAME + +#define couch_version(basename) \ + fprintf( \ + stdout, \ + VERSION_TEMPLATE, \ + basename, \ + PACKAGE_STRING) + +#define DISPLAY_VERSION couch_version(BASENAME) + + +#define couch_usage(basename) \ + fprintf( \ + stdout, \ + USAGE_TEMPLATE, \ + basename, \ + basename, \ + PACKAGE_NAME, \ + basename, \ + PACKAGE_BUGREPORT) + +#define DISPLAY_USAGE couch_usage(BASENAME) + +#endif // Included help.h diff --git a/src/couch/priv/couch_js/68/http.cpp b/src/couch/priv/couch_js/68/http.cpp new file mode 100644 index 00000000000..20a609701a4 --- /dev/null +++ b/src/couch/priv/couch_js/68/http.cpp @@ -0,0 +1,650 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); you may not +// use this file except in compliance with the License. 
You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. + +#include +#include +#include +#include +#include +#include +#include +#include +#include "config.h" +#include "util.h" + +// Soft dependency on cURL bindings because they're +// only used when running the JS tests from the +// command line which is rare. +#ifndef HAVE_CURL + +void +http_check_enabled() +{ + fprintf(stderr, "HTTP API was disabled at compile time.\n"); + exit(3); +} + + +bool +http_ctor(JSContext* cx, JSObject* req) +{ + return false; +} + + +void +http_dtor(JSFreeOp* fop, JSObject* req) +{ + return; +} + + +bool +http_open(JSContext* cx, JSObject* req, JS::Value mth, JS::Value url, JS::Value snc) +{ + return false; +} + + +bool +http_set_hdr(JSContext* cx, JSObject* req, JS::Value name, JS::Value val) +{ + return false; +} + + +bool +http_send(JSContext* cx, JSObject* req, JS::Value body) +{ + return false; +} + + +int +http_status(JSContext* cx, JSObject* req) +{ + return -1; +} + +bool +http_uri(JSContext* cx, JSObject* req, couch_args* args, JS::Value* uri_val) +{ + return false; +} + + +#else +#include +#ifndef XP_WIN +#include +#endif + + +void +http_check_enabled() +{ + return; +} + + +// Map some of the string function names to things which exist on Windows +#ifdef XP_WIN +#define strcasecmp _strcmpi +#define strncasecmp _strnicmp +#endif + + +typedef struct curl_slist CurlHeaders; + + +typedef struct { + int method; + std::string url; + CurlHeaders* req_headers; + int16_t last_status; +} HTTPData; + + +const char* METHODS[] = {"GET", "HEAD", "POST", "PUT", "DELETE", "COPY", "OPTIONS", NULL}; + + +#define GET 0 +#define 
HEAD 1 +#define POST 2 +#define PUT 3 +#define DELETE 4 +#define COPY 5 +#define OPTIONS 6 + + +static bool go(JSContext* cx, JSObject* obj, HTTPData* http, std::string& body); + + +bool +http_ctor(JSContext* cx, JSObject* req) +{ + HTTPData* http = new HTTPData(); + bool ret = false; + + if(!http) + { + JS_ReportErrorUTF8(cx, "Failed to create CouchHTTP instance."); + goto error; + } + + http->method = -1; + http->req_headers = NULL; + http->last_status = -1; + + JS_SetPrivate(req, http); + + ret = true; + goto success; + +error: + if(http) delete http; + +success: + return ret; +} + + +void +http_dtor(JSFreeOp* fop, JSObject* obj) +{ + HTTPData* http = (HTTPData*) JS_GetPrivate(obj); + if(http) { + if(http->req_headers) curl_slist_free_all(http->req_headers); + delete http; + } +} + + +bool +http_open(JSContext* cx, JSObject* req, JS::Value mth, JS::Value url, JS::Value snc) +{ + HTTPData* http = (HTTPData*) JS_GetPrivate(req); + int methid; + + if(!http) { + JS_ReportErrorUTF8(cx, "Invalid CouchHTTP instance."); + return false; + } + + if(!mth.isString()) { + JS_ReportErrorUTF8(cx, "Method must be a string."); + return false; + } + + std::string method; + if(!js_to_string(cx, JS::RootedValue(cx, mth), method)) { + JS_ReportErrorUTF8(cx, "Failed to encode method."); + return false; + } + + for(methid = 0; METHODS[methid] != NULL; methid++) { + if(strcasecmp(METHODS[methid], method.c_str()) == 0) break; + } + + if(methid > OPTIONS) { + JS_ReportErrorUTF8(cx, "Invalid method specified."); + return false; + } + + http->method = methid; + + if(!url.isString()) { + JS_ReportErrorUTF8(cx, "URL must be a string"); + return false; + } + + std::string urlstr; + if(!js_to_string(cx, JS::RootedValue(cx, url), urlstr)) { + JS_ReportErrorUTF8(cx, "Failed to encode URL."); + return false; + } + http->url = urlstr; + + if(snc.isBoolean() && snc.isTrue()) { + JS_ReportErrorUTF8(cx, "Synchronous flag must be false."); + return false; + } + + if(http->req_headers) { + 
curl_slist_free_all(http->req_headers); + http->req_headers = NULL; + } + + // Disable Expect: 100-continue + http->req_headers = curl_slist_append(http->req_headers, "Expect:"); + + return true; +} + + +bool +http_set_hdr(JSContext* cx, JSObject* req, JS::Value name, JS::Value val) +{ + HTTPData* http = (HTTPData*) JS_GetPrivate(req); + + if(!http) { + JS_ReportErrorUTF8(cx, "Invalid CouchHTTP instance."); + return false; + } + + if(!name.isString()) + { + JS_ReportErrorUTF8(cx, "Header names must be strings."); + return false; + } + + std::string keystr; + if(!js_to_string(cx, JS::RootedValue(cx, name), keystr)) + { + JS_ReportErrorUTF8(cx, "Failed to encode header name."); + return false; + } + + if(!val.isString()) + { + JS_ReportErrorUTF8(cx, "Header values must be strings."); + return false; + } + + std::string valstr; + if(!js_to_string(cx, JS::RootedValue(cx, val), valstr)) { + JS_ReportErrorUTF8(cx, "Failed to encode header value."); + return false; + } + + std::string header = keystr + ": " + valstr; + http->req_headers = curl_slist_append(http->req_headers, header.c_str()); + + return true; +} + +bool +http_send(JSContext* cx, JSObject* req, JS::Value body) +{ + HTTPData* http = (HTTPData*) JS_GetPrivate(req); + + if(!http) { + JS_ReportErrorUTF8(cx, "Invalid CouchHTTP instance."); + return false; + } + + std::string bodystr; + if(!js_to_string(cx, JS::RootedValue(cx, body), bodystr)) { + JS_ReportErrorUTF8(cx, "Failed to encode body."); + return false; + } + + return go(cx, req, http, bodystr); +} + +int +http_status(JSContext* cx, JSObject* req) +{ + HTTPData* http = (HTTPData*) JS_GetPrivate(req); + + if(!http) { + JS_ReportErrorUTF8(cx, "Invalid CouchHTTP instance."); + return false; + } + + return http->last_status; +} + +bool +http_uri(JSContext* cx, JSObject* req, couch_args* args, JS::Value* uri_val) +{ + FILE* uri_fp = NULL; + JSString* uri_str; + + // Default is http://localhost:15986/ when no uri file is specified + if (!args->uri_file) { + 
uri_str = JS_NewStringCopyZ(cx, "http://localhost:15986/"); + *uri_val = JS::StringValue(uri_str); + JS_SetReservedSlot(req, 0, *uri_val); + return true; + } + + // Else check to see if the base url is cached in a reserved slot + *uri_val = JS_GetReservedSlot(req, 0); + if (!(*uri_val).isUndefined()) { + return true; + } + + // Read the first line of the couch.uri file. + if(!((uri_fp = fopen(args->uri_file, "r")) && + (uri_str = couch_readline(cx, uri_fp)))) { + JS_ReportErrorUTF8(cx, "Failed to read couch.uri file."); + goto error; + } + + fclose(uri_fp); + *uri_val = JS::StringValue(uri_str); + JS_SetReservedSlot(req, 0, *uri_val); + return true; + +error: + if(uri_fp) fclose(uri_fp); + return false; +} + + +// Curl Helpers + +typedef struct { + HTTPData* http; + JSContext* cx; + JSObject* resp_headers; + const char* sendbuf; + size_t sendlen; + size_t sent; + int sent_once; + char* recvbuf; + size_t recvlen; + size_t read; +} CurlState; + +/* + * I really hate doing this but this doesn't have to be + * uber awesome, it just has to work. 
+ */ +CURL* HTTP_HANDLE = NULL; +char ERRBUF[CURL_ERROR_SIZE]; + +static size_t send_body(void *ptr, size_t size, size_t nmem, void *data); +static int seek_body(void *ptr, curl_off_t offset, int origin); +static size_t recv_body(void *ptr, size_t size, size_t nmem, void *data); +static size_t recv_header(void *ptr, size_t size, size_t nmem, void *data); + +static bool +go(JSContext* cx, JSObject* obj, HTTPData* http, std::string& body) +{ + CurlState state; + JSString* jsbody; + bool ret = false; + JS::Value tmp; + JS::RootedObject robj(cx, obj); + JS::RootedValue vobj(cx); + + + state.cx = cx; + state.http = http; + + state.sendbuf = body.c_str();; + state.sendlen = body.size(); + state.sent = 0; + state.sent_once = 0; + + state.recvbuf = NULL; + state.recvlen = 0; + state.read = 0; + + if(HTTP_HANDLE == NULL) { + HTTP_HANDLE = curl_easy_init(); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_READFUNCTION, send_body); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_SEEKFUNCTION, + (curl_seek_callback) seek_body); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_HEADERFUNCTION, recv_header); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_WRITEFUNCTION, recv_body); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_NOPROGRESS, 1); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_IPRESOLVE, CURL_IPRESOLVE_V4); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_ERRORBUFFER, ERRBUF); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_COOKIEFILE, ""); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_USERAGENT, + "CouchHTTP Client - Relax"); + } + + if(!HTTP_HANDLE) { + JS_ReportErrorUTF8(cx, "Failed to initialize cURL handle."); + if(state.recvbuf) JS_free(cx, state.recvbuf); + return ret; + } + + tmp = JS_GetReservedSlot(obj, 0); + + std::string referer; + if(!js_to_string(cx, JS::RootedValue(cx, tmp), referer)) { + JS_ReportErrorUTF8(cx, "Failed to encode referer."); + if(state.recvbuf) JS_free(cx, state.recvbuf); + return ret; + } + curl_easy_setopt(HTTP_HANDLE, CURLOPT_REFERER, referer.c_str()); + + if(http->method < 0 || http->method > OPTIONS) 
{ + JS_ReportErrorUTF8(cx, "INTERNAL: Unknown method."); + if(state.recvbuf) JS_free(cx, state.recvbuf); + return ret; + } + + curl_easy_setopt(HTTP_HANDLE, CURLOPT_CUSTOMREQUEST, METHODS[http->method]); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_NOBODY, 0); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_FOLLOWLOCATION, 1); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_UPLOAD, 0); + + if(http->method == HEAD) { + curl_easy_setopt(HTTP_HANDLE, CURLOPT_NOBODY, 1); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_FOLLOWLOCATION, 0); + } else if(http->method == POST || http->method == PUT) { + curl_easy_setopt(HTTP_HANDLE, CURLOPT_UPLOAD, 1); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_FOLLOWLOCATION, 0); + } + + if(body.size() > 0) { + curl_easy_setopt(HTTP_HANDLE, CURLOPT_INFILESIZE, body.size()); + } else { + curl_easy_setopt(HTTP_HANDLE, CURLOPT_INFILESIZE, 0); + } + + // curl_easy_setopt(HTTP_HANDLE, CURLOPT_VERBOSE, 1); + + curl_easy_setopt(HTTP_HANDLE, CURLOPT_URL, http->url.c_str()); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_HTTPHEADER, http->req_headers); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_READDATA, &state); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_SEEKDATA, &state); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_WRITEHEADER, &state); + curl_easy_setopt(HTTP_HANDLE, CURLOPT_WRITEDATA, &state); + + if(curl_easy_perform(HTTP_HANDLE) != 0) { + JS_ReportErrorUTF8(cx, "Failed to execute HTTP request: %s", ERRBUF); + if(state.recvbuf) JS_free(cx, state.recvbuf); + return ret; + } + + if(!state.resp_headers) { + JS_ReportErrorUTF8(cx, "Failed to recieve HTTP headers."); + if(state.recvbuf) JS_free(cx, state.recvbuf); + return ret; + } + tmp = JS::ObjectValue(*state.resp_headers); + JS::RootedValue rtmp(cx, tmp); + + if(!JS_DefineProperty( + cx, robj, + "_headers", + rtmp, + JSPROP_READONLY + )) { + JS_ReportErrorUTF8(cx, "INTERNAL: Failed to set response headers."); + if(state.recvbuf) JS_free(cx, state.recvbuf); + return ret;; + } + + if(state.recvbuf) { + state.recvbuf[state.read] = '\0'; + 
std::string bodystr(state.recvbuf, state.read); + jsbody = string_to_js(cx, bodystr); + if(!jsbody) { + // If we can't decode the body as UTF-8 we forcefully + // convert it to a string by just forcing each byte + // to a char16_t. + jsbody = JS_NewStringCopyN(cx, state.recvbuf, state.read); + if(!jsbody) { + if(!JS_IsExceptionPending(cx)) { + JS_ReportErrorUTF8(cx, "INTERNAL: Failed to decode body."); + } + if(state.recvbuf) JS_free(cx, state.recvbuf); + return ret; + } + } + tmp = JS::StringValue(jsbody); + } else { + tmp = JS_GetEmptyStringValue(cx); + } + + JS::RootedValue rtmp2(cx, tmp); + + if(!JS_DefineProperty( + cx, robj, + "responseText", + rtmp2, + JSPROP_READONLY + )) { + JS_ReportErrorUTF8(cx, "INTERNAL: Failed to set responseText."); + if(state.recvbuf) JS_free(cx, state.recvbuf); + return ret; + } + + ret = true; + if(state.recvbuf) JS_free(cx, state.recvbuf); + return ret; +} + +static size_t +send_body(void *ptr, size_t size, size_t nmem, void *data) +{ + CurlState* state = static_cast(data); + size_t length = size * nmem; + size_t towrite = state->sendlen - state->sent; + + // Assume this is cURL trying to resend a request that + // failed. 
+ if(towrite == 0 && state->sent_once == 0) { + state->sent_once = 1; + return 0; + } else if(towrite == 0) { + state->sent = 0; + state->sent_once = 0; + towrite = state->sendlen; + } + + if(length < towrite) towrite = length; + + memcpy(ptr, state->sendbuf + state->sent, towrite); + state->sent += towrite; + + return towrite; +} + +static int +seek_body(void* ptr, curl_off_t offset, int origin) +{ + CurlState* state = static_cast(ptr); + if(origin != SEEK_SET) return -1; + + state->sent = static_cast(offset); + return static_cast(state->sent); +} + +static size_t +recv_header(void *ptr, size_t size, size_t nmem, void *data) +{ + CurlState* state = static_cast(data); + char code[4]; + char* header = static_cast(ptr); + size_t length = size * nmem; + JSString* hdr = NULL; + uint32_t hdrlen; + + if(length > 7 && strncasecmp(header, "HTTP/1.", 7) == 0) { + if(length < 12) { + return CURLE_WRITE_ERROR; + } + + memcpy(code, header+9, 3*sizeof(char)); + code[3] = '\0'; + state->http->last_status = atoi(code); + + state->resp_headers = JS_NewArrayObject(state->cx, 0); + if(!state->resp_headers) { + return CURLE_WRITE_ERROR; + } + + return length; + } + + // We get a notice at the \r\n\r\n after headers. + if(length <= 2) { + return length; + } + + // Append the new header to our array. 
+ std::string hdrstr(header, length); + hdr = string_to_js(state->cx, hdrstr); + if(!hdr) { + return CURLE_WRITE_ERROR; + } + + JS::RootedObject obj(state->cx, state->resp_headers); + if(!JS_GetArrayLength(state->cx, obj, &hdrlen)) { + return CURLE_WRITE_ERROR; + } + + JS::RootedString hdrval(state->cx, hdr); + if(!JS_SetElement(state->cx, obj, hdrlen, hdrval)) { + return CURLE_WRITE_ERROR; + } + + return length; +} + +static size_t +recv_body(void *ptr, size_t size, size_t nmem, void *data) +{ + CurlState* state = static_cast(data); + size_t length = size * nmem; + char* tmp = NULL; + + if(!state->recvbuf) { + state->recvlen = 4096; + state->read = 0; + state->recvbuf = static_cast(JS_malloc( + state->cx, + state->recvlen + )); + } + + if(!state->recvbuf) { + return CURLE_WRITE_ERROR; + } + + // +1 so we can add '\0' back up in the go function. + size_t oldlen = state->recvlen; + while(length+1 > state->recvlen - state->read) state->recvlen *= 2; + tmp = static_cast(JS_realloc( + state->cx, + state->recvbuf, + oldlen, + state->recvlen + )); + if(!tmp) return CURLE_WRITE_ERROR; + state->recvbuf = tmp; + + memcpy(state->recvbuf + state->read, ptr, length); + state->read += length; + return length; +} + +#endif /* HAVE_CURL */ diff --git a/src/couch/priv/couch_js/60/utf8.h b/src/couch/priv/couch_js/68/http.h similarity index 50% rename from src/couch/priv/couch_js/60/utf8.h rename to src/couch/priv/couch_js/68/http.h index c8b1f4d8214..797b3c0606a 100644 --- a/src/couch/priv/couch_js/60/utf8.h +++ b/src/couch/priv/couch_js/68/http.h @@ -10,10 +10,18 @@ // License for the specific language governing permissions and limitations under // the License. 
-#ifndef COUCH_JS_UTF_8_H -#define COUCH_JS_UTF_8_H +#ifndef COUCH_JS_HTTP_H +#define COUCH_JS_HTTP_H -char* enc_string(JSContext* cx, JS::Value arg, size_t* buflen); -JSString* dec_string(JSContext* cx, const char* buf, size_t buflen); +#include "util.h" + +void http_check_enabled(); +bool http_ctor(JSContext* cx, JSObject* req); +void http_dtor(JSFreeOp* fop, JSObject* req); +bool http_open(JSContext* cx, JSObject* req, JS::Value mth, JS::Value url, JS::Value snc); +bool http_set_hdr(JSContext* cx, JSObject* req, JS::Value name, JS::Value val); +bool http_send(JSContext* cx, JSObject* req, JS::Value body); +int http_status(JSContext* cx, JSObject* req); +bool http_uri(JSContext* cx, JSObject *req, couch_args* args, JS::Value* uri); #endif diff --git a/src/couch/priv/couch_js/68/main.cpp b/src/couch/priv/couch_js/68/main.cpp new file mode 100644 index 00000000000..2c95f6129c2 --- /dev/null +++ b/src/couch/priv/couch_js/68/main.cpp @@ -0,0 +1,535 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); you may not +// use this file except in compliance with the License. You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. 
+ +#include +#include +#include + +#ifdef XP_WIN +#define NOMINMAX +#include +#else +#include +#endif + +#include +#include +#include +#include +#include +#include +#include + +#include "config.h" +#include "http.h" +#include "util.h" + +static bool enableSharedMemory = true; + +static JSClassOps global_ops = { + nullptr, + nullptr, + nullptr, + nullptr, + nullptr, + nullptr, + nullptr, + nullptr, + nullptr, + nullptr, + JS_GlobalObjectTraceHook +}; + +/* The class of the global object. */ +static JSClass global_class = { + "global", + JSCLASS_GLOBAL_FLAGS, + &global_ops +}; + + +static void +req_dtor(JSFreeOp* fop, JSObject* obj) +{ + http_dtor(fop, obj); +} + +// With JSClass.construct. +static const JSClassOps clsOps = { + nullptr, + nullptr, + nullptr, + nullptr, + nullptr, + nullptr, + req_dtor, + nullptr, + nullptr, + nullptr +}; + +static const JSClass CouchHTTPClass = { + "CouchHTTP", /* name */ + JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(2), /* flags */ + &clsOps +}; + +static bool +req_ctor(JSContext* cx, unsigned int argc, JS::Value* vp) +{ + bool ret; + JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + JSObject* obj = JS_NewObjectForConstructor(cx, &CouchHTTPClass, args); + if(!obj) { + JS_ReportErrorUTF8(cx, "Failed to create CouchHTTP instance"); + return false; + } + ret = http_ctor(cx, obj); + args.rval().setObject(*obj); + return ret; +} + +static bool +req_open(JSContext* cx, unsigned int argc, JS::Value* vp) +{ + JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + JS::RootedObject obj(cx); + if (!args.computeThis(cx, &obj)) + return false; + bool ret = false; + + if(argc == 2) { + ret = http_open(cx, obj, args[0], args[1], JS::BooleanValue(false)); + } else if(argc == 3) { + ret = http_open(cx, obj, args[0], args[1], args[2]); + } else { + JS_ReportErrorUTF8(cx, "Invalid call to CouchHTTP.open"); + } + + args.rval().setUndefined(); + return ret; +} + + +static bool +req_set_hdr(JSContext* cx, unsigned int argc, JS::Value* vp) +{ + 
JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + JS::RootedObject obj(cx); + if (!args.computeThis(cx, &obj)) + return false; + bool ret = false; + + if(argc == 2) { + ret = http_set_hdr(cx, obj, args[0], args[1]); + } else { + JS_ReportErrorUTF8(cx, "Invalid call to CouchHTTP.set_header"); + } + + args.rval().setUndefined(); + return ret; +} + + +static bool +req_send(JSContext* cx, unsigned int argc, JS::Value* vp) +{ + JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + JS::RootedObject obj(cx); + if (!args.computeThis(cx, &obj)) + return false; + bool ret = false; + + if(argc == 1) { + ret = http_send(cx, obj, args[0]); + } else { + JS_ReportErrorUTF8(cx, "Invalid call to CouchHTTP.send"); + } + + args.rval().setUndefined(); + return ret; +} + +static bool +req_status(JSContext* cx, unsigned int argc, JS::Value* vp) +{ + JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + JS::RootedObject obj(cx); + if (!args.computeThis(cx, &obj)) + return false; + + int status = http_status(cx, obj); + + if(status < 0) + return false; + + args.rval().set(JS::Int32Value(status)); + return true; +} + +static bool +base_url(JSContext *cx, unsigned int argc, JS::Value* vp) +{ + JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + JS::RootedObject obj(cx); + if (!args.computeThis(cx, &obj)) + return false; + + couch_args *cargs = static_cast(JS_GetContextPrivate(cx)); + JS::Value uri_val; + bool rc = http_uri(cx, obj, cargs, &uri_val); + args.rval().set(uri_val); + return rc; +} + +static JSObject* +NewSandbox(JSContext* cx, bool lazy) +{ + JS::RealmOptions options; + options.creationOptions().setSharedMemoryAndAtomicsEnabled(enableSharedMemory); + options.creationOptions().setNewCompartmentAndZone(); + JS::RootedObject obj(cx, JS_NewGlobalObject(cx, &global_class, nullptr, + JS::DontFireOnNewGlobalHook, options)); + if (!obj) + return nullptr; + + { + JSAutoRealm ac(cx, obj); + if (!lazy && !JS::InitRealmStandardClasses(cx)) + return nullptr; + + JS::RootedValue value(cx, 
JS::BooleanValue(lazy)); + if (!JS_DefineProperty(cx, obj, "lazy", value, JSPROP_PERMANENT | JSPROP_READONLY)) + return nullptr; + + JS_FireOnNewGlobalObject(cx, obj); + } + + if (!JS_WrapObject(cx, &obj)) + return nullptr; + return obj; +} + +static bool +evalcx(JSContext *cx, unsigned int argc, JS::Value* vp) +{ + JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + bool ret = false; + + JS::RootedString str(cx, args[0].toString()); + if (!str) + return false; + + JS::RootedObject sandbox(cx); + if (args.hasDefined(1)) { + sandbox = JS::ToObject(cx, args[1]); + if (!sandbox) + return false; + } + + if (!sandbox) { + sandbox = NewSandbox(cx, false); + if (!sandbox) + return false; + } + + JS::AutoStableStringChars strChars(cx); + if (!strChars.initTwoByte(cx, str)) + return false; + + mozilla::Range chars = strChars.twoByteRange(); + JS::SourceText srcBuf; + if (!srcBuf.init(cx, chars.begin().get(), chars.length(), + JS::SourceOwnership::Borrowed)) { + return false; + } + + if(srcBuf.length() == 0) { + args.rval().setObject(*sandbox); + } else { + mozilla::Maybe ar; + unsigned flags; + JSObject* unwrapped = UncheckedUnwrap(sandbox, true, &flags); + if (flags & js::Wrapper::CROSS_COMPARTMENT) { + sandbox = unwrapped; + ar.emplace(cx, sandbox); + } + + JS::CompileOptions opts(cx); + JS::RootedValue rval(cx); + opts.setFileAndLine("", 1); + + if (!JS::Evaluate(cx, opts, srcBuf, args.rval())) { + return false; + } + } + ret = true; + if (!JS_WrapValue(cx, args.rval())) + return false; + + return ret; +} + + +static bool +gc(JSContext* cx, unsigned int argc, JS::Value* vp) +{ + JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + JS_GC(cx); + args.rval().setUndefined(); + return true; +} + + +static bool +print(JSContext* cx, unsigned int argc, JS::Value* vp) +{ + JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + + bool use_stderr = false; + if(argc > 1 && args[1].isTrue()) { + use_stderr = true; + } + + if(!args[0].isString()) { + JS_ReportErrorUTF8(cx, "Unable to 
print non-string value."); + return false; + } + + couch_print(cx, args[0], use_stderr); + + args.rval().setUndefined(); + return true; +} + + +static bool +quit(JSContext* cx, unsigned int argc, JS::Value* vp) +{ + JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + + int exit_code = args[0].toInt32();; + exit(exit_code); +} + + +static bool +readline(JSContext* cx, unsigned int argc, JS::Value* vp) +{ + JSString* line; + JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + + /* GC Occasionally */ + JS_MaybeGC(cx); + + line = couch_readline(cx, stdin); + if(line == NULL) return false; + + // return with JSString* instead of JSValue in the past + args.rval().setString(line); + return true; +} + + +static bool +seal(JSContext* cx, unsigned int argc, JS::Value* vp) +{ + JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + JS::RootedObject target(cx); + target = JS::ToObject(cx, args[0]); + if (!target) { + args.rval().setUndefined(); + return true; + } + bool deep = false; + deep = args[1].toBoolean(); + bool ret = deep ? 
JS_DeepFreezeObject(cx, target) : JS_FreezeObject(cx, target); + args.rval().setUndefined(); + return ret; +} + + +static bool +js_sleep(JSContext* cx, unsigned int argc, JS::Value* vp) +{ + JS::CallArgs args = JS::CallArgsFromVp(argc, vp); + + int duration = args[0].toInt32(); + +#ifdef XP_WIN + Sleep(duration); +#else + usleep(duration * 1000); +#endif + + return true; +} + +JSPropertySpec CouchHTTPProperties[] = { + JS_PSG("status", req_status, 0), + JS_PSG("base_url", base_url, 0), + JS_PS_END +}; + + +JSFunctionSpec CouchHTTPFunctions[] = { + JS_FN("_open", req_open, 3, 0), + JS_FN("_setRequestHeader", req_set_hdr, 2, 0), + JS_FN("_send", req_send, 1, 0), + JS_FS_END +}; + + +JSFunctionSpec TestSuiteFunctions[] = { + JS_FN("sleep", js_sleep, 1, 0), + JS_FS_END +}; + + +static JSFunctionSpec global_functions[] = { + JS_FN("evalcx", evalcx, 0, 0), + JS_FN("gc", gc, 0, 0), + JS_FN("print", print, 0, 0), + JS_FN("quit", quit, 0, 0), + JS_FN("readline", readline, 0, 0), + JS_FN("seal", seal, 0, 0), + JS_FS_END +}; + + +static bool +csp_allows(JSContext* cx, JS::HandleValue code) +{ + couch_args* args = static_cast(JS_GetContextPrivate(cx)); + if(args->eval) { + return true; + } else { + return false; + } +} + + +static JSSecurityCallbacks security_callbacks = { + csp_allows, + nullptr +}; + + +int +main(int argc, const char* argv[]) +{ + JSContext* cx = NULL; + JSObject* klass = NULL; + int i; + + couch_args* args = couch_parse_args(argc, argv); + + JS_Init(); + cx = JS_NewContext(args->stack_size, 8L * 1024L); + if(cx == NULL) + return 1; + + JS_SetGlobalJitCompilerOption(cx, JSJITCOMPILER_BASELINE_ENABLE, 0); + JS_SetGlobalJitCompilerOption(cx, JSJITCOMPILER_ION_ENABLE, 0); + + if (!JS::InitSelfHostedCode(cx)) + return 1; + + JS::SetWarningReporter(cx, couch_error); + JS::SetOutOfMemoryCallback(cx, couch_oom, NULL); + JS_SetContextPrivate(cx, args); + JS_SetSecurityCallbacks(cx, &security_callbacks); + + JS::RealmOptions options; + JS::RootedObject global(cx, 
JS_NewGlobalObject(cx, &global_class, nullptr, + JS::FireOnNewGlobalHook, options)); + if (!global) + return 1; + + JSAutoRealm ar(cx, global); + + if(!JS::InitRealmStandardClasses(cx)) + return 1; + + if(couch_load_funcs(cx, global, global_functions) != true) + return 1; + + if(args->use_http) { + http_check_enabled(); + + klass = JS_InitClass( + cx, global, + NULL, + &CouchHTTPClass, req_ctor, + 0, + CouchHTTPProperties, CouchHTTPFunctions, + NULL, NULL + ); + + if(!klass) + { + fprintf(stderr, "Failed to initialize CouchHTTP class.\n"); + exit(2); + } + } + + if(args->use_test_funs) { + if(couch_load_funcs(cx, global, TestSuiteFunctions) != true) + return 1; + } + + for(i = 0 ; args->scripts[i] ; i++) { + const char* filename = args->scripts[i]; + + // Compile and run + JS::CompileOptions options(cx); + options.setFileAndLine(filename, 1); + JS::RootedScript script(cx); + FILE* fp; + + fp = fopen(args->scripts[i], "r"); + if(fp == NULL) { + fprintf(stderr, "Failed to read file: %s\n", filename); + return 3; + } + script = JS::CompileUtf8File(cx, options, fp); + fclose(fp); + if (!script) { + JS::RootedValue exc(cx); + if(!JS_GetPendingException(cx, &exc)) { + fprintf(stderr, "Failed to compile file: %s\n", filename); + } else { + JS::RootedObject exc_obj(cx, &exc.toObject()); + JSErrorReport* report = JS_ErrorFromException(cx, exc_obj); + couch_error(cx, report); + } + return 1; + } + + JS::RootedValue result(cx); + if(JS_ExecuteScript(cx, script, &result) != true) { + JS::RootedValue exc(cx); + if(!JS_GetPendingException(cx, &exc)) { + fprintf(stderr, "Failed to execute script.\n"); + } else { + JS::RootedObject exc_obj(cx, &exc.toObject()); + JSErrorReport* report = JS_ErrorFromException(cx, exc_obj); + couch_error(cx, report); + } + } + + // Give the GC a chance to run. 
+ JS_MaybeGC(cx); + } + + return 0; +} diff --git a/src/couch/priv/couch_js/68/util.cpp b/src/couch/priv/couch_js/68/util.cpp new file mode 100644 index 00000000000..7717f118503 --- /dev/null +++ b/src/couch/priv/couch_js/68/util.cpp @@ -0,0 +1,358 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); you may not +// use this file except in compliance with the License. You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. + +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include + +#include "help.h" +#include "util.h" + +std::string +js_to_string(JSContext* cx, JS::HandleValue val) +{ + JS::AutoSaveExceptionState exc_state(cx); + JS::RootedString sval(cx); + sval = val.toString(); + + JS::UniqueChars chars(JS_EncodeStringToUTF8(cx, sval)); + if(!chars) { + JS_ClearPendingException(cx); + return std::string(); + } + + return chars.get(); +} + +bool +js_to_string(JSContext* cx, JS::HandleValue val, std::string& str) +{ + if(!val.isString()) { + return false; + } + + if(JS_GetStringLength(val.toString()) == 0) { + str = ""; + return true; + } + + std::string conv = js_to_string(cx, val); + if(!conv.size()) { + return false; + } + + str = conv; + return true; +} + +JSString* +string_to_js(JSContext* cx, const std::string& raw) +{ + JS::UTF8Chars utf8(raw.c_str(), raw.size()); + JS::UniqueTwoByteChars utf16; + size_t len; + + utf16.reset(JS::UTF8CharsToNewTwoByteCharsZ(cx, utf8, &len, js::MallocArena).get()); + if(!utf16) { + return nullptr; + } + + return JS_NewUCString(cx, std::move(utf16), len); +} + +size_t +couch_readfile(const char* 
file, char** outbuf_p) +{ + FILE* fp; + char fbuf[16384]; + char *buf = NULL; + char* tmp; + size_t nread = 0; + size_t buflen = 0; + + if(strcmp(file, "-") == 0) { + fp = stdin; + } else { + fp = fopen(file, "r"); + if(fp == NULL) { + fprintf(stderr, "Failed to read file: %s\n", file); + exit(3); + } + } + + while((nread = fread(fbuf, 1, 16384, fp)) > 0) { + if(buf == NULL) { + buf = new char[nread + 1]; + if(buf == NULL) { + fprintf(stderr, "Out of memory.\n"); + exit(3); + } + memcpy(buf, fbuf, nread); + } else { + tmp = new char[buflen + nread + 1]; + if(tmp == NULL) { + fprintf(stderr, "Out of memory.\n"); + exit(3); + } + memcpy(tmp, buf, buflen); + memcpy(tmp+buflen, fbuf, nread); + delete buf; + buf = tmp; + } + buflen += nread; + buf[buflen] = '\0'; + } + *outbuf_p = buf; + return buflen ; +} + +couch_args* +couch_parse_args(int argc, const char* argv[]) +{ + couch_args* args; + int i = 1; + + args = new couch_args(); + if(args == NULL) + return NULL; + + args->eval = 0; + args->use_http = 0; + args->use_test_funs = 0; + args->stack_size = 64L * 1024L * 1024L; + args->scripts = nullptr; + args->uri_file = nullptr; + args->uri = nullptr; + + while(i < argc) { + if(strcmp("-h", argv[i]) == 0) { + DISPLAY_USAGE; + exit(0); + } else if(strcmp("-V", argv[i]) == 0) { + DISPLAY_VERSION; + exit(0); + } else if(strcmp("-H", argv[i]) == 0) { + args->use_http = 1; + } else if(strcmp("-T", argv[i]) == 0) { + args->use_test_funs = 1; + } else if(strcmp("-S", argv[i]) == 0) { + args->stack_size = atoi(argv[++i]); + if(args->stack_size <= 0) { + fprintf(stderr, "Invalid stack size.\n"); + exit(2); + } + } else if(strcmp("-u", argv[i]) == 0) { + args->uri_file = argv[++i]; + } else if(strcmp("--eval", argv[i]) == 0) { + args->eval = 1; + } else if(strcmp("--", argv[i]) == 0) { + i++; + break; + } else { + break; + } + i++; + } + + if(i >= argc) { + DISPLAY_USAGE; + exit(3); + } + args->scripts = argv + i; + + return args; +} + + +int +couch_fgets(char* buf, int size, 
FILE* fp) +{ + int n, i, c; + + if(size <= 0) return -1; + n = size - 1; + + for(i = 0; i < n && (c = getc(fp)) != EOF; i++) { + buf[i] = c; + if(c == '\n') { + i++; + break; + } + } + + buf[i] = '\0'; + return i; +} + + +JSString* +couch_readline(JSContext* cx, FILE* fp) +{ + JSString* str; + char* bytes = NULL; + char* tmp = NULL; + size_t used = 0; + size_t byteslen = 256; + size_t oldbyteslen = 256; + size_t readlen = 0; + + bytes = static_cast(JS_malloc(cx, byteslen)); + if(bytes == NULL) return NULL; + + while((readlen = couch_fgets(bytes+used, byteslen-used, fp)) > 0) { + used += readlen; + + if(bytes[used-1] == '\n') { + bytes[used-1] = '\0'; + break; + } + + // Double our buffer and read more. + oldbyteslen = byteslen; + byteslen *= 2; + tmp = static_cast(JS_realloc(cx, bytes, oldbyteslen, byteslen)); + if(!tmp) { + JS_free(cx, bytes); + return NULL; + } + + bytes = tmp; + } + + // Treat empty strings specially + if(used == 0) { + JS_free(cx, bytes); + return JS_NewStringCopyZ(cx, nullptr); + } + + // Shrink the buffer to the actual data size + tmp = static_cast(JS_realloc(cx, bytes, byteslen, used)); + if(!tmp) { + JS_free(cx, bytes); + return NULL; + } + bytes = tmp; + byteslen = used; + + str = string_to_js(cx, std::string(tmp)); + JS_free(cx, bytes); + return str; +} + + +void +couch_print(JSContext* cx, JS::HandleValue obj, bool use_stderr) +{ + FILE *stream = stdout; + + if (use_stderr) { + stream = stderr; + } + std::string val = js_to_string(cx, obj); + fprintf(stream, "%s\n", val.c_str()); + fflush(stream); +} + + +void +couch_error(JSContext* cx, JSErrorReport* report) +{ + if(!report) { + return; + } + + if(JSREPORT_IS_WARNING(report->flags)) { + return; + } + + std::ostringstream msg; + msg << "error: " << report->message().c_str(); + + mozilla::Maybe ar; + JS::RootedValue exc(cx); + JS::RootedObject exc_obj(cx); + JS::RootedObject stack_obj(cx); + JS::RootedString stack_str(cx); + JS::RootedValue stack_val(cx); + JSPrincipals* principals = 
GetRealmPrincipals(js::GetContextRealm(cx)); + + if(!JS_GetPendingException(cx, &exc)) { + goto done; + } + + // Clear the exception before an JS method calls or the result is + // infinite, recursive error report generation. + JS_ClearPendingException(cx); + + exc_obj.set(exc.toObjectOrNull()); + stack_obj.set(JS::ExceptionStackOrNull(exc_obj)); + + if(!stack_obj) { + // Compilation errors don't have a stack + + msg << " at "; + + if(report->filename) { + msg << report->filename; + } else { + msg << ""; + } + + if(report->lineno) { + msg << ':' << report->lineno << ':' << report->column; + } + + goto done; + } + + if(!JS::BuildStackString(cx, principals, stack_obj, &stack_str, 2)) { + goto done; + } + + stack_val.set(JS::StringValue(stack_str)); + msg << std::endl << std::endl << js_to_string(cx, stack_val).c_str(); + +done: + msg << std::endl; + fprintf(stderr, "%s", msg.str().c_str()); +} + + +void +couch_oom(JSContext* cx, void* data) +{ + fprintf(stderr, "out of memory\n"); + exit(1); +} + + +bool +couch_load_funcs(JSContext* cx, JS::HandleObject obj, JSFunctionSpec* funcs) +{ + JSFunctionSpec* f; + for(f = funcs; f->name; f++) { + if(!JS_DefineFunction(cx, obj, f->name.string(), f->call.op, f->nargs, f->flags)) { + fprintf(stderr, "Failed to create function: %s\n", f->name.string()); + return false; + } + } + return true; +} diff --git a/src/couch/priv/couch_js/68/util.h b/src/couch/priv/couch_js/68/util.h new file mode 100644 index 00000000000..bd7843eb969 --- /dev/null +++ b/src/couch/priv/couch_js/68/util.h @@ -0,0 +1,41 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); you may not +// use this file except in compliance with the License. 
You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. + +#ifndef COUCHJS_UTIL_H +#define COUCHJS_UTIL_H + +#include + +typedef struct { + int eval; + int use_http; + int use_test_funs; + int stack_size; + const char** scripts; + const char* uri_file; + JSString* uri; +} couch_args; + +std::string js_to_string(JSContext* cx, JS::HandleValue val); +bool js_to_string(JSContext* cx, JS::HandleValue val, std::string& str); +JSString* string_to_js(JSContext* cx, const std::string& s); + +couch_args* couch_parse_args(int argc, const char* argv[]); +int couch_fgets(char* buf, int size, FILE* fp); +JSString* couch_readline(JSContext* cx, FILE* fp); +size_t couch_readfile(const char* file, char** outbuf_p); +void couch_print(JSContext* cx, JS::HandleValue str, bool use_stderr); +void couch_error(JSContext* cx, JSErrorReport* report); +void couch_oom(JSContext* cx, void* data); +bool couch_load_funcs(JSContext* cx, JS::HandleObject obj, JSFunctionSpec* funcs); + +#endif // Included util.h diff --git a/src/couch/rebar.config.script b/src/couch/rebar.config.script index 91e24d99eaa..93608cea7df 100644 --- a/src/couch/rebar.config.script +++ b/src/couch/rebar.config.script @@ -22,7 +22,7 @@ CopyIfDifferent = fun(Path, Contents) -> false -> file:write_file(Path, Contents) end -end, +end. CouchJSName = case os:type() of @@ -30,21 +30,21 @@ CouchJSName = case os:type() of "couchjs.exe"; _ -> "couchjs" -end, -CouchJSPath = filename:join(["priv", CouchJSName]), +end. +CouchJSPath = filename:join(["priv", CouchJSName]). 
Version = case os:getenv("COUCHDB_VERSION") of false -> string:strip(os:cmd("git describe --always"), right, $\n); Version0 -> string:strip(Version0, right) -end, +end. GitSha = case os:getenv("COUCHDB_GIT_SHA") of false -> - ""; % release builds won’t get a fallback + ""; % release builds won't get a fallback GitSha0 -> string:strip(GitSha0, right) -end, +end. CouchConfig = case filelib:is_file(os:getenv("COUCHDB_CONFIG")) of true -> @@ -59,6 +59,8 @@ SMVsn = case lists:keyfind(spidermonkey_version, 1, CouchConfig) of "1.8.5"; {_, "60"} -> "60"; + {_, "68"} -> + "68"; undefined -> "1.8.5"; {_, Unsupported} -> @@ -74,28 +76,28 @@ ConfigH = [ {"JSSCRIPT_TYPE", "JSObject*"}, {"COUCHJS_NAME", "\"" ++ CouchJSName++ "\""}, {"PACKAGE", "\"apache-couchdb\""}, - {"PACKAGE_BUGREPORT", "\"https://issues.apache.org/jira/browse/COUCHDB\""}, + {"PACKAGE_BUGREPORT", "\"https://github.com/apache/couchdb/issues\""}, {"PACKAGE_NAME", "\"Apache CouchDB\""}, {"PACKAGE_STRING", "\"Apache CouchDB " ++ Version ++ "\""}, {"PACKAGE_VERSION", "\"" ++ Version ++ "\""} -], +]. -CouchJSConfig = "priv/couch_js/" ++ SMVsn ++ "/config.h", -ConfigSrc = [["#define ", K, " ", V, $\n] || {K, V} <- ConfigH], -ConfigBin = iolist_to_binary(ConfigSrc), -ok = CopyIfDifferent(CouchJSConfig, ConfigBin), +CouchJSConfig = "priv/couch_js/" ++ SMVsn ++ "/config.h". +ConfigSrc = [["#define ", K, " ", V, $\n] || {K, V} <- ConfigH]. +ConfigBin = iolist_to_binary(ConfigSrc). +ok = CopyIfDifferent(CouchJSConfig, ConfigBin). MD5Config = case lists:keyfind(erlang_md5, 1, CouchConfig) of {erlang_md5, true} -> [{d, 'ERLANG_MD5', true}]; _ -> [] -end, +end. ProperConfig = case code:lib_dir(proper) of {error, bad_name} -> []; _ -> [{d, 'WITH_PROPER'}] -end, +end. 
{JS_CFLAGS, JS_LDFLAGS} = case os:type() of {win32, _} when SMVsn == "1.8.5" -> @@ -120,8 +122,13 @@ end, }; {unix, _} when SMVsn == "60" -> { - "-DXP_UNIX -I/usr/include/mozjs-60 -I/usr/local/include/mozjs-60 -std=c++14", - "-L/usr/local/lib -std=c++14 -lmozjs-60 -lm" + "-DXP_UNIX -I/usr/include/mozjs-60 -I/usr/local/include/mozjs-60 -std=c++14 -Wno-invalid-offsetof", + "-L/usr/local/lib -std=c++14 -lmozjs-60 -lm -lstdc++" + }; + {unix, _} when SMVsn == "68" -> + { + "-DXP_UNIX -I/usr/include/mozjs-68 -I/usr/local/include/mozjs-68 -std=c++14 -Wno-invalid-offsetof", + "-L/usr/local/lib -std=c++14 -lmozjs-68 -lm -lstdc++" } end. @@ -146,11 +153,12 @@ end. end; _ -> {"", ""} -end, +end. CouchJSSrc = case SMVsn of "1.8.5" -> ["priv/couch_js/1.8.5/*.c"]; - "60" -> ["priv/couch_js/60/*.cpp"] + "60" -> ["priv/couch_js/60/*.cpp"]; + "68" -> ["priv/couch_js/68/*.cpp"] end. CouchJSEnv = case SMVsn of @@ -159,26 +167,26 @@ CouchJSEnv = case SMVsn of {"CFLAGS", JS_CFLAGS ++ " " ++ CURL_CFLAGS}, {"LDFLAGS", JS_LDFLAGS ++ " " ++ CURL_LDFLAGS} ]; - "60" -> + _ -> [ {"CXXFLAGS", JS_CFLAGS ++ " " ++ CURL_CFLAGS}, {"LDFLAGS", JS_LDFLAGS ++ " " ++ CURL_LDFLAGS} ] -end, +end. -IcuPath = "priv/couch_icu_driver.so", -IcuSrc = ["priv/icu_driver/*.c"], +IcuPath = "priv/couch_icu_driver.so". +IcuSrc = ["priv/icu_driver/*.c"]. IcuEnv = [{"DRV_CFLAGS", "$DRV_CFLAGS -DPIC -O2 -fno-common"}, - {"DRV_LDFLAGS", "$DRV_LDFLAGS -lm -licuuc -licudata -licui18n -lpthread"}], + {"DRV_LDFLAGS", "$DRV_LDFLAGS -lm -licuuc -licudata -licui18n -lpthread"}]. IcuDarwinEnv = [{"CFLAGS", "-DXP_UNIX -I/usr/local/opt/icu4c/include"}, - {"LDFLAGS", "-L/usr/local/opt/icu4c/lib"}], + {"LDFLAGS", "-L/usr/local/opt/icu4c/lib"}]. IcuBsdEnv = [{"CFLAGS", "-DXP_UNIX -I/usr/local/include"}, - {"LDFLAGS", "-L/usr/local/lib"}], + {"LDFLAGS", "-L/usr/local/lib"}]. IcuWinEnv = [{"CFLAGS", "$DRV_CFLAGS /DXP_WIN"}, - {"LDFLAGS", "icuin.lib icudt.lib icuuc.lib"}], + {"LDFLAGS", "icuin.lib icudt.lib icuuc.lib"}]. 
-ComparePath = "priv/couch_ejson_compare.so", -CompareSrc = ["priv/couch_ejson_compare/*.c"], +ComparePath = "priv/couch_ejson_compare.so". +CompareSrc = ["priv/couch_ejson_compare/*.c"]. BaseSpecs = [ %% couchjs @@ -193,17 +201,17 @@ BaseSpecs = [ {"linux", ComparePath, CompareSrc, [{env, IcuEnv}]}, {"bsd", ComparePath, CompareSrc, [{env, IcuEnv ++ IcuBsdEnv}]}, {"win32", ComparePath, CompareSrc, [{env, IcuWinEnv}]} -], +]. SpawnSpec = [ {"priv/couchspawnkillable", ["priv/spawnkillable/*.c"]} -], +]. %% hack required until switch to enc/rebar3 PortEnvOverrides = [ {"win32", "EXE_LINK_CXX_TEMPLATE", "$LINKER $PORT_IN_FILES $LDFLAGS $EXE_LDFLAGS /OUT:$PORT_OUT_FILE"} -], +]. PortSpecs = case os:type() of {win32, _} -> @@ -213,10 +221,10 @@ PortSpecs = case os:type() of ok = CopyIfDifferent("priv/couchspawnkillable", CSK), os:cmd("chmod +x priv/couchspawnkillable"), BaseSpecs -end, +end. PlatformDefines = [ {platform_define, "win32", 'WINDOWS'} -], +]. AddConfig = [ {port_specs, PortSpecs}, {erl_opts, PlatformDefines ++ [ diff --git a/src/couch/src/couch.app.src b/src/couch/src/couch.app.src index 12ec29e1244..6116c79ba7a 100644 --- a/src/couch/src/couch.app.src +++ b/src/couch/src/couch.app.src @@ -33,7 +33,6 @@ sasl, inets, ssl, - os_mon, % Upstream deps ibrowse, diff --git a/src/couch/src/couch.erl b/src/couch/src/couch.erl index 60a8b66265c..1c912ac2a05 100644 --- a/src/couch/src/couch.erl +++ b/src/couch/src/couch.erl @@ -23,7 +23,6 @@ deps() -> [ sasl, inets, - os_mon, crypto, public_key, ssl, diff --git a/src/couch/src/couch_db.erl b/src/couch/src/couch_db.erl index e1d726dc95b..390a198df5b 100644 --- a/src/couch/src/couch_db.erl +++ b/src/couch/src/couch_db.erl @@ -37,6 +37,7 @@ get_committed_update_seq/1, get_compacted_seq/1, get_compactor_pid/1, + get_compactor_pid_sync/1, get_db_info/1, get_partition_info/2, get_del_doc_count/1, @@ -572,6 +573,14 @@ get_compacted_seq(#db{}=Db) -> get_compactor_pid(#db{compactor_pid = Pid}) -> Pid. 
+get_compactor_pid_sync(#db{main_pid=Pid}=Db) -> + case gen_server:call(Pid, compactor_pid, infinity) of + CPid when is_pid(CPid) -> + CPid; + _ -> + nil + end. + get_db_info(Db) -> #db{ name = Name, diff --git a/src/couch/src/couch_httpd.erl b/src/couch/src/couch_httpd.erl index ef90d6b2ad0..8f7fedd5eee 100644 --- a/src/couch/src/couch_httpd.erl +++ b/src/couch/src/couch_httpd.erl @@ -931,6 +931,8 @@ error_info({error, {illegal_database_name, Name}}) -> {400, <<"illegal_database_name">>, Message}; error_info({missing_stub, Reason}) -> {412, <<"missing_stub">>, Reason}; +error_info({misconfigured_server, Reason}) -> + {500, <<"misconfigured_server">>, couch_util:to_binary(Reason)}; error_info({Error, Reason}) -> {500, couch_util:to_binary(Error), couch_util:to_binary(Reason)}; error_info(Error) -> diff --git a/src/couch/src/couch_httpd_auth.erl b/src/couch/src/couch_httpd_auth.erl index 5e445030195..45a82bd0f7b 100644 --- a/src/couch/src/couch_httpd_auth.erl +++ b/src/couch/src/couch_httpd_auth.erl @@ -31,6 +31,8 @@ -export([cookie_auth_cookie/4, cookie_scheme/1]). -export([maybe_value/3]). +-export([jwt_authentication_handler/1]). + -import(couch_httpd, [header_value/2, send_json/2,send_json/4, send_method_not_allowed/2]). -compile({no_auto_import,[integer_to_binary/1, integer_to_binary/2]}). @@ -186,6 +188,42 @@ proxy_auth_user(Req) -> end end. +jwt_authentication_handler(Req) -> + case header_value(Req, "Authorization") of + "Bearer " ++ Jwt -> + RequiredClaims = get_configured_claims(), + case jwtf:decode(?l2b(Jwt), [alg | RequiredClaims], fun jwtf_keystore:get/2) of + {ok, {Claims}} -> + case lists:keyfind(<<"sub">>, 1, Claims) of + false -> throw({unauthorized, <<"Token missing sub claim.">>}); + {_, User} -> Req#httpd{user_ctx=#user_ctx{ + name = User, + roles = couch_util:get_value(?l2b(config:get("jwt_auth", "roles_claim_name", "_couchdb.roles")), Claims, []) + }} + end; + {error, Reason} -> + throw(Reason) + end; + _ -> Req + end. 
+ +get_configured_claims() -> + Claims = config:get("jwt_auth", "required_claims", ""), + Re = "((?[a-z]+)|{(?[a-z]+)\s*,\s*\"(?[^\"]+)\"})", + case re:run(Claims, Re, [global, {capture, [key1, key2, val], binary}]) of + nomatch when Claims /= "" -> + couch_log:error("[jwt_auth] required_claims is set to an invalid value.", []), + throw({misconfigured_server, <<"JWT is not configured correctly">>}); + nomatch -> + []; + {match, Matches} -> + lists:map(fun to_claim/1, Matches) + end. + +to_claim([Key, <<>>, <<>>]) -> + binary_to_atom(Key, latin1); +to_claim([<<>>, Key, Value]) -> + {binary_to_atom(Key, latin1), Value}. cookie_authentication_handler(Req) -> cookie_authentication_handler(Req, couch_auth_cache). @@ -365,7 +403,8 @@ handle_session_req(#httpd{method='GET', user_ctx=UserCtx}=Req, _AuthModule) -> end; % logout by deleting the session handle_session_req(#httpd{method='DELETE'}=Req, _AuthModule) -> - Cookie = mochiweb_cookies:cookie("AuthSession", "", [{path, "/"}] ++ cookie_scheme(Req)), + Cookie = mochiweb_cookies:cookie("AuthSession", "", [{path, "/"}] ++ + cookie_domain() ++ cookie_scheme(Req)), {Code, Headers} = case couch_httpd:qs_value(Req, "next", nil) of nil -> {200, [Cookie]}; diff --git a/src/couch/src/couch_query_servers.erl b/src/couch/src/couch_query_servers.erl index c6d255f17f3..447daea6100 100644 --- a/src/couch/src/couch_query_servers.erl +++ b/src/couch/src/couch_query_servers.erl @@ -519,7 +519,7 @@ with_ddoc_proc(#doc{id=DDocId,revs={Start, [DiskRev|_]}}=DDoc, Fun) -> proc_prompt(Proc, Args) -> case proc_prompt_raw(Proc, Args) of {json, Json} -> - ?JSON_DECODE(Json); + raw_to_ejson({json, Json}); EJson -> EJson end. @@ -528,10 +528,76 @@ proc_prompt_raw(#proc{prompt_fun = {Mod, Func}} = Proc, Args) -> apply(Mod, Func, [Proc#proc.pid, Args]). 
raw_to_ejson({json, Json}) -> - ?JSON_DECODE(Json); + try + ?JSON_DECODE(Json) + catch throw:{invalid_json, {_, invalid_string}} -> + Forced = try + force_utf8(Json) + catch _:_ -> + Json + end, + ?JSON_DECODE(Forced) + end; raw_to_ejson(EJson) -> EJson. +force_utf8(Bin) -> + case binary:match(Bin, <<"\\u">>) of + {Start, 2} -> + <> = Bin, + {Insert, Rest3} = case check_uescape(Rest1) of + {ok, Skip} -> + <> = Rest1, + {Skipped, Rest2}; + {error, Skip} -> + <<_:Skip/binary, Rest2/binary>> = Rest1, + {<<16#EF, 16#BF, 16#BD>>, Rest2} + end, + RestForced = force_utf8(Rest3), + <>; + nomatch -> + Bin + end. + +check_uescape(Data) -> + case extract_uescape(Data) of + {Hi, Rest} when Hi >= 16#D800, Hi < 16#DC00 -> + case extract_uescape(Rest) of + {Lo, _} when Lo >= 16#DC00, Lo =< 16#DFFF -> + % A low surrogate pair + UTF16 = << + Hi:16/big-unsigned-integer, + Lo:16/big-unsigned-integer + >>, + try + [_] = xmerl_ucs:from_utf16be(UTF16), + {ok, 12} + catch _:_ -> + {error, 6} + end; + {_, _} -> + % Found a uescape that's not a low half + {error, 6}; + false -> + % No hex escape found + {error, 6} + end; + {Hi, _} when Hi >= 16#DC00, Hi =< 16#DFFF -> + % Found a low surrogate half without a high half + {error, 6}; + {_, _} -> + % Found a uescape we don't care about + {ok, 6}; + false -> + % Incomplete uescape which we don't care about + {ok, 2} + end. + +extract_uescape(<<"\\u", Code:4/binary, Rest/binary>>) -> + {binary_to_integer(Code, 16), Rest}; +extract_uescape(_) -> + false. + proc_stop(Proc) -> {Mod, Func} = Proc#proc.stop_fun, apply(Mod, Func, [Proc#proc.pid]). @@ -680,4 +746,41 @@ test_reduce(Reducer, KVs) -> {ok, Finalized} = finalize(Reducer, Reduced), Finalized. 
+force_utf8_test() -> + % "\uDCA5\uD83D" + Ok = [ + <<"foo">>, + <<"\\u00A0">>, + <<"\\u0032">>, + <<"\\uD83D\\uDCA5">>, + <<"foo\\uD83D\\uDCA5bar">>, + % Truncated but we doesn't break replacements + <<"\\u0FA">> + ], + lists:foreach(fun(Case) -> + ?assertEqual(Case, force_utf8(Case)) + end, Ok), + + NotOk = [ + <<"\\uDCA5">>, + <<"\\uD83D">>, + <<"fo\\uDCA5bar">>, + <<"foo\\uD83Dbar">>, + <<"\\uDCA5\\uD83D">>, + <<"\\uD83Df\\uDCA5">>, + <<"\\uDCA5\\u00A0">>, + <<"\\uD83D\\u00A0">> + ], + ToJSON = fun(Bin) -> <<34, Bin/binary, 34>> end, + lists:foreach(fun(Case) -> + try + ?assertNotEqual(Case, force_utf8(Case)), + ?assertThrow(_, ?JSON_DECODE(ToJSON(Case))), + ?assertMatch(<<_/binary>>, ?JSON_DECODE(ToJSON(force_utf8(Case)))) + catch + T:R -> + io:format(standard_error, "~p~n~p~n", [T, R]) + end + end, NotOk). + -endif. diff --git a/src/couch/src/couch_server.erl b/src/couch/src/couch_server.erl index 909e2389812..6db3f7448f3 100644 --- a/src/couch/src/couch_server.erl +++ b/src/couch/src/couch_server.erl @@ -246,6 +246,16 @@ init([]) -> % Mark being able to receive documents with an _access property as a supported feature config:enable_feature('access-ready'), + % Mark if fips is enabled + case + erlang:function_exported(crypto, info_fips, 0) andalso + crypto:info_fips() == enabled of + true -> + config:enable_feature('fips'); + false -> + ok + end, + % read config and register for configuration changes % just stop if one of the config settings change. couch_server_sup @@ -381,10 +391,13 @@ maybe_close_lru_db(#server{lru=Lru}=Server) -> end. 
open_async(Server, From, DbName, Options) -> + NoLRUServer = Server#server{ + lru = redacted + }, Parent = self(), T0 = os:timestamp(), Opener = spawn_link(fun() -> - Res = open_async_int(Server, DbName, Options), + Res = open_async_int(NoLRUServer, DbName, Options), IsSuccess = case Res of {ok, _} -> true; _ -> false diff --git a/src/couch/src/couch_util.erl b/src/couch/src/couch_util.erl index a785e2e44e4..95780e8cc84 100644 --- a/src/couch/src/couch_util.erl +++ b/src/couch/src/couch_util.erl @@ -21,7 +21,7 @@ -export([get_nested_json_value/2, json_user_ctx/1]). -export([proplist_apply_field/2, json_apply_field/2]). -export([to_binary/1, to_integer/1, to_list/1, url_encode/1]). --export([json_encode/1, json_decode/1]). +-export([json_encode/1, json_decode/1, json_decode/2]). -export([verify/2,simple_call/2,shutdown_sync/1]). -export([get_value/2, get_value/3]). -export([reorder_results/2]). @@ -31,6 +31,7 @@ -export([with_db/2]). -export([rfc1123_date/0, rfc1123_date/1]). -export([integer_to_boolean/1, boolean_to_integer/1]). +-export([validate_positive_int/1]). -export([find_in_binary/2]). -export([callback_exists/3, validate_callback_exists/3]). -export([with_proc/4]). @@ -498,8 +499,11 @@ json_encode(V) -> jiffy:encode(V, [force_utf8]). json_decode(V) -> + json_decode(V, []). + +json_decode(V, Opts) -> try - jiffy:decode(V, [dedupe_keys]) + jiffy:decode(V, [dedupe_keys | Opts]) catch error:Error -> throw({invalid_json, Error}) @@ -621,6 +625,17 @@ boolean_to_integer(false) -> 0. +validate_positive_int(N) when is_list(N) -> + try + I = list_to_integer(N), + validate_positive_int(I) + catch error:badarg -> + false + end; +validate_positive_int(N) when is_integer(N), N > 0 -> true; +validate_positive_int(_) -> false. 
+ + find_in_binary(_B, <<>>) -> not_found; diff --git a/src/couch/test/eunit/couch_js_tests.erl b/src/couch/test/eunit/couch_js_tests.erl index cd6452cf98f..693cd977288 100644 --- a/src/couch/test/eunit/couch_js_tests.erl +++ b/src/couch/test/eunit/couch_js_tests.erl @@ -14,17 +14,6 @@ -include_lib("eunit/include/eunit.hrl"). --define(FUNC, << - "var state = [];\n" - "function(doc) {\n" - " var val = \"0123456789ABCDEF\";\n" - " for(var i = 0; i < 165535; i++) {\n" - " state.push([val, val]);\n" - " }\n" - "}\n" ->>). - - couch_js_test_() -> { "Test couchjs", @@ -33,15 +22,141 @@ couch_js_test_() -> fun test_util:start_couch/0, fun test_util:stop_couch/1, [ + fun should_create_sandbox/0, + fun should_roundtrip_utf8/0, + fun should_roundtrip_modified_utf8/0, + fun should_replace_broken_utf16/0, + fun should_allow_js_string_mutations/0, {timeout, 60000, fun should_exit_on_oom/0} ] } }. +should_create_sandbox() -> + % Try and detect whether we can see out of the + % sandbox or not. + Src = << + "function(doc) {\n" + " try {\n" + " emit(false, typeof(Couch.compile_function));\n" + " } catch (e) {\n" + " emit(true, e.message);\n" + " }\n" + "}\n" + >>, + Proc = couch_query_servers:get_os_process(<<"javascript">>), + true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src]), + Result = couch_query_servers:proc_prompt(Proc, [<<"map_doc">>, <<"{}">>]), + ?assertEqual([[[true, <<"Couch is not defined">>]]], Result). + + +should_roundtrip_utf8() -> + % Try round tripping UTF-8 both directions through + % couchjs. These tests use hex encoded values of + % Ä (C384) and Ü (C39C) so as to avoid odd editor/Erlang encoding + % strangeness. 
+ Src = << + "function(doc) {\n" + " emit(doc.value, \"", 16#C3, 16#9C, "\");\n" + "}\n" + >>, + Proc = couch_query_servers:get_os_process(<<"javascript">>), + true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src]), + Doc = {[ + {<<"value">>, <<16#C3, 16#84>>} + ]}, + Result = couch_query_servers:proc_prompt(Proc, [<<"map_doc">>, Doc]), + ?assertEqual([[[<<16#C3, 16#84>>, <<16#C3, 16#9C>>]]], Result). + + +should_roundtrip_modified_utf8() -> + % Mimicing the test case from the mailing list + Src = << + "function(doc) {\n" + " emit(doc.value.toLowerCase(), \"", 16#C3, 16#9C, "\");\n" + "}\n" + >>, + Proc = couch_query_servers:get_os_process(<<"javascript">>), + true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src]), + Doc = {[ + {<<"value">>, <<16#C3, 16#84>>} + ]}, + Result = couch_query_servers:proc_prompt(Proc, [<<"map_doc">>, Doc]), + ?assertEqual([[[<<16#C3, 16#A4>>, <<16#C3, 16#9C>>]]], Result). + + +should_replace_broken_utf16() -> + % This test reverse the surrogate pair of + % the Boom emoji U+1F4A5 + Src = << + "function(doc) {\n" + " emit(doc.value.split(\"\").reverse().join(\"\"), 1);\n" + "}\n" + >>, + Proc = couch_query_servers:get_os_process(<<"javascript">>), + true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src]), + Doc = {[ + {<<"value">>, list_to_binary(xmerl_ucs:to_utf8([16#1F4A5]))} + ]}, + Result = couch_query_servers:proc_prompt(Proc, [<<"map_doc">>, Doc]), + % Invalid UTF-8 gets replaced with the 16#FFFD replacement + % marker + Markers = list_to_binary(xmerl_ucs:to_utf8([16#FFFD, 16#FFFD])), + ?assertEqual([[[Markers, 1]]], Result). 
+ + +should_allow_js_string_mutations() -> + % This binary corresponds to this string: мама мыла раму + % Which I'm told translates to: "mom was washing the frame" + MomWashedTheFrame = << + 16#D0, 16#BC, 16#D0, 16#B0, 16#D0, 16#BC, 16#D0, 16#B0, 16#20, + 16#D0, 16#BC, 16#D1, 16#8B, 16#D0, 16#BB, 16#D0, 16#B0, 16#20, + 16#D1, 16#80, 16#D0, 16#B0, 16#D0, 16#BC, 16#D1, 16#83 + >>, + Mom = <<16#D0, 16#BC, 16#D0, 16#B0, 16#D0, 16#BC, 16#D0, 16#B0>>, + Washed = <<16#D0, 16#BC, 16#D1, 16#8B, 16#D0, 16#BB, 16#D0, 16#B0>>, + Src1 = << + "function(doc) {\n" + " emit(\"length\", doc.value.length);\n" + "}\n" + >>, + Src2 = << + "function(doc) {\n" + " emit(\"substring\", doc.value.substring(5, 9));\n" + "}\n" + >>, + Src3 = << + "function(doc) {\n" + " emit(\"slice\", doc.value.slice(0, 4));\n" + "}\n" + >>, + Proc = couch_query_servers:get_os_process(<<"javascript">>), + true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src1]), + true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src2]), + true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src3]), + Doc = {[{<<"value">>, MomWashedTheFrame}]}, + Result = couch_query_servers:proc_prompt(Proc, [<<"map_doc">>, Doc]), + Expect = [ + [[<<"length">>, 14]], + [[<<"substring">>, Washed]], + [[<<"slice">>, Mom]] + ], + ?assertEqual(Expect, Result). + + should_exit_on_oom() -> + Src = << + "var state = [];\n" + "function(doc) {\n" + " var val = \"0123456789ABCDEF\";\n" + " for(var i = 0; i < 165535; i++) {\n" + " state.push([val, val]);\n" + " }\n" + "}\n" + >>, Proc = couch_query_servers:get_os_process(<<"javascript">>), - true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, ?FUNC]), + true = couch_query_servers:proc_prompt(Proc, [<<"add_fun">>, Src]), trigger_oom(Proc). 
trigger_oom(Proc) -> diff --git a/src/couch/test/eunit/couch_util_tests.erl b/src/couch/test/eunit/couch_util_tests.erl index 3e145c4f610..012c961a4c7 100644 --- a/src/couch/test/eunit/couch_util_tests.erl +++ b/src/couch/test/eunit/couch_util_tests.erl @@ -168,3 +168,10 @@ to_hex_test_() -> ?_assertEqual("", couch_util:to_hex(<<>>)), ?_assertEqual("010203faff", couch_util:to_hex(<<1, 2, 3, 250, 255>>)) ]. + +json_decode_test_() -> + [ + ?_assertEqual({[]}, couch_util:json_decode(<<"{}">>)), + ?_assertEqual({[]}, couch_util:json_decode(<<"{}">>, [])), + ?_assertEqual(#{}, couch_util:json_decode(<<"{}">>, [return_maps])) + ]. diff --git a/src/couch/test/eunit/couchdb_cookie_domain_tests.erl b/src/couch/test/eunit/couchdb_cookie_domain_tests.erl index e66ab31e67f..c46352f35b1 100755 --- a/src/couch/test/eunit/couchdb_cookie_domain_tests.erl +++ b/src/couch/test/eunit/couchdb_cookie_domain_tests.erl @@ -43,7 +43,8 @@ cookie_test_() -> fun({ok, Url, ContentType, Payload, _}) -> [ should_set_cookie_domain(Url, ContentType, Payload), - should_not_set_cookie_domain(Url, ContentType, Payload) + should_not_set_cookie_domain(Url, ContentType, Payload), + should_delete_cookie_domain(Url, ContentType, Payload) ] end } @@ -67,3 +68,13 @@ should_not_set_cookie_domain(Url, ContentType, Payload) -> Cookie = proplists:get_value("Set-Cookie", Headers), ?assertEqual(0, string:str(Cookie, "; Domain=")) end). + +should_delete_cookie_domain(Url, ContentType, Payload) -> + ?_test(begin + ok = config:set("couch_httpd_auth", "cookie_domain", + "example.com", false), + {ok, Code, Headers, _} = test_request:delete(Url, ContentType, Payload), + ?assertEqual(200, Code), + Cookie = proplists:get_value("Set-Cookie", Headers), + ?assert(string:str(Cookie, "; Domain=example.com") > 0) + end). 
diff --git a/src/couch_index/src/couch_index_server.erl b/src/couch_index/src/couch_index_server.erl index 49d1e61b741..6bebff2d8ed 100644 --- a/src/couch_index/src/couch_index_server.erl +++ b/src/couch_index/src/couch_index_server.erl @@ -243,9 +243,15 @@ reset_indexes(DbName, Root) -> end, dict:new(), ets:lookup(?BY_DB, DbName)), Fun = fun({Sig, DDocIds}) -> [{_, Pid}] = ets:lookup(?BY_SIG, {DbName, Sig}), - MRef = erlang:monitor(process, Pid), + unlink(Pid), gen_server:cast(Pid, delete), - receive {'DOWN', MRef, _, _, _} -> ok end, + receive + {'EXIT', Pid, _} -> + ok + after + 0 -> + ok + end, rem_from_ets(DbName, Sig, DDocIds, Pid) end, lists:foreach(Fun, dict:to_list(SigDDocIds)), @@ -278,7 +284,11 @@ handle_db_event(<<"shards/", _/binary>> = DbName, {ddoc_updated, DDocResult = couch_util:with_db(DbName, fun(Db) -> couch_db:open_doc(Db, DDocId, [ejson_body, ?ADMIN_CTX]) end), - DbShards = [mem3:name(Sh) || Sh <- mem3:local_shards(mem3:dbname(DbName))], + LocalShards = try mem3:local_shards(mem3:dbname(DbName)) + catch error:database_does_not_exist -> + [] + end, + DbShards = [mem3:name(Sh) || Sh <- LocalShards], lists:foreach(fun(DbShard) -> lists:foreach(fun({_DbShard, {_DDocId, Sig}}) -> % check if there are other ddocs with the same Sig for the same db diff --git a/src/couch_mrview/src/couch_mrview_index.erl b/src/couch_mrview/src/couch_mrview_index.erl index c96d8717391..68f1d232217 100644 --- a/src/couch_mrview/src/couch_mrview_index.erl +++ b/src/couch_mrview/src/couch_mrview_index.erl @@ -133,6 +133,12 @@ open(Db, State0) -> NewSt = couch_mrview_util:reset_index(Db, Fd, State), ensure_local_purge_doc(Db, NewSt), {ok, NewSt}; + {ok, Else} -> + couch_log:error("~s has a bad header: got ~p", + [IndexFName, Else]), + NewSt = couch_mrview_util:reset_index(Db, Fd, State), + ensure_local_purge_doc(Db, NewSt), + {ok, NewSt}; no_valid_header -> NewSt = couch_mrview_util:reset_index(Db, Fd, State), ensure_local_purge_doc(Db, NewSt), @@ -252,16 +258,7 @@ 
set_partitioned(Db, State) -> DbPartitioned = couch_db:is_partitioned(Db), ViewPartitioned = couch_util:get_value( <<"partitioned">>, DesignOpts, DbPartitioned), - IsPartitioned = case {DbPartitioned, ViewPartitioned} of - {true, true} -> - true; - {true, false} -> - false; - {false, false} -> - false; - _ -> - throw({bad_request, <<"invalid partition option">>}) - end, + IsPartitioned = DbPartitioned andalso ViewPartitioned, State#mrst{partitioned = IsPartitioned}. diff --git a/src/couch_mrview/src/couch_mrview_util.erl b/src/couch_mrview/src/couch_mrview_util.erl index e971720c9ad..d318a3f4a8b 100644 --- a/src/couch_mrview/src/couch_mrview_util.erl +++ b/src/couch_mrview/src/couch_mrview_util.erl @@ -425,9 +425,12 @@ validate_args(#mrst{} = State, Args0) -> apply_limit(ViewPartitioned, Args) -> - LimitType = case ViewPartitioned of - true -> "partition_query_limit"; - false -> "query_limit" + Options = Args#mrargs.extra, + IgnorePQLimit = lists:keyfind(ignore_partition_query_limit, 1, Options), + LimitType = case {ViewPartitioned, IgnorePQLimit} of + {true, false} -> "partition_query_limit"; + {true, _} -> "query_limit"; + {false, _} -> "query_limit" end, MaxLimit = config:get_integer("query_server_config", diff --git a/src/couch_replicator/src/couch_replicator.erl b/src/couch_replicator/src/couch_replicator.erl index b38f31b5996..b169dccb10d 100644 --- a/src/couch_replicator/src/couch_replicator.erl +++ b/src/couch_replicator/src/couch_replicator.erl @@ -141,7 +141,11 @@ strip_url_creds(Endpoint) -> iolist_to_binary(couch_util:url_strip_password(Url)) catch throw:{error, local_endpoints_not_supported} -> - Endpoint + Endpoint; + error:_ -> + % Avoid exposing any part of the URL in case there is a password in + % the malformed endpoint URL + null end. @@ -356,7 +360,8 @@ strip_url_creds_test_() -> [ t_strip_http_basic_creds(), t_strip_http_props_creds(), - t_strip_local_db_creds() + t_strip_local_db_creds(), + t_strip_url_creds_errors() ] }. 
@@ -389,4 +394,23 @@ t_strip_http_props_creds() -> ?assertEqual(<<"http://host/db/">>, strip_url_creds(Props2)) end). + +t_strip_url_creds_errors() -> + ?_test(begin + Bad1 = {[{<<"url">>, <<"http://adm:pass/bad">>}]}, + ?assertEqual(null, strip_url_creds(Bad1)), + Bad2 = {[{<<"garbage">>, <<"more garbage">>}]}, + ?assertEqual(null, strip_url_creds(Bad2)), + Bad3 = <<"http://a:b:c">>, + ?assertEqual(null, strip_url_creds(Bad3)), + Bad4 = <<"http://adm:pass:pass/bad">>, + ?assertEqual(null, strip_url_creds(Bad4)), + ?assertEqual(null, strip_url_creds(null)), + ?assertEqual(null, strip_url_creds(42)), + ?assertEqual(null, strip_url_creds([<<"a">>, <<"b">>])), + Bad5 = {[{<<"source_proxy">>, <<"http://adm:pass/bad">>}]}, + ?assertEqual(null, strip_url_creds(Bad5)) + end). + + -endif. diff --git a/src/couch_replicator/src/couch_replicator_api_wrap.erl b/src/couch_replicator/src/couch_replicator_api_wrap.erl index a21de4242c1..8dc7f2f013f 100644 --- a/src/couch_replicator/src/couch_replicator_api_wrap.erl +++ b/src/couch_replicator/src/couch_replicator_api_wrap.erl @@ -546,7 +546,7 @@ options_to_query_args(HttpDb, Path, Options0) -> length("GET " ++ FullUrl ++ " HTTP/1.1\r\n") + length("&atts_since=") + 6, % +6 = % encoded [ and ] PAs, MaxLen, []), - [{"atts_since", ?JSON_ENCODE(RevList)} | QueryArgs1] + [{"atts_since", ?b2l(iolist_to_binary(?JSON_ENCODE(RevList)))} | QueryArgs1] end. diff --git a/src/couch_replicator/src/couch_replicator_scheduler.erl b/src/couch_replicator/src/couch_replicator_scheduler.erl index 53c040e8c15..641443a7c72 100644 --- a/src/couch_replicator/src/couch_replicator_scheduler.erl +++ b/src/couch_replicator/src/couch_replicator_scheduler.erl @@ -456,6 +456,9 @@ pending_jobs(Count) when is_integer(Count), Count > 0 -> [Job || {_Started, Job} <- gb_sets:to_list(Set1)]. 
+pending_fold(#job{pid = Pid}, Acc) when is_pid(Pid) -> + Acc; + pending_fold(Job, {Set, Now, Count, HealthThreshold}) -> Set1 = case {not_recently_crashed(Job, Now, HealthThreshold), gb_sets:size(Set) >= Count} of @@ -1051,6 +1054,7 @@ scheduler_test_() -> [ t_pending_jobs_simple(), t_pending_jobs_skip_crashed(), + t_pending_jobs_skip_running(), t_one_job_starts(), t_no_jobs_start_if_max_is_0(), t_one_job_starts_if_max_is_1(), @@ -1112,6 +1116,18 @@ t_pending_jobs_skip_crashed() -> end). +t_pending_jobs_skip_running() -> + ?_test(begin + Job1 = continuous(1), + Job2 = continuous_running(2), + Job3 = oneshot(3), + Job4 = oneshot_running(4), + Jobs = [Job1, Job2, Job3, Job4], + setup_jobs(Jobs), + ?assertEqual([Job1, Job3], pending_jobs(4)) + end). + + t_one_job_starts() -> ?_test(begin setup_jobs([oneshot(1)]), diff --git a/src/dreyfus/src/dreyfus_fabric_cleanup.erl b/src/dreyfus/src/dreyfus_fabric_cleanup.erl index 2840a2f2d55..6817127487d 100644 --- a/src/dreyfus/src/dreyfus_fabric_cleanup.erl +++ b/src/dreyfus/src/dreyfus_fabric_cleanup.erl @@ -30,12 +30,16 @@ go(DbName) -> ok. active_sigs(#doc{body={Fields}}=Doc) -> - {RawIndexes} = couch_util:get_value(<<"indexes">>, Fields, {[]}), - {IndexNames, _} = lists:unzip(RawIndexes), - [begin - {ok, Index} = dreyfus_index:design_doc_to_index(Doc, IndexName), - Index#index.sig - end || IndexName <- IndexNames]. + try + {RawIndexes} = couch_util:get_value(<<"indexes">>, Fields, {[]}), + {IndexNames, _} = lists:unzip(RawIndexes), + [begin + {ok, Index} = dreyfus_index:design_doc_to_index(Doc, IndexName), + Index#index.sig + end || IndexName <- IndexNames] + catch error:{badmatch, _Error} -> + [] + end. 
cleanup_local_purge_doc(DbName, ActiveSigs) -> {ok, BaseDir} = clouseau_rpc:get_root_dir(), diff --git a/src/dreyfus/src/dreyfus_httpd.erl b/src/dreyfus/src/dreyfus_httpd.erl index 346f5ede64d..007dace8f28 100644 --- a/src/dreyfus/src/dreyfus_httpd.erl +++ b/src/dreyfus/src/dreyfus_httpd.erl @@ -239,6 +239,8 @@ validate_index_query(counts, Value, Args) -> Args#index_query_args{counts=Value}; validate_index_query(ranges, Value, Args) -> Args#index_query_args{ranges=Value}; +validate_index_query(drilldown, [[_|_]|_] = Value, Args) -> + Args#index_query_args{drilldown=Value}; validate_index_query(drilldown, Value, Args) -> DrillDown = Args#index_query_args.drilldown, Args#index_query_args{drilldown=[Value|DrillDown]}; @@ -447,10 +449,15 @@ validate_search_restrictions(Db, DDoc, Args) -> q = Query, partition = Partition, grouping = Grouping, - limit = Limit + limit = Limit, + counts = Counts, + drilldown = Drilldown, + ranges = Ranges } = Args, #grouping{ - by = GroupBy + by = GroupBy, + limit = GroupLimit, + sort = GroupSort } = Grouping, case Query of @@ -496,9 +503,18 @@ validate_search_restrictions(Db, DDoc, Args) -> parse_non_negative_int_param("limit", Limit, "max_limit", MaxLimit) end, - case GroupBy /= nil andalso is_binary(Partition) of + DefaultArgs = #index_query_args{}, + + case is_binary(Partition) andalso ( + Counts /= DefaultArgs#index_query_args.counts + orelse Drilldown /= DefaultArgs#index_query_args.drilldown + orelse Ranges /= DefaultArgs#index_query_args.ranges + orelse GroupSort /= DefaultArgs#index_query_args.grouping#grouping.sort + orelse GroupBy /= DefaultArgs#index_query_args.grouping#grouping.by + orelse GroupLimit /= DefaultArgs#index_query_args.grouping#grouping.limit + ) of true -> - Msg5 = <<"`group_by` and `partition` are incompatible">>, + Msg5 = <<"`partition` and any of `drilldown`, `ranges`, `group_field`, `group_sort`, `group_limit` or `group_by` are incompatible">>, throw({bad_request, Msg5}); false -> ok diff --git 
a/src/dreyfus/test/elixir/test/partition_search_test.exs b/src/dreyfus/test/elixir/test/partition_search_test.exs index 19a915ad387..12199544923 100644 --- a/src/dreyfus/test/elixir/test/partition_search_test.exs +++ b/src/dreyfus/test/elixir/test/partition_search_test.exs @@ -21,7 +21,7 @@ defmodule PartitionSearchTest do } end - resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs}, query: %{w: 3}) + resp = Couch.post("/#{db_name}/_bulk_docs", headers: ["Content-Type": "application/json"], body: %{:docs => docs}, query: %{w: 3}) assert resp.status_code in [201, 202] end @@ -166,7 +166,7 @@ defmodule PartitionSearchTest do resp = Couch.get(url, query: %{q: "some:field"}) assert resp.status_code == 200 ids = get_ids(resp) - assert ids == ["bar:1", "bar:5", "bar:9", "foo:2", "bar:3", "foo:4", "foo:6", "bar:7", "foo:8", "foo:10"] + assert Enum.sort(ids) == Enum.sort(["bar:1", "bar:5", "bar:9", "foo:2", "bar:3", "foo:4", "foo:6", "bar:7", "foo:8", "foo:10"]) end @tag :with_db @@ -179,7 +179,7 @@ defmodule PartitionSearchTest do resp = Couch.get(url, query: %{q: "some:field"}) assert resp.status_code == 200 ids = get_ids(resp) - assert ids == ["bar:1", "bar:5", "bar:9", "foo:2", "bar:3", "foo:4", "foo:6", "bar:7", "foo:8", "foo:10"] + assert Enum.sort(ids) == Enum.sort(["bar:1", "bar:5", "bar:9", "foo:2", "bar:3", "foo:4", "foo:6", "bar:7", "foo:8", "foo:10"]) end @tag :with_db @@ -192,7 +192,7 @@ defmodule PartitionSearchTest do resp = Couch.get(url, query: %{q: "some:field", limit: 3}) assert resp.status_code == 200 ids = get_ids(resp) - assert ids == ["bar:1", "bar:5", "bar:9"] + assert Enum.sort(ids) == Enum.sort(["bar:1", "bar:5", "bar:9"]) end @tag :with_db @@ -216,4 +216,32 @@ defmodule PartitionSearchTest do resp = Couch.post(url, body: %{q: "some:field", partition: "bar"}) assert resp.status_code == 400 end + + @tag :with_partitioned_db + test "restricted parameters are not allowed in query or body", context do + db_name = context[:db_name] + 
create_search_docs(db_name) + create_ddoc(db_name) + + body = %{q: "some:field", partition: "foo"} + + Enum.each( + [ + {:counts, "[\"type\"]"}, + {:group_field, "some"}, + {:ranges, :jiffy.encode(%{price: %{cheap: "[0 TO 100]"}})}, + {:drilldown, "[\"key\",\"a\"]"}, + ], + fn {key, value} -> + url = "/#{db_name}/_partition/foo/_design/library/_search/books" + bannedparam = Map.put(body, key, value) + get_resp = Couch.get(url, query: bannedparam) + %{:body => %{"reason" => get_reason}} = get_resp + assert Regex.match?(~r/are incompatible/, get_reason) + post_resp = Couch.post(url, body: bannedparam) + %{:body => %{"reason" => post_reason}} = post_resp + assert Regex.match?(~r/are incompatible/, post_reason) + end + ) + end end diff --git a/src/dreyfus/test/elixir/test/search_test.exs b/src/dreyfus/test/elixir/test/search_test.exs new file mode 100644 index 00000000000..829b3395fc4 --- /dev/null +++ b/src/dreyfus/test/elixir/test/search_test.exs @@ -0,0 +1,226 @@ +defmodule SearchTest do + use CouchTestCase + + @moduletag :search + + @moduledoc """ + Test search + """ + + def create_search_docs(db_name) do + resp = Couch.post("/#{db_name}/_bulk_docs", + headers: ["Content-Type": "application/json"], + body: %{:docs => [ + %{"item" => "apple", "place" => "kitchen", "state" => "new"}, + %{"item" => "banana", "place" => "kitchen", "state" => "new"}, + %{"item" => "carrot", "place" => "kitchen", "state" => "old"}, + %{"item" => "date", "place" => "lobby", "state" => "unknown"}, + ]} + ) + assert resp.status_code in [201, 202] + end + + def create_ddoc(db_name, opts \\ %{}) do + default_ddoc = %{ + indexes: %{ + fruits: %{ + analyzer: %{name: "standard"}, + index: "function (doc) {\n index(\"item\", doc.item, {facet: true});\n index(\"place\", doc.place, {facet: true});\n index(\"state\", doc.state, {facet: true});\n}" + } + } + } + + ddoc = Enum.into(opts, default_ddoc) + + resp = Couch.put("/#{db_name}/_design/inventory", body: ddoc) + assert resp.status_code in [201, 
202] + assert Map.has_key?(resp.body, "ok") == true + end + + def create_invalid_ddoc(db_name, opts \\ %{}) do + invalid_ddoc = %{ + :indexes => [ + %{"name" => "foo", "ddoc" => "bar", "type" => "text"}, + ] + } + + ddoc = Enum.into(opts, invalid_ddoc) + + resp = Couch.put("/#{db_name}/_design/search", body: ddoc) + assert resp.status_code in [201, 202] + assert Map.has_key?(resp.body, "ok") == true + end + + def get_items (resp) do + %{:body => %{"rows" => rows}} = resp + Enum.map(rows, fn row -> row["doc"]["item"] end) + end + + @tag :with_db + test "search returns all items for GET", context do + db_name = context[:db_name] + create_search_docs(db_name) + create_ddoc(db_name) + + url = "/#{db_name}/_design/inventory/_search/fruits" + resp = Couch.get(url, query: %{q: "*:*", include_docs: true}) + assert resp.status_code == 200 + ids = get_items(resp) + assert Enum.sort(ids) == Enum.sort(["apple", "banana", "carrot", "date"]) + end + + @tag :with_db + test "drilldown single key single value for GET", context do + db_name = context[:db_name] + create_search_docs(db_name) + create_ddoc(db_name) + + url = "/#{db_name}/_design/inventory/_search/fruits" + resp = Couch.get(url, query: %{q: "*:*", drilldown: :jiffy.encode(["place", "kitchen"]), include_docs: true}) + assert resp.status_code == 200 + ids = get_items(resp) + assert Enum.sort(ids) == Enum.sort(["apple", "banana", "carrot"]) + end + + @tag :with_db + test "drilldown single key multiple values for GET", context do + db_name = context[:db_name] + create_search_docs(db_name) + create_ddoc(db_name) + + url = "/#{db_name}/_design/inventory/_search/fruits" + resp = Couch.get(url, query: %{q: "*:*", drilldown: :jiffy.encode(["state", "new", "unknown"]), include_docs: true}) + assert resp.status_code == 200 + ids = get_items(resp) + assert Enum.sort(ids) == Enum.sort(["apple", "banana", "date"]) + end + + @tag :with_db + test "drilldown multiple keys single values for GET", context do + db_name = context[:db_name] 
+ create_search_docs(db_name) + create_ddoc(db_name) + + url = "/#{db_name}/_design/inventory/_search/fruits" + resp = Couch.get(url, query: %{q: "*:*", drilldown: :jiffy.encode([["state", "old"], ["item", "apple"]]), include_docs: true}) + assert resp.status_code == 200 + ids = get_items(resp) + assert Enum.sort(ids) == [] + end + + @tag :with_db + test "drilldown multiple query definitions for GET", context do + db_name = context[:db_name] + create_search_docs(db_name) + create_ddoc(db_name) + + url = "/#{db_name}/_design/inventory/_search/fruits?q=*:*&drilldown=[\"state\",\"old\"]&drilldown=[\"item\",\"apple\"]&include_docs=true" + resp = Couch.get(url) + assert resp.status_code == 200 + ids = get_items(resp) + assert Enum.sort(ids) == [] + end + + + @tag :with_db + test "search returns all items for POST", context do + db_name = context[:db_name] + create_search_docs(db_name) + create_ddoc(db_name) + + url = "/#{db_name}/_design/inventory/_search/fruits" + resp = Couch.post(url, body: %{q: "*:*", include_docs: true}) + assert resp.status_code == 200 + ids = get_items(resp) + assert Enum.sort(ids) == Enum.sort(["apple", "banana", "carrot", "date"]) + end + + @tag :with_db + test "drilldown single key single value for POST", context do + db_name = context[:db_name] + create_search_docs(db_name) + create_ddoc(db_name) + + url = "/#{db_name}/_design/inventory/_search/fruits" + resp = Couch.post(url, body: %{query: "*:*", drilldown: ["place", "kitchen"], include_docs: true}) + assert resp.status_code == 200 + ids = get_items(resp) + assert Enum.sort(ids) == Enum.sort(["apple", "banana", "carrot"]) + end + + @tag :with_db + test "drilldown single key multiple values for POST", context do + db_name = context[:db_name] + create_search_docs(db_name) + create_ddoc(db_name) + + url = "/#{db_name}/_design/inventory/_search/fruits" + resp = Couch.post(url, body: %{query: "*:*", drilldown: ["state", "new", "unknown"], include_docs: true}) + assert resp.status_code == 200 + 
ids = get_items(resp) + assert Enum.sort(ids) == Enum.sort(["apple", "banana", "date"]) + end + + @tag :with_db + test "drilldown multiple keys single values for POST", context do + db_name = context[:db_name] + create_search_docs(db_name) + create_ddoc(db_name) + + url = "/#{db_name}/_design/inventory/_search/fruits" + resp = Couch.post(url, body: %{q: "*:*", drilldown: [["state", "old"], ["item", "apple"]], include_docs: true}) + assert resp.status_code == 200 + ids = get_items(resp) + assert Enum.sort(ids) == [] + end + + @tag :with_db + test "drilldown three keys single values for POST", context do + db_name = context[:db_name] + create_search_docs(db_name) + create_ddoc(db_name) + + url = "/#{db_name}/_design/inventory/_search/fruits" + resp = Couch.post(url, body: %{q: "*:*", drilldown: [["place", "kitchen"], ["state", "new"], ["item", "apple"]], include_docs: true}) + assert resp.status_code == 200 + ids = get_items(resp) + assert Enum.sort(ids) == ["apple"] + end + + @tag :with_db + test "drilldown multiple keys multiple values for POST", context do + db_name = context[:db_name] + create_search_docs(db_name) + create_ddoc(db_name) + + url = "/#{db_name}/_design/inventory/_search/fruits" + resp = Couch.post(url, body: %{q: "*:*", drilldown: [["state", "old", "new"], ["item", "apple"]], include_docs: true}) + assert resp.status_code == 200 + ids = get_items(resp) + assert Enum.sort(ids) == ["apple"] + end + + @tag :with_db + test "drilldown multiple query definitions for POST", context do + db_name = context[:db_name] + create_search_docs(db_name) + create_ddoc(db_name) + + url = "/#{db_name}/_design/inventory/_search/fruits" + resp = Couch.post(url, body: "{\"include_docs\": true, \"q\": \"*:*\", \"drilldown\": [\"state\", \"old\"], \"drilldown\": [\"item\", \"apple\"]}") + assert resp.status_code == 200 + ids = get_items(resp) + assert Enum.sort(ids) == ["apple"] + end + + @tag :with_db + test "clean up search index with invalid design document", context do 
+ db_name = context[:db_name] + create_search_docs(db_name) + create_ddoc(db_name) + create_invalid_ddoc(db_name) + + resp = Couch.post("/#{db_name}/_search_cleanup") + assert resp.status_code in [201, 202] + end +end diff --git a/src/fabric/src/fabric_rpc.erl b/src/fabric/src/fabric_rpc.erl index 7b688b2b9d9..85da3ff121c 100644 --- a/src/fabric/src/fabric_rpc.erl +++ b/src/fabric/src/fabric_rpc.erl @@ -439,7 +439,7 @@ get_node_seqs(Db, Nodes) -> get_or_create_db(DbName, Options) -> - couch_db:open_int(DbName, [{create_if_missing, true} | Options]). + mem3_util:get_or_create_db(DbName, Options). get_view_cb(#mrargs{extra = Options}) -> @@ -515,7 +515,8 @@ changes_enumerator(DocInfo, Acc) -> [] -> ChangesRow = {no_pass, [ {pending, Pending-1}, - {seq, Seq}]}; + {seq, {Seq, uuid(Db), couch_db:owner_of(Epochs, Seq)}} + ]}; Results -> Opts = if Conflicts -> [conflicts | DocOptions]; true -> DocOptions end, ChangesRow = {change, [ diff --git a/src/fabric/test/eunit/fabric_rpc_tests.erl b/src/fabric/test/eunit/fabric_rpc_tests.erl new file mode 100644 index 00000000000..b94caf659b3 --- /dev/null +++ b/src/fabric/test/eunit/fabric_rpc_tests.erl @@ -0,0 +1,181 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(fabric_rpc_tests). + + +-include_lib("couch/include/couch_eunit.hrl"). +-include_lib("couch/include/couch_db.hrl"). + + +-define(TDEF(A), {A, fun A/1}). 
+ + +main_test_() -> + { + setup, + spawn, + fun setup_all/0, + fun teardown_all/1, + [ + { + foreach, + fun setup_no_db_or_config/0, + fun teardown_db/1, + lists:map(fun wrap/1, [ + ?TDEF(t_no_config_non_shard_db_create_succeeds) + ]) + }, + { + foreach, + fun setup_shard/0, + fun teardown_noop/1, + lists:map(fun wrap/1, [ + ?TDEF(t_no_db), + ?TDEF(t_no_config_db_create_fails_for_shard), + ?TDEF(t_no_config_db_create_fails_for_shard_rpc) + ]) + }, + { + foreach, + fun setup_shard/0, + fun teardown_db/1, + lists:map(fun wrap/1, [ + ?TDEF(t_db_create_with_config) + ]) + } + + ] + }. + + +setup_all() -> + test_util:start_couch([rexi, mem3, fabric]). + + +teardown_all(Ctx) -> + test_util:stop_couch(Ctx). + + +setup_no_db_or_config() -> + ?tempdb(). + + +setup_shard() -> + ?tempshard(). + + +teardown_noop(_DbName) -> + ok. + +teardown_db(DbName) -> + ok = couch_server:delete(DbName, []). + + +wrap({Name, Fun}) -> + fun(Arg) -> + {timeout, 60, {atom_to_list(Name), fun() -> + process_flag(trap_exit, true), + Fun(Arg) + end}} + end. + + +t_no_db(DbName) -> + ?assertEqual({not_found, no_db_file}, couch_db:open_int(DbName, [?ADMIN_CTX])). + + +t_no_config_non_shard_db_create_succeeds(DbName) -> + ?assertEqual({not_found, no_db_file}, couch_db:open_int(DbName, [?ADMIN_CTX])), + ?assertEqual(DbName, mem3:dbname(DbName)), + ?assertMatch({ok, _}, mem3_util:get_or_create_db(DbName, [?ADMIN_CTX])). + + +t_no_config_db_create_fails_for_shard(DbName) -> + ?assertEqual({not_found, no_db_file}, couch_db:open_int(DbName, [?ADMIN_CTX])), + ?assertException(throw, {error, missing_target}, mem3_util:get_or_create_db(DbName, [?ADMIN_CTX])). 
+ + +t_no_config_db_create_fails_for_shard_rpc(DbName) -> + ?assertEqual({not_found, no_db_file}, couch_db:open_int(DbName, [?ADMIN_CTX])), + ?assertException(throw, {error, missing_target}, mem3_util:get_or_create_db(DbName, [?ADMIN_CTX])), + MFA = {fabric_rpc, get_db_info, [DbName]}, + Ref = rexi:cast(node(), self(), MFA), + Resp = receive + Resp0 -> Resp0 + end, + ?assertMatch({Ref, {'rexi_EXIT', {{error, missing_target}, _}}}, Resp). + + +t_db_create_with_config(DbName) -> + MDbName = mem3:dbname(DbName), + DbDoc = #doc{id = MDbName, body = test_db_doc()}, + + ?assertEqual({not_found, no_db_file}, couch_db:open_int(DbName, [?ADMIN_CTX])), + + %% Write the dbs db config + couch_util:with_db(mem3_sync:shards_db(), fun(Db) -> + ?assertEqual({not_found, missing}, couch_db:open_doc(Db, MDbName, [ejson_body])), + ?assertMatch({ok, _}, couch_db:update_docs(Db, [DbDoc])) + end), + + %% Test get_or_create_db loads the properties as expected + couch_util:with_db(mem3_sync:shards_db(), fun(Db) -> + ?assertMatch({ok, _}, couch_db:open_doc(Db, MDbName, [ejson_body])), + ?assertEqual({not_found, no_db_file}, couch_db:open_int(DbName, [?ADMIN_CTX])), + Resp = mem3_util:get_or_create_db(DbName, [?ADMIN_CTX]), + ?assertMatch({ok, _}, Resp), + {ok, LDb} = Resp, + + {Body} = test_db_doc(), + DbProps = mem3_util:get_shard_opts(Body), + {Props} = case couch_db_engine:get_props(LDb) of + undefined -> {[]}; + Else -> {Else} + end, + %% We don't normally store the default engine name + EngineProps = case couch_db_engine:get_engine(LDb) of + couch_bt_engine -> + []; + EngineName -> + [{engine, EngineName}] + end, + ?assertEqual([{props, Props} | EngineProps], DbProps) + end). 
+ + +test_db_doc() -> + {[ + {<<"shard_suffix">>, ".1584997648"}, + {<<"changelog">>, [ + [<<"add">>, <<"00000000-7fffffff">>, <<"node1@127.0.0.1">>], + [<<"add">>, <<"00000000-7fffffff">>, <<"node2@127.0.0.1">>], + [<<"add">>, <<"00000000-7fffffff">>, <<"node3@127.0.0.1">>], + [<<"add">>, <<"80000000-ffffffff">>, <<"node1@127.0.0.1">>], + [<<"add">>, <<"80000000-ffffffff">>, <<"node2@127.0.0.1">>], + [<<"add">>, <<"80000000-ffffffff">>, <<"node3@127.0.0.1">>] + ]}, + {<<"by_node">>, {[ + {<<"node1@127.0.0.1">>, [<<"00000000-7fffffff">>, <<"80000000-ffffffff">>]}, + {<<"node2@127.0.0.1">>, [<<"00000000-7fffffff">>, <<"80000000-ffffffff">>]}, + {<<"node3@127.0.0.1">>, [<<"00000000-7fffffff">>, <<"80000000-ffffffff">>]} + ]}}, + {<<"by_range">>, {[ + {<<"00000000-7fffffff">>, [<<"node1@127.0.0.1">>, <<"node2@127.0.0.1">>, <<"node3@127.0.0.1">>]}, + {<<"80000000-ffffffff">>, [<<"node1@127.0.0.1">>, <<"node2@127.0.0.1">>, <<"node3@127.0.0.1">>]} + ]}}, + {<<"props">>, {[ + {partitioned, true}, + {hash, [couch_partition, hash, []]} + ]}} + ]}. + diff --git a/src/ioq/src/ioq.erl b/src/ioq/src/ioq.erl index 81d94a36f40..99b3ce3855a 100644 --- a/src/ioq/src/ioq.erl +++ b/src/ioq/src/ioq.erl @@ -45,7 +45,7 @@ call(Fd, Msg, Metadata) -> Priority = io_class(Msg, Metadata), case bypass(Priority) of true -> - gen_server:call(Fd, Msg); + gen_server:call(Fd, Msg, infinity); false -> queued_call(Fd, Msg, Priority) end. diff --git a/src/jwtf/.gitignore b/src/jwtf/.gitignore new file mode 100644 index 00000000000..5eadeac897a --- /dev/null +++ b/src/jwtf/.gitignore @@ -0,0 +1,4 @@ +*~ +_build/ +doc/ +rebar.lock diff --git a/src/jwtf/LICENSE b/src/jwtf/LICENSE new file mode 100644 index 00000000000..d9a10c0d8e8 --- /dev/null +++ b/src/jwtf/LICENSE @@ -0,0 +1,176 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS diff --git a/src/jwtf/README.md b/src/jwtf/README.md new file mode 100644 index 00000000000..e6038fbc060 --- /dev/null +++ b/src/jwtf/README.md @@ -0,0 +1,18 @@ +# jwtf + +JSON Web Token Functions + +This library provides JWT parsing and validation functions + +Supports; + +* Verify +* RS256 +* RS384 +* RS512 +* HS256 +* HS384 +* HS512 +* ES256 +* ES384 +* ES512 diff --git a/src/jwtf/rebar.config b/src/jwtf/rebar.config new file mode 100644 index 00000000000..e0d18443bce --- /dev/null +++ b/src/jwtf/rebar.config @@ -0,0 +1,2 @@ +{cover_enabled, true}. +{cover_print_enabled, true}. diff --git a/src/jwtf/src/jwtf.app.src b/src/jwtf/src/jwtf.app.src new file mode 100644 index 00000000000..24081bf6fd4 --- /dev/null +++ b/src/jwtf/src/jwtf.app.src @@ -0,0 +1,32 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +{application, jwtf, [ + {description, "JSON Web Token Functions"}, + {vsn, git}, + {registered, []}, + {applications, [ + kernel, + stdlib, + b64url, + config, + crypto, + jiffy, + public_key + ]}, + {mod, {jwtf_app, []}}, + {env,[]}, + {modules, []}, + {maintainers, []}, + {licenses, []}, + {links, []} +]}. diff --git a/src/jwtf/src/jwtf.erl b/src/jwtf/src/jwtf.erl new file mode 100644 index 00000000000..247f2b50870 --- /dev/null +++ b/src/jwtf/src/jwtf.erl @@ -0,0 +1,353 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. 
You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +% @doc +% This module decodes and validates JWT tokens. Almost all property +% checks are optional. If not checked, the presence or validity of the +% field is not verified. Signature check is mandatory, though. + +-module(jwtf). + +-export([ + encode/3, + decode/3, + valid_algorithms/0, + verification_algorithm/1 +]). + +-define(ALGS, [ + {<<"RS256">>, {public_key, sha256}}, % RSA PKCS#1 signature with SHA-256 + {<<"RS384">>, {public_key, sha384}}, + {<<"RS512">>, {public_key, sha512}}, + {<<"ES256">>, {public_key, sha256}}, + {<<"ES384">>, {public_key, sha384}}, + {<<"ES512">>, {public_key, sha512}}, + {<<"HS256">>, {hmac, sha256}}, + {<<"HS384">>, {hmac, sha384}}, + {<<"HS512">>, {hmac, sha512}}]). + +-define(CHECKS, [ + alg, + exp, + iat, + iss, + kid, + nbf, + sig, + typ]). + + +% @doc encode +% Encode the JSON Header and Claims using Key and Alg obtained from Header +-spec encode(term(), term(), term()) -> + {ok, binary()} | no_return(). 
+encode(Header = {HeaderProps}, Claims, Key) -> + try + Alg = case prop(<<"alg">>, HeaderProps) of + undefined -> + throw({bad_request, <<"Missing alg header parameter">>}); + Val -> + Val + end, + EncodedHeader = b64url:encode(jiffy:encode(Header)), + EncodedClaims = b64url:encode(jiffy:encode(Claims)), + Message = <>, + SignatureOrMac = case verification_algorithm(Alg) of + {public_key, Algorithm} -> + public_key:sign(Message, Algorithm, Key); + {hmac, Algorithm} -> + crypto:hmac(Algorithm, Key, Message) + end, + EncodedSignatureOrMac = b64url:encode(SignatureOrMac), + {ok, <>} + catch + throw:Error -> + {error, Error} + end. + + +% @doc decode +% Decodes the supplied encoded token, checking +% for the attributes defined in Checks and calling +% the key store function to retrieve the key needed +% to verify the signature +decode(EncodedToken, Checks, KS) -> + try + [Header, Payload, Signature] = split(EncodedToken), + validate(Header, Payload, Signature, Checks, KS), + {ok, decode_b64url_json(Payload)} + catch + throw:Error -> + {error, Error} + end. + + +% @doc valid_algorithms +% Return a list of supported algorithms +-spec valid_algorithms() -> [binary()]. +valid_algorithms() -> + proplists:get_keys(?ALGS). + + +% @doc verification_algorithm +% Return {VerificationMethod, Algorithm} tuple for the specified Alg +-spec verification_algorithm(binary()) -> + {atom(), atom()} | no_return(). +verification_algorithm(Alg) -> + case lists:keyfind(Alg, 1, ?ALGS) of + {Alg, Val} -> + Val; + false -> + throw({bad_request, <<"Invalid alg header parameter">>}) + end. + + +validate(Header0, Payload0, Signature, Checks, KS) -> + validate_checks(Checks), + Header1 = props(decode_b64url_json(Header0)), + validate_header(Header1, Checks), + + Payload1 = props(decode_b64url_json(Payload0)), + validate_payload(Payload1, Checks), + + Alg = prop(<<"alg">>, Header1), + Key = key(Header1, Checks, KS), + verify(Alg, Header0, Payload0, Signature, Key). 
+ + +validate_checks(Checks) when is_list(Checks) -> + case {lists:usort(Checks), lists:sort(Checks)} of + {L, L} -> + ok; + {L1, L2} -> + error({duplicate_checks, L2 -- L1}) + end, + {_, UnknownChecks} = lists:partition(fun valid_check/1, Checks), + case UnknownChecks of + [] -> + ok; + UnknownChecks -> + error({unknown_checks, UnknownChecks}) + end. + + +valid_check(Check) when is_atom(Check) -> + lists:member(Check, ?CHECKS); + +valid_check({Check, _}) when is_atom(Check) -> + lists:member(Check, ?CHECKS); + +valid_check(_) -> + false. + + +validate_header(Props, Checks) -> + validate_typ(Props, Checks), + validate_alg(Props, Checks). + + +validate_typ(Props, Checks) -> + Required = prop(typ, Checks), + TYP = prop(<<"typ">>, Props), + case {Required, TYP} of + {undefined, undefined} -> + ok; + {true, undefined} -> + throw({bad_request, <<"Missing typ header parameter">>}); + {_, <<"JWT">>} -> + ok; + {true, _} -> + throw({bad_request, <<"Invalid typ header parameter">>}) + end. + + +validate_alg(Props, Checks) -> + Required = prop(alg, Checks), + Alg = prop(<<"alg">>, Props), + case {Required, Alg} of + {undefined, undefined} -> + ok; + {true, undefined} -> + throw({bad_request, <<"Missing alg header parameter">>}); + {_, Alg} -> + case lists:member(Alg, valid_algorithms()) of + true -> + ok; + false -> + throw({bad_request, <<"Invalid alg header parameter">>}) + end + end. + + +%% Not all these fields have to be present, but if they _are_ present +%% they must be valid. +validate_payload(Props, Checks) -> + validate_iss(Props, Checks), + validate_iat(Props, Checks), + validate_nbf(Props, Checks), + validate_exp(Props, Checks). 
+ + +validate_iss(Props, Checks) -> + ExpectedISS = prop(iss, Checks), + ActualISS = prop(<<"iss">>, Props), + + case {ExpectedISS, ActualISS} of + {undefined, undefined} -> + ok; + {ISS, undefined} when ISS /= undefined -> + throw({bad_request, <<"Missing iss claim">>}); + {ISS, ISS} -> + ok; + {_, _} -> + throw({bad_request, <<"Invalid iss claim">>}) + end. + + +validate_iat(Props, Checks) -> + Required = prop(iat, Checks), + IAT = prop(<<"iat">>, Props), + + case {Required, IAT} of + {undefined, undefined} -> + ok; + {true, undefined} -> + throw({bad_request, <<"Missing iat claim">>}); + {_, IAT} when is_integer(IAT) -> + ok; + {true, _} -> + throw({bad_request, <<"Invalid iat claim">>}) + end. + + +validate_nbf(Props, Checks) -> + Required = prop(nbf, Checks), + NBF = prop(<<"nbf">>, Props), + + case {Required, NBF} of + {undefined, undefined} -> + ok; + {true, undefined} -> + throw({bad_request, <<"Missing nbf claim">>}); + {_, IAT} -> + assert_past(<<"nbf">>, IAT) + end. + + +validate_exp(Props, Checks) -> + Required = prop(exp, Checks), + EXP = prop(<<"exp">>, Props), + + case {Required, EXP} of + {undefined, undefined} -> + ok; + {true, undefined} -> + throw({bad_request, <<"Missing exp claim">>}); + {_, EXP} -> + assert_future(<<"exp">>, EXP) + end. + + +key(Props, Checks, KS) -> + Alg = prop(<<"alg">>, Props), + Required = prop(kid, Checks), + KID = prop(<<"kid">>, Props), + case {Required, KID} of + {true, undefined} -> + throw({bad_request, <<"Missing kid claim">>}); + {_, KID} -> + KS(Alg, KID) + end. + + +verify(Alg, Header, Payload, SignatureOrMac0, Key) -> + Message = <
>, + SignatureOrMac1 = b64url:decode(SignatureOrMac0), + {VerificationMethod, Algorithm} = verification_algorithm(Alg), + case VerificationMethod of + public_key -> + public_key_verify(Algorithm, Message, SignatureOrMac1, Key); + hmac -> + hmac_verify(Algorithm, Message, SignatureOrMac1, Key) + end. + + +public_key_verify(Algorithm, Message, Signature, PublicKey) -> + case public_key:verify(Message, Algorithm, Signature, PublicKey) of + true -> + ok; + false -> + throw({bad_request, <<"Bad signature">>}) + end. + + +hmac_verify(Algorithm, Message, HMAC, SecretKey) -> + case crypto:hmac(Algorithm, SecretKey, Message) of + HMAC -> + ok; + _ -> + throw({bad_request, <<"Bad HMAC">>}) + end. + + +split(EncodedToken) -> + case binary:split(EncodedToken, <<$.>>, [global]) of + [_, _, _] = Split -> Split; + _ -> throw({bad_request, <<"Malformed token">>}) + end. + + +decode_b64url_json(B64UrlEncoded) -> + try + case b64url:decode(B64UrlEncoded) of + {error, Reason} -> + throw({bad_request, Reason}); + JsonEncoded -> + jiffy:decode(JsonEncoded) + end + catch + error:Error -> + throw({bad_request, Error}) + end. + + +props({Props}) -> + Props; + +props(_) -> + throw({bad_request, <<"Not an object">>}). + + +assert_past(Name, Time) -> + case Time < now_seconds() of + true -> + ok; + false -> + throw({unauthorized, <>}) + end. + +assert_future(Name, Time) -> + case Time > now_seconds() of + true -> + ok; + false -> + throw({unauthorized, <>}) + end. + + +now_seconds() -> + {MegaSecs, Secs, _MicroSecs} = os:timestamp(), + MegaSecs * 1000000 + Secs. + + +prop(Prop, Props) -> + proplists:get_value(Prop, Props). diff --git a/src/jwtf/src/jwtf_app.erl b/src/jwtf/src/jwtf_app.erl new file mode 100644 index 00000000000..bd708e2a3df --- /dev/null +++ b/src/jwtf/src/jwtf_app.erl @@ -0,0 +1,28 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. 
You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(jwtf_app). + +-behaviour(application). + +%% Application callbacks +-export([start/2, stop/1]). + +%% =================================================================== +%% Application callbacks +%% =================================================================== + +start(_StartType, _StartArgs) -> + jwtf_sup:start_link(). + +stop(_State) -> + ok. diff --git a/src/jwtf/src/jwtf_keystore.erl b/src/jwtf/src/jwtf_keystore.erl new file mode 100644 index 00000000000..5c2b47985a6 --- /dev/null +++ b/src/jwtf/src/jwtf_keystore.erl @@ -0,0 +1,166 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(jwtf_keystore). +-behaviour(gen_server). +-behaviour(config_listener). + +-include_lib("public_key/include/public_key.hrl"). + +% public api. +-export([ + get/2, + start_link/0 +]). + +% gen_server api. +-export([init/1, handle_call/3, handle_cast/2, handle_info/2, + code_change/3, terminate/2]). + +% config_listener api +-export([handle_config_change/5, handle_config_terminate/3]). 
+ +% public functions + +get(Alg, undefined) when is_binary(Alg) -> + get(Alg, <<"_default">>); + +get(Alg, KID0) when is_binary(Alg), is_binary(KID0) -> + Kty = kty(Alg), + KID = binary_to_list(KID0), + case ets:lookup(?MODULE, {Kty, KID}) of + [] -> + Key = get_from_config(Kty, KID), + ok = gen_server:call(?MODULE, {set, Kty, KID, Key}), + Key; + [{{Kty, KID}, Key}] -> + Key + end. + + +start_link() -> + gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). + +% gen_server functions + +init(_) -> + ok = config:listen_for_changes(?MODULE, nil), + ets:new(?MODULE, [public, named_table]), + {ok, nil}. + + +handle_call({set, Kty, KID, Key}, _From, State) -> + true = ets:insert(?MODULE, {{Kty, KID}, Key}), + {reply, ok, State}. + + +handle_cast({delete, Kty, KID}, State) -> + true = ets:delete(?MODULE, {Kty, KID}), + {noreply, State}; + +handle_cast(_Msg, State) -> + {noreply, State}. + + +handle_info(restart_config_listener, State) -> + ok = config:listen_for_changes(?MODULE, nil), + {noreply, State}; + +handle_info(_Msg, State) -> + {noreply, State}. + + +terminate(_Reason, _State) -> + ok. + + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. + + +% config listener callback + +handle_config_change("jwt_keys", ConfigKey, _ConfigValue, _, _) -> + case string:split(ConfigKey, ":") of + [Kty, KID] -> + gen_server:cast(?MODULE, {delete, Kty, KID}); + _ -> + ignored + end, + {ok, nil}; + +handle_config_change(_, _, _, _, _) -> + {ok, nil}. + +handle_config_terminate(_Server, stop, _State) -> + ok; + +handle_config_terminate(_Server, _Reason, _State) -> + erlang:send_after(100, whereis(?MODULE), restart_config_listener). 
+ +% private functions + +get_from_config(Kty, KID) -> + case config:get("jwt_keys", string:join([Kty, KID], ":")) of + undefined -> + throw({bad_request, <<"Unknown kid">>}); + Encoded -> + case Kty of + "hmac" -> + try + base64:decode(Encoded) + catch + error:_ -> + throw({bad_request, <<"Not a valid key">>}) + end; + "rsa" -> + case pem_decode(Encoded) of + #'RSAPublicKey'{} = Key -> + Key; + _ -> + throw({bad_request, <<"not an RSA public key">>}) + end; + "ec" -> + case pem_decode(Encoded) of + {#'ECPoint'{}, _} = Key -> + Key; + _ -> + throw({bad_request, <<"not an EC public key">>}) + end + end + end. + +pem_decode(PEM) -> + BinPEM = re:replace(PEM, "\\\\n", "\n", [global, {return, binary}]), + try + case public_key:pem_decode(BinPEM) of + [PEMEntry] -> + public_key:pem_entry_decode(PEMEntry); + _ -> + throw({bad_request, <<"Not a valid key">>}) + end + catch + error:_ -> + throw({bad_request, <<"Not a valid key">>}) + end. + +kty(<<"HS", _/binary>>) -> + "hmac"; + +kty(<<"RS", _/binary>>) -> + "rsa"; + +kty(<<"ES", _/binary>>) -> + "ec"; + +kty(_) -> + throw({bad_request, <<"Unknown kty">>}). diff --git a/src/jwtf/src/jwtf_sup.erl b/src/jwtf/src/jwtf_sup.erl new file mode 100644 index 00000000000..6f44808dee4 --- /dev/null +++ b/src/jwtf/src/jwtf_sup.erl @@ -0,0 +1,38 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(jwtf_sup). + +-behaviour(supervisor). + +%% API +-export([start_link/0]). + +%% Supervisor callbacks +-export([init/1]). 
+ +%% Helper macro for declaring children of supervisor +-define(CHILD(I, Type), {I, {I, start_link, []}, permanent, 5000, Type, [I]}). + +%% =================================================================== +%% API functions +%% =================================================================== + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + +%% =================================================================== +%% Supervisor callbacks +%% =================================================================== + +init([]) -> + {ok, { {one_for_one, 5, 10}, [?CHILD(jwtf_keystore, worker)]} }. diff --git a/src/jwtf/test/jwtf_keystore_tests.erl b/src/jwtf/test/jwtf_keystore_tests.erl new file mode 100644 index 00000000000..acbc002b5b9 --- /dev/null +++ b/src/jwtf/test/jwtf_keystore_tests.erl @@ -0,0 +1,64 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(jwtf_keystore_tests). + +-include_lib("eunit/include/eunit.hrl"). +-include_lib("public_key/include/public_key.hrl"). + +-define(HMAC_SECRET, "aGVsbG8="). 
+-define(RSA_SECRET, "-----BEGIN PUBLIC KEY-----\\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAztanwQtIx0sms+x7m1SF\\nh7EHJHkM2biTJ41jR89FsDE2gd3MChpaqxemS5GpNvfFKRvuHa4PUZ3JtRCBG1KM\\n/7EWIVTy1JQDr2mb8couGlQNqz4uXN2vkNQ0XszgjU4Wn6ZpvYxmqPFbmkRe8QSn\\nAy2Wf8jQgjsbez8eaaX0G9S1hgFZUN3KFu7SVmUDQNvWpQdaJPP+ms5Z0CqF7JLa\\nvJmSdsU49nlYw9VH/XmwlUBMye6HgR4ZGCLQS85frqF0xLWvi7CsMdchcIjHudXH\\nQK1AumD/VVZVdi8Q5Qew7F6VXeXqnhbw9n6Px25cCuNuh6u5+E6GUzXRrMpqo9vO\\nqQIDAQAB\\n-----END PUBLIC KEY-----\\n"). +-define(BAD_RSA_SECRET,"-----BEGIN PUBLIC KEY-----\\nMIIDAzCCAeugAwIBAgIJAL5YnwkF5jT6MA0GCSqGSIb3DQEBBQUAMBgxFjAUBgNV\\nBAMMDWZvby5hdXRoMC5jb20wHhcNMTQwMzE4MjAwNzUwWhcNMjcxMTI1MjAwNzUw\\nWjAYMRYwFAYDVQQDDA1mb28uYXV0aDAuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOC\\nAQ8AMIIBCgKCAQEAtP6w43ppU0nkqGNHASojFJl60+k3isNVzYTO06f2vm/5tc3l\\nRhEA6ykyIuO8tHY3Ziqowc4h8XGaeDKqHw/BSS/b54F2rUVb/wACWyJICkM3bGtC\\ntWmM7kU8XZRCqXV04qIgQte+9GFSOax/TFyotS+FGFyFPUY+b57H7/6wNQ8ywGLi\\nWCbrWEx4wOJbGhnVNV+STmZXJgToLgz0R2kwsiGURhHMkNkUjcRl34nSv+lMYSMK\\nyywwzu0k3KBgqkxWibU3pa3jibWVRxc20f8ltfByp/wU/ICQ0MNGJ3/KaCiOtGQa\\noZOa7bMzb4W1x2L3cfgrshLrp978+FEeNzY9KQIDAQABo1AwTjAdBgNVHQ4EFgQU\\nOyDe79RE2SYTcCNPbniw3p4uZykwHwYDVR0jBBgwFoAUOyDe79RE2SYTcCNPbniw\\n3p4uZykwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAW0mB5wR1sSHC\\n7iSmQo1uioH80X7txJY6zXH8hVjoCQOGUCi79x43L9wUTtyJg44Z8RhNozWOsCZM\\nf5LDSkeNx48QITrinDqWv5C/NA0klJ1g0Y/jN9X01r5T6vGdge8inIbQcO7ZrJ6v\\nVYDH+9HLvfPKFYd0uhYRFnw2aa3mKIRsanlWSEYHQr5Aoa+nboFLRiDtVWBuiAoV\\nZ1NoYm7uheU42CNGJqkv6SXxKHTea2TjmOxKRmaxYMvkjk/CsiPrSEQHUxDXqSSd\\nrIWU8o+9q9Hpdb3UuNJzMjlTzg2/UeHpzMBJAWxUlzTuXMqrrDFF9V/d4zO77Ts/\\n4mRBKB+GsQ==\\n-----END PUBLIC KEY-----\\n"). + +-define(EC_SECRET, "-----BEGIN PUBLIC KEY-----\\nMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEDsr0lz/Dg3luarb+Kua0Wcj9WrfR23os\\nwHzakglb8GhWRDn+oZT0Bt/26sX8uB4/ij9PEOLHPo+IHBtX4ELFFVr5GTzlqcJe\\nyctaTDd1OOAPXYuc67EWtGZ3pDAzztRs\\n-----END PUBLIC KEY-----\\n"). 
+ +setup() -> + test_util:start_applications([config, jwtf]), + config:set("jwt_keys", "hmac:hmac", ?HMAC_SECRET), + config:set("jwt_keys", "rsa:hmac", ?HMAC_SECRET), + config:set("jwt_keys", "ec:hmac", ?HMAC_SECRET), + + config:set("jwt_keys", "hmac:rsa", ?RSA_SECRET), + config:set("jwt_keys", "rsa:rsa", ?RSA_SECRET), + config:set("jwt_keys", "ec:rsa", ?RSA_SECRET), + + config:set("jwt_keys", "hmac:ec", ?EC_SECRET), + config:set("jwt_keys", "rsa:ec", ?EC_SECRET), + config:set("jwt_keys", "ec:ec", ?EC_SECRET), + + config:set("jwt_keys", "rsa:badrsa", ?BAD_RSA_SECRET). + + +teardown(_) -> + test_util:stop_applications([config, jwtf]). + +jwtf_keystore_test_() -> + { + setup, + fun setup/0, + fun teardown/1, + [ + ?_assertEqual(<<"hello">>, jwtf_keystore:get(<<"HS256">>, <<"hmac">>)), + ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"RS256">>, <<"hmac">>)), + ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"ES256">>, <<"hmac">>)), + + ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"HS256">>, <<"rsa">>)), + ?_assertMatch(#'RSAPublicKey'{}, jwtf_keystore:get(<<"RS256">>, <<"rsa">>)), + ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"ES256">>, <<"rsa">>)), + + ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"HS256">>, <<"ec">>)), + ?_assertThrow({bad_request, _}, jwtf_keystore:get(<<"RS256">>, <<"ec">>)), + ?_assertMatch({#'ECPoint'{}, _}, jwtf_keystore:get(<<"ES256">>, <<"ec">>)), + + ?_assertThrow({bad_request, <<"Not a valid key">>}, jwtf_keystore:get(<<"RS256">>, <<"badrsa">>)) + ] + }. diff --git a/src/jwtf/test/jwtf_tests.erl b/src/jwtf/test/jwtf_tests.erl new file mode 100644 index 00000000000..ba944f7c713 --- /dev/null +++ b/src/jwtf/test/jwtf_tests.erl @@ -0,0 +1,317 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. 
You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(jwtf_tests). + +-include_lib("eunit/include/eunit.hrl"). +-include_lib("public_key/include/public_key.hrl"). + +encode(Header0, Payload0) -> + Header1 = b64url:encode(jiffy:encode(Header0)), + Payload1 = b64url:encode(jiffy:encode(Payload0)), + Sig = b64url:encode(<<"bad">>), + <>. + +valid_header() -> + {[{<<"typ">>, <<"JWT">>}, {<<"alg">>, <<"RS256">>}]}. + +jwt_io_pubkey() -> + PublicKeyPEM = <<"-----BEGIN PUBLIC KEY-----\n" + "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDdlatRjRjogo3WojgGH" + "FHYLugdUWAY9iR3fy4arWNA1KoS8kVw33cJibXr8bvwUAUparCwlvdbH6" + "dvEOfou0/gCFQsHUfQrSDv+MuSUMAe8jzKE4qW+jK+xQU9a03GUnKHkkl" + "e+Q0pX/g6jXZ7r1/xAK5Do2kQ+X5xK9cipRgEKwIDAQAB\n" + "-----END PUBLIC KEY-----\n">>, + [PEMEntry] = public_key:pem_decode(PublicKeyPEM), + public_key:pem_entry_decode(PEMEntry). + + +b64_badarg_test() -> + Encoded = <<"0.0.0">>, + ?assertEqual({error, {bad_request,badarg}}, + jwtf:decode(Encoded, [], nil)). + + +b64_bad_block_test() -> + Encoded = <<" aGVsbG8. aGVsbG8. aGVsbG8">>, + ?assertEqual({error, {bad_request,{bad_block,0}}}, + jwtf:decode(Encoded, [], nil)). + + +invalid_json_test() -> + Encoded = <<"fQ.fQ.fQ">>, + ?assertEqual({error, {bad_request,{1,invalid_json}}}, + jwtf:decode(Encoded, [], nil)). + + +truncated_json_test() -> + Encoded = <<"ew.ew.ew">>, + ?assertEqual({error, {bad_request,{2,truncated_json}}}, + jwtf:decode(Encoded, [], nil)). + + +missing_typ_test() -> + Encoded = encode({[]}, []), + ?assertEqual({error, {bad_request,<<"Missing typ header parameter">>}}, + jwtf:decode(Encoded, [typ], nil)). 
+ + +invalid_typ_test() -> + Encoded = encode({[{<<"typ">>, <<"NOPE">>}]}, []), + ?assertEqual({error, {bad_request,<<"Invalid typ header parameter">>}}, + jwtf:decode(Encoded, [typ], nil)). + + +missing_alg_test() -> + Encoded = encode({[]}, []), + ?assertEqual({error, {bad_request,<<"Missing alg header parameter">>}}, + jwtf:decode(Encoded, [alg], nil)). + + +invalid_alg_test() -> + Encoded = encode({[{<<"alg">>, <<"NOPE">>}]}, []), + ?assertEqual({error, {bad_request,<<"Invalid alg header parameter">>}}, + jwtf:decode(Encoded, [alg], nil)). + + +missing_iss_test() -> + Encoded = encode(valid_header(), {[]}), + ?assertEqual({error, {bad_request,<<"Missing iss claim">>}}, + jwtf:decode(Encoded, [{iss, right}], nil)). + + +invalid_iss_test() -> + Encoded = encode(valid_header(), {[{<<"iss">>, <<"wrong">>}]}), + ?assertEqual({error, {bad_request,<<"Invalid iss claim">>}}, + jwtf:decode(Encoded, [{iss, right}], nil)). + + +missing_iat_test() -> + Encoded = encode(valid_header(), {[]}), + ?assertEqual({error, {bad_request,<<"Missing iat claim">>}}, + jwtf:decode(Encoded, [iat], nil)). + + +invalid_iat_test() -> + Encoded = encode(valid_header(), {[{<<"iat">>, <<"hello">>}]}), + ?assertEqual({error, {bad_request,<<"Invalid iat claim">>}}, + jwtf:decode(Encoded, [iat], nil)). + + +missing_nbf_test() -> + Encoded = encode(valid_header(), {[]}), + ?assertEqual({error, {bad_request,<<"Missing nbf claim">>}}, + jwtf:decode(Encoded, [nbf], nil)). + + +invalid_nbf_test() -> + Encoded = encode(valid_header(), {[{<<"nbf">>, 2 * now_seconds()}]}), + ?assertEqual({error, {unauthorized, <<"nbf not in past">>}}, + jwtf:decode(Encoded, [nbf], nil)). + + +missing_exp_test() -> + Encoded = encode(valid_header(), {[]}), + ?assertEqual({error, {bad_request, <<"Missing exp claim">>}}, + jwtf:decode(Encoded, [exp], nil)). 
+ + +invalid_exp_test() -> + Encoded = encode(valid_header(), {[{<<"exp">>, 0}]}), + ?assertEqual({error, {unauthorized, <<"exp not in future">>}}, + jwtf:decode(Encoded, [exp], nil)). + + +missing_kid_test() -> + Encoded = encode({[]}, {[]}), + ?assertEqual({error, {bad_request, <<"Missing kid claim">>}}, + jwtf:decode(Encoded, [kid], nil)). + + +public_key_not_found_test() -> + Encoded = encode( + {[{<<"alg">>, <<"RS256">>}, {<<"kid">>, <<"1">>}]}, + {[]}), + KS = fun(_, _) -> throw(not_found) end, + Expected = {error, not_found}, + ?assertEqual(Expected, jwtf:decode(Encoded, [], KS)). + + +bad_rs256_sig_test() -> + Encoded = encode( + {[{<<"typ">>, <<"JWT">>}, {<<"alg">>, <<"RS256">>}]}, + {[]}), + KS = fun(<<"RS256">>, undefined) -> jwt_io_pubkey() end, + ?assertEqual({error, {bad_request, <<"Bad signature">>}}, + jwtf:decode(Encoded, [], KS)). + + +bad_hs256_sig_test() -> + Encoded = encode( + {[{<<"typ">>, <<"JWT">>}, {<<"alg">>, <<"HS256">>}]}, + {[]}), + KS = fun(<<"HS256">>, undefined) -> <<"bad">> end, + ?assertEqual({error, {bad_request, <<"Bad HMAC">>}}, + jwtf:decode(Encoded, [], KS)). + + +malformed_token_test() -> + ?assertEqual({error, {bad_request, <<"Malformed token">>}}, + jwtf:decode(<<"a.b.c.d">>, [], nil)). + +unknown_atom_check_test() -> + ?assertError({unknown_checks, [foo, bar]}, + jwtf:decode(<<"a.b.c">>, [exp, foo, iss, bar], nil)). + +unknown_binary_check_test() -> + ?assertError({unknown_checks, [<<"bar">>]}, + jwtf:decode(<<"a.b.c">>, [exp, iss, <<"bar">>], nil)). + +duplicate_check_test() -> + ?assertError({duplicate_checks, [exp]}, + jwtf:decode(<<"a.b.c">>, [exp, exp], nil)). 
+ + +%% jwt.io generated +hs256_test() -> + EncodedToken = <<"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6IjEyMzQ1Ni" + "J9.eyJpc3MiOiJodHRwczovL2Zvby5jb20iLCJpYXQiOjAsImV4cCI" + "6MTAwMDAwMDAwMDAwMDAsImtpZCI6ImJhciJ9.iS8AH11QHHlczkBn" + "Hl9X119BYLOZyZPllOVhSBZ4RZs">>, + KS = fun(<<"HS256">>, <<"123456">>) -> <<"secret">> end, + Checks = [{iss, <<"https://foo.com">>}, iat, exp, typ, alg, kid], + ?assertMatch({ok, _}, catch jwtf:decode(EncodedToken, Checks, KS)). + + +%% pip install PyJWT +%% > import jwt +%% > jwt.encode({'foo':'bar'}, 'secret', algorithm='HS384') +hs384_test() -> + EncodedToken = <<"eyJhbGciOiJIUzM4NCIsInR5cCI6IkpXVCJ9.eyJmb28iOiJiYXIif" + "Q.2quwghs6I56GM3j7ZQbn-ASZ53xdBqzPzTDHm_CtVec32LUy-Ezy" + "L3JjIe7WjL93">>, + KS = fun(<<"HS384">>, _) -> <<"secret">> end, + ?assertMatch({ok, {[{<<"foo">>,<<"bar">>}]}}, + catch jwtf:decode(EncodedToken, [], KS)). + + +%% pip install PyJWT +%% > import jwt +%% > jwt.encode({'foo':'bar'}, 'secret', algorithm='HS512') +hs512_test() -> + EncodedToken = <<"eyJhbGciOiJIUzUxMiIsInR5cCI6IkpXVCJ9.eyJmb28iOiJiYX" + "IifQ.WePl7achkd0oGNB8XRF_LJwxlyiPZqpdNgdKpDboAjSTsW" + "q-aOGNynTp8TOv8KjonFym8vwFwppXOLoLXbkIaQ">>, + KS = fun(<<"HS512">>, _) -> <<"secret">> end, + ?assertMatch({ok, {[{<<"foo">>,<<"bar">>}]}}, + catch jwtf:decode(EncodedToken, [], KS)). + + +%% jwt.io generated +rs256_test() -> + EncodedToken = <<"eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0N" + "TY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.Ek" + "N-DOsnsuRjRO6BxXemmJDm3HbxrbRzXglbN2S4sOkopdU4IsDxTI8j" + "O19W_A4K8ZPJijNLis4EZsHeY559a4DFOd50_OqgHGuERTqYZyuhtF" + "39yxJPAjUESwxk2J5k_4zM3O-vtd1Ghyo4IbqKKSy6J9mTniYJPenn" + "5-HIirE">>, + + Checks = [sig, alg], + KS = fun(<<"RS256">>, undefined) -> jwt_io_pubkey() end, + + ExpectedPayload = {[ + {<<"sub">>, <<"1234567890">>}, + {<<"name">>, <<"John Doe">>}, + {<<"admin">>, true} + ]}, + + ?assertMatch({ok, ExpectedPayload}, jwtf:decode(EncodedToken, Checks, KS)). 
+ + +encode_missing_alg_test() -> + ?assertEqual({error, {bad_request, <<"Missing alg header parameter">>}}, + jwtf:encode({[]}, {[]}, <<"foo">>)). + + +encode_invalid_alg_test() -> + ?assertEqual({error, {bad_request, <<"Invalid alg header parameter">>}}, + jwtf:encode({[{<<"alg">>, <<"BOGUS">>}]}, {[]}, <<"foo">>)). + + +encode_decode_test_() -> + [{Alg, encode_decode(Alg)} || Alg <- jwtf:valid_algorithms()]. + + +encode_decode(Alg) -> + {EncodeKey, DecodeKey} = case jwtf:verification_algorithm(Alg) of + {public_key, _Algorithm} -> + create_keypair(); + {hmac, _Algorithm} -> + Key = <<"a-super-secret-key">>, + {Key, Key} + end, + Claims = claims(), + {ok, Encoded} = jwtf:encode(header(Alg), Claims, EncodeKey), + KS = fun(_, _) -> DecodeKey end, + {ok, Decoded} = jwtf:decode(Encoded, [], KS), + ?_assertMatch(Claims, Decoded). + + +header(Alg) -> + {[ + {<<"typ">>, <<"JWT">>}, + {<<"alg">>, Alg}, + {<<"kid">>, <<"20170520-00:00:00">>} + ]}. + + +claims() -> + EpochSeconds = os:system_time(second), + {[ + {<<"iat">>, EpochSeconds}, + {<<"exp">>, EpochSeconds + 3600} + ]}. 
+ +create_keypair() -> + %% https://tools.ietf.org/html/rfc7517#appendix-C + N = decode(<<"t6Q8PWSi1dkJj9hTP8hNYFlvadM7DflW9mWepOJhJ66w7nyoK1gPNqFMSQRy" + "O125Gp-TEkodhWr0iujjHVx7BcV0llS4w5ACGgPrcAd6ZcSR0-Iqom-QFcNP" + "8Sjg086MwoqQU_LYywlAGZ21WSdS_PERyGFiNnj3QQlO8Yns5jCtLCRwLHL0" + "Pb1fEv45AuRIuUfVcPySBWYnDyGxvjYGDSM-AqWS9zIQ2ZilgT-GqUmipg0X" + "OC0Cc20rgLe2ymLHjpHciCKVAbY5-L32-lSeZO-Os6U15_aXrk9Gw8cPUaX1" + "_I8sLGuSiVdt3C_Fn2PZ3Z8i744FPFGGcG1qs2Wz-Q">>), + E = decode(<<"AQAB">>), + D = decode(<<"GRtbIQmhOZtyszfgKdg4u_N-R_mZGU_9k7JQ_jn1DnfTuMdSNprTeaSTyWfS" + "NkuaAwnOEbIQVy1IQbWVV25NY3ybc_IhUJtfri7bAXYEReWaCl3hdlPKXy9U" + "vqPYGR0kIXTQRqns-dVJ7jahlI7LyckrpTmrM8dWBo4_PMaenNnPiQgO0xnu" + "ToxutRZJfJvG4Ox4ka3GORQd9CsCZ2vsUDmsXOfUENOyMqADC6p1M3h33tsu" + "rY15k9qMSpG9OX_IJAXmxzAh_tWiZOwk2K4yxH9tS3Lq1yX8C1EWmeRDkK2a" + "hecG85-oLKQt5VEpWHKmjOi_gJSdSgqcN96X52esAQ">>), + RSAPrivateKey = #'RSAPrivateKey'{ + modulus = N, + publicExponent = E, + privateExponent = D + }, + RSAPublicKey = #'RSAPublicKey'{ + modulus = N, + publicExponent = E + }, + {RSAPrivateKey, RSAPublicKey}. + + +decode(Goop) -> + crypto:bytes_to_integer(b64url:decode(Goop)). + + +now_seconds() -> + {MegaSecs, Secs, _MicroSecs} = os:timestamp(), + MegaSecs * 1000000 + Secs. diff --git a/src/mango/README.md b/src/mango/README.md index 4c4bb60a672..7cec1af3561 100644 --- a/src/mango/README.md +++ b/src/mango/README.md @@ -7,18 +7,37 @@ A MongoDB inspired query language interface for Apache CouchDB. Motivation ---------- -Mango provides a single HTTP API endpoint that accepts JSON bodies via HTTP POST. These bodies provide a set of instructions that will be handled with the results being returned to the client in the same order as they were specified. The general principle of this API is to be simple to implement on the client side while providing users a more natural conversion to Apache CouchDB than would otherwise exist using the standard RESTful HTTP interface that already exists. 
+Mango provides a single HTTP API endpoint that accepts JSON bodies via +HTTP POST. These bodies provide a set of instructions that will be +handled with the results being returned to the client in the same +order as they were specified. The general principle of this API is to +be simple to implement on the client side while providing users a more +natural conversion to Apache CouchDB than would otherwise exist using +the standard RESTful HTTP interface that already exists. Actions ------- -The general API exposes a set of actions that are similar to what MongoDB exposes (although not all of MongoDB's API is supported). These are meant to be loosely and obviously inspired by MongoDB but without too much attention to maintaining the exact behavior. - -Each action is specified as a JSON object with a number of keys that affect the behavior. Each action object has at least one field named "action" which must -have a string value indicating the action to be performed. For each action there are zero or more fields that will affect behavior. Some of these fields are required and some are optional. - -For convenience, the HTTP API will accept a JSON body that is either a single JSON object which specifies a single action or a JSON array that specifies a list of actions that will then be invoked serially. While multiple commands can be batched into a single HTTP request, there are no guarantees about atomicity or isolation for a batch of commands. +The general API exposes a set of actions that are similar to what +MongoDB exposes (although not all of MongoDB's API is +supported). These are meant to be loosely and obviously inspired by +MongoDB but without too much attention to maintaining the exact +behavior. + +Each action is specified as a JSON object with a number of keys that +affect the behavior. Each action object has at least one field named +"action" which must have a string value indicating the action to be +performed. 
For each action there are zero or more fields that will +affect behavior. Some of these fields are required and some are +optional. + +For convenience, the HTTP API will accept a JSON body that is either a +single JSON object which specifies a single action or a JSON array +that specifies a list of actions that will then be invoked +serially. While multiple commands can be batched into a single HTTP +request, there are no guarantees about atomicity or isolation for a +batch of commands. Activating Query on a cluster -------------------------------------------- @@ -32,24 +51,36 @@ rpc:multicall(config, set, ["native_query_servers", "query", "{mango_native_proc HTTP API ======== -This API adds a single URI endpoint to the existing CouchDB HTTP API. Creating databases, authentication, Map/Reduce views, etc are all still supported exactly as currently document. No existing behavior is changed. +This API adds a single URI endpoint to the existing CouchDB HTTP +API. Creating databases, authentication, Map/Reduce views, etc are all +still supported exactly as currently document. No existing behavior is +changed. -The endpoint added is for the URL pattern `/dbname/_query` and has the following characteristics: +The endpoint added is for the URL pattern `/dbname/_query` and has the +following characteristics: * The only HTTP method supported is `POST`. * The request `Content-Type` must be `application/json`. * The response status code will either be `200`, `4XX`, or `5XX` * The response `Content-Type` will be `application/json` * The response `Transfer-Encoding` will be `chunked`. -* The response is a single JSON object or array that matches to the single command or list of commands that exist in the request. +* The response is a single JSON object or array that matches to the + single command or list of commands that exist in the request. -This is intended to be a significantly simpler use of HTTP than the current APIs. 
This is motivated by the fact that this entire API is aimed at customers who are not as savvy at HTTP or non-relational document stores. Once a customer is comfortable using this API we hope to expose any other "power features" through the existing HTTP API and its adherence to HTTP semantics. +This is intended to be a significantly simpler use of HTTP than the +current APIs. This is motivated by the fact that this entire API is +aimed at customers who are not as savvy at HTTP or non-relational +document stores. Once a customer is comfortable using this API we hope +to expose any other "power features" through the existing HTTP API and +its adherence to HTTP semantics. Supported Actions ================= -This is a list of supported actions that Mango understands. For the time being it is limited to the four normal CRUD actions plus one meta action to create indices on the database. +This is a list of supported actions that Mango understands. For the +time being it is limited to the four normal CRUD actions plus one meta +action to create indices on the database. insert ------ @@ -62,9 +93,15 @@ Keys: * docs - The JSON document to insert * w (optional) (default: 2) - An integer > 0 for the write quorum size -If the provided document or documents do not contain an "\_id" field one will be added using an automatically generated UUID. +If the provided document or documents do not contain an "\_id" field +one will be added using an automatically generated UUID. -It is more performant to specify multiple documents in the "docs" field than it is to specify multiple independent insert actions. Each insert action is submitted as a single bulk update (ie, \_bulk\_docs in CouchDB terminology). This, however, does not make any guarantees on the isolation or atomicity of the bulk operation. It is merely a performance benefit. +It is more performant to specify multiple documents in the "docs" +field than it is to specify multiple independent insert actions. 
Each +insert action is submitted as a single bulk update (ie, \_bulk\_docs +in CouchDB terminology). This, however, does not make any guarantees +on the isolation or atomicity of the bulk operation. It is merely a +performance benefit. find @@ -76,18 +113,41 @@ Keys: * action - "find" * selector - JSON object following selector syntax, described below -* limit (optional) (default: 25) - integer >= 0, Limit the number of rows returned -* skip (optional) (default: 0) - integer >= 0, Skip the specified number of rows -* sort (optional) (default: []) - JSON array following sort syntax, described below -* fields (optional) (default: null) - JSON array following the field syntax, described below -* r (optional) (default: 1) - By default a find will return the document that was found when traversing the index. Optionally there can be a quorum read for each document using `r` as the read quorum. This is obviously less performant than using the document local to the index. -* conflicts (optional) (default: false) - boolean, whether or not to include information about any existing conflicts for the document. - -The important thing to note about the find command is that it must execute over a generated index. If a selector is provided that cannot be satisfied using an existing index the list of basic indices that could be used will be returned. - -For the most part, indices are generated in response to the "create\_index" action (described below) although there are two special indices that can be used as well. The "\_id" is automatically indexed and is similar to every other index. There is also a special "\_seq" index to retrieve documents in the order of their update sequence. - -Its also quite possible to generate a query that can't be satisfied by any index. In this case an error will be returned stating that fact. Generally speaking the easiest way to stumble onto this is to attempt to OR two separate fields which would require a complete table scan. 
In the future I expect to support these more complicated queries using an extended indexing API (which deviates from the current MongoDB model a bit). +* limit (optional) (default: 25) - integer >= 0, Limit the number of + rows returned +* skip (optional) (default: 0) - integer >= 0, Skip the specified + number of rows +* sort (optional) (default: []) - JSON array following sort syntax, + described below +* fields (optional) (default: null) - JSON array following the field + syntax, described below +* r (optional) (default: 1) - By default a find will return the + document that was found when traversing the index. Optionally there + can be a quorum read for each document using `r` as the read + quorum. This is obviously less performant than using the document + local to the index. +* conflicts (optional) (default: false) - boolean, whether or not to + include information about any existing conflicts for the document. + +The important thing to note about the find command is that it must +execute over a generated index. If a selector is provided that cannot +be satisfied using an existing index the list of basic indices that +could be used will be returned. + +For the most part, indices are generated in response to the +"create\_index" action (described below) although there are two +special indices that can be used as well. The "\_id" is automatically +indexed and is similar to every other index. There is also a special +"\_seq" index to retrieve documents in the order of their update +sequence. + +Its also quite possible to generate a query that can't be satisfied by +any index. In this case an error will be returned stating that +fact. Generally speaking the easiest way to stumble onto this is to +attempt to OR two separate fields which would require a complete table +scan. In the future I expect to support these more complicated queries +using an extended indexing API (which deviates from the current +MongoDB model a bit). 
update @@ -100,15 +160,24 @@ Keys: * action - "update" * selector - JSON object following selector syntax, described below * update - JSON object following update syntax, described below -* upsert - (optional) (default: false) - boolean, Whether or not to create a new document if the selector does not match any documents in the database -* limit (optional) (default: 1) - integer > 0, How many documents returned from the selector should be modified. Currently has a maximum value of 100 -* sort - (optional) (default: []) - JSON array following sort syntax, described below +* upsert - (optional) (default: false) - boolean, Whether or not to + create a new document if the selector does not match any documents + in the database +* limit (optional) (default: 1) - integer > 0, How many documents + returned from the selector should be modified. Currently has a + maximum value of 100 +* sort - (optional) (default: []) - JSON array following sort syntax, + described below * r (optional) (default: 1) - integer > 0, read quorum constant * w (optional) (default: 2) - integer > 0, write quorum constant -Updates are fairly straightforward other than to mention that the selector (like find) must be satisifiable using an existing index. +Updates are fairly straightforward other than to mention that the +selector (like find) must be satisifiable using an existing index. -On the update field, if the provided JSON object has one or more update operator (described below) then the operation is applied onto the existing document (if one exists) else the entire contents are replaced with exactly the value of the `update` field. +On the update field, if the provided JSON object has one or more +update operator (described below) then the operation is applied onto +the existing document (if one exists) else the entire contents are +replaced with exactly the value of the `update` field. 
delete @@ -120,15 +189,24 @@ Keys: * action - "delete" * selector - JSON object following selector syntax, described below -* force (optional) (default: false) - Delete all conflicted versions of the document as well -* limit - (optional) (default: 1) - integer > 0, How many documents to delete from the database. Currently has a maximum value of 100 -* sort - (optional) (default: []) - JSON array following sort syntax, described below +* force (optional) (default: false) - Delete all conflicted versions + of the document as well +* limit - (optional) (default: 1) - integer > 0, How many documents to + delete from the database. Currently has a maximum value of 100 +* sort - (optional) (default: []) - JSON array following sort syntax, + described below * r (optional) (default: 1) - integer > 1, read quorum constant * w (optional) (default: 2) - integer > 0, write quorum constant -Deletes behave quite similarly to update except they attempt to remove documents from the database. Its important to note that if a document has conflicts it may "appear" that delete's aren't having an effect. This is because the delete operation by default only removes a single revision. Specify `"force":true` if you would like to attempt to delete all live revisions. +Deletes behave quite similarly to update except they attempt to remove +documents from the database. Its important to note that if a document +has conflicts it may "appear" that delete's aren't having an +effect. This is because the delete operation by default only removes a +single revision. Specify `"force":true` if you would like to attempt +to delete all live revisions. -If you wish to delete a specific revision of the document, you can specify it in the selector using the special "\_rev" field. +If you wish to delete a specific revision of the document, you can +specify it in the selector using the special "\_rev" field. 
create\_index @@ -140,17 +218,43 @@ Keys: * action - "create\_index" * index - JSON array following sort syntax, described below -* type (optional) (default: "json") - string, specifying the index type to create. Currently only "json" indexes are supported but in the future we will provide full-text indexes as well as Geo spatial indexes -* name (optional) - string, optionally specify a name for the index. If a name is not provided one will be automatically generated -* ddoc (optional) - Indexes can be grouped into design documents underneath the hood for efficiency. This is an advanced feature. Don't specify a design document here unless you know the consequences of index invalidation. By default each index is placed in its own separate design document for isolation. - -Anytime an operation is required to locate a document in the database it is required that an index must exist that can be used to locate it. By default the only two indices that exist are for the document "\_id" and the special "\_seq" index. - -Indices are created in the background. If you attempt to create an index on a large database and then immediately utilize it, the request may block for a considerable amount of time before the request completes. - -Indices can specify multiple fields to index simultaneously. This is roughly analogous to a compound index in SQL with the corresponding tradeoffs. For instance, an index may contain the (ordered set of) fields "foo", "bar", and "baz". If a selector specifying "bar" is received, it can not be answered. Although if a selector specifying "foo" and "bar" is received, it can be answered more efficiently than if there were only an index on "foo" and "bar" independently. - -NB: while the index allows the ability to specify sort directions these are currently not supported. The sort direction must currently be specified as "asc" in the JSON. 
[INTERNAL]: This will require that we patch the view engine as well as the cluster coordinators in Fabric to follow the specified sort orders. The concepts are straightforward but the implementation may need some thought to fit into the current shape of things. +* type (optional) (default: "json") - string, specifying the index + type to create. Currently only "json" indexes are supported but in + the future we will provide full-text indexes as well as Geo spatial + indexes +* name (optional) - string, optionally specify a name for the + index. If a name is not provided one will be automatically generated +* ddoc (optional) - Indexes can be grouped into design documents + underneath the hood for efficiency. This is an advanced + feature. Don't specify a design document here unless you know the + consequences of index invalidation. By default each index is placed + in its own separate design document for isolation. + +Anytime an operation is required to locate a document in the database +it is required that an index must exist that can be used to locate +it. By default the only two indices that exist are for the document +"\_id" and the special "\_seq" index. + +Indices are created in the background. If you attempt to create an +index on a large database and then immediately utilize it, the request +may block for a considerable amount of time before the request +completes. + +Indices can specify multiple fields to index simultaneously. This is +roughly analogous to a compound index in SQL with the corresponding +tradeoffs. For instance, an index may contain the (ordered set of) +fields "foo", "bar", and "baz". If a selector specifying "bar" is +received, it can not be answered. Although if a selector specifying +"foo" and "bar" is received, it can be answered more efficiently than +if there were only an index on "foo" and "bar" independently. + +NB: while the index allows the ability to specify sort directions +these are currently not supported. 
The sort direction must currently +be specified as "asc" in the JSON. [INTERNAL]: This will require that +we patch the view engine as well as the cluster coordinators in Fabric +to follow the specified sort orders. The concepts are straightforward +but the implementation may need some thought to fit into the current +shape of things. list\_indexes @@ -172,9 +276,13 @@ Keys: * action - "delete\_index" * name - string, the index to delete -* design\_doc - string, the design doc id from which to delete the index. For auto-generated index names and design docs, you can retrieve this information from the `list\_indexes` action +* design\_doc - string, the design doc id from which to delete the + index. For auto-generated index names and design docs, you can + retrieve this information from the `list\_indexes` action -Indexes require resources to maintain. If you find that an index is no longer necessary then it can be beneficial to remove it from the database. +Indexes require resources to maintain. If you find that an index is no +longer necessary then it can be beneficial to remove it from the +database. describe\_selector @@ -186,36 +294,55 @@ Keys: * action - "describe\_selector" * selector - JSON object in selector syntax, described below -* extended (optional) (default: false) - Show information on what existing indexes could be used with this selector +* extended (optional) (default: false) - Show information on what + existing indexes could be used with this selector -This is a useful debugging utility that will show how a given selector is normalized before execution as well as information on what indexes could be used to satisfy it. +This is a useful debugging utility that will show how a given selector +is normalized before execution as well as information on what indexes +could be used to satisfy it. -If `"extended": true` is included then the list of existing indices that could be used for this selector are also returned. 
+If `"extended": true` is included then the list of existing indices +that could be used for this selector are also returned. JSON Syntax Descriptions ======================== -This API uses a few defined JSON structures for various operations. Here we'll describe each in detail. +This API uses a few defined JSON structures for various +operations. Here we'll describe each in detail. Selector Syntax --------------- -The Mango query language is expressed as a JSON object describing documents of interest. Within this structure it is also possible to express conditional logic using specially named fields. This is inspired by and intended to maintain a fairly close parity to the existing MongoDB behavior. +The Mango query language is expressed as a JSON object describing +documents of interest. Within this structure it is also possible to +express conditional logic using specially named fields. This is +inspired by and intended to maintain a fairly close parity to the +existing MongoDB behavior. As an example, the simplest selector for Mango might look something like such: +```json {"_id": "Paul"} +``` -Which would match the document named "Paul" (if one exists). Extending this example using other fields might look like such: +Which would match the document named "Paul" (if one exists). Extending +this example using other fields might look like such: +```json {"_id": "Paul", "location": "Boston"} +``` -This would match a document named "Paul" *AND* having a "location" value of "Boston". Seeing as though I'm sitting in my basement in Omaha, this is unlikely. +This would match a document named "Paul" *AND* having a "location" +value of "Boston". Seeing as though I'm sitting in my basement in +Omaha, this is unlikely. -There are two special syntax elements for the object keys in a selector. The first is that the period (full stop, or simply `.`) character denotes subfields in a document. 
For instance, here are two equivalent examples: +There are two special syntax elements for the object keys in a +selector. The first is that the period (full stop, or simply `.`) +character denotes subfields in a document. For instance, here are two +equivalent examples: {"location": {"city": "Omaha"}} {"location.city": "Omaha"} @@ -224,26 +351,36 @@ If the object's key contains the period it could be escaped with backslash, i.e. {"location\\.city": "Omaha"} -Note that the double backslash here is necessary to encode an actual single backslash. +Note that the double backslash here is necessary to encode an actual +single backslash. -The second important syntax element is the use of a dollar sign (`$`) prefix to denote operators. For example: +The second important syntax element is the use of a dollar sign (`$`) +prefix to denote operators. For example: {"age": {"$gt": 21}} In this example, we have created the boolean expression `age > 21`. -There are two core types of operators in the selector syntax: combination operators and condition operators. In general, combination operators contain groups of condition operators. We'll describe the list of each below. +There are two core types of operators in the selector syntax: +combination operators and condition operators. In general, combination +operators contain groups of condition operators. We'll describe the +list of each below. ### Implicit Operators -For the most part every operator must be of the form `{"$operator": argument}`. Though there are two implicit operators for selectors. +For the most part every operator must be of the form `{"$operator": +argument}`. Though there are two implicit operators for selectors. -First, any JSON object that is not the argument to a condition operator is an implicit `$and` operator on each field. For instance, these two examples are identical: +First, any JSON object that is not the argument to a condition +operator is an implicit `$and` operator on each field. 
For instance, +these two examples are identical: {"foo": "bar", "baz": true} {"$and": [{"foo": {"$eq": "bar"}}, {"baz": {"$eq": true}}]} -And as shown, any field that contains a JSON value that has no operators in it is an equality condition. For instance, these are equivalent: +And as shown, any field that contains a JSON value that has no +operators in it is an equality condition. For instance, these are +equivalent: {"foo": "bar"} {"foo": {"$eq": "bar"}} @@ -260,9 +397,12 @@ Although, the previous example would actually be normalized internally to this: ### Combination Operators -These operators are responsible for combining groups of condition operators. Most familiar are the standard boolean operators plus a few extra for working with JSON arrays. +These operators are responsible for combining groups of condition +operators. Most familiar are the standard boolean operators plus a few +extra for working with JSON arrays. -Each of the combining operators take a single argument that is either a condition operator or an array of condition operators. +Each of the combining operators take a single argument that is either +a condition operator or an array of condition operators. The list of combining characters: @@ -276,7 +416,13 @@ The list of combining characters: ### Condition Operators -Condition operators are specified on a per field basis and apply to the value indexed for that field. For instance, the basic "$eq" operator matches when the indexed field is equal to its argument. There is currently support for the basic equality and inequality operators as well as a number of meta operators. Some of these operators will accept any JSON argument while some require a specific JSON formatted argument. Each is noted below. +Condition operators are specified on a per field basis and apply to +the value indexed for that field. For instance, the basic "$eq" +operator matches when the indexed field is equal to its +argument. 
There is currently support for the basic equality and +inequality operators as well as a number of meta operators. Some of +these operators will accept any JSON argument while some require a +specific JSON formatted argument. Each is noted below. The list of conditional arguments: @@ -291,19 +437,28 @@ The list of conditional arguments: Object related operators -* "$exists" - boolean, check whether the field exists or not regardless of its value +* "$exists" - boolean, check whether the field exists or not + regardless of its value * "$type" - string, check the document field's type Array related operators -* "$in" - array of JSON values, the document field must exist in the list provided -* "$nin" - array of JSON values, the document field must not exist in the list provided -* "$size" - integer, special condition to match the length of an array field in a document. Non-array fields cannot match this condition. +* "$in" - array of JSON values, the document field must exist in the + list provided +* "$nin" - array of JSON values, the document field must not exist in + the list provided +* "$size" - integer, special condition to match the length of an array + field in a document. Non-array fields cannot match this condition. Misc related operators -* "$mod" - [Divisor, Remainder], where Divisor and Remainder are both positive integers (ie, greater than 0). Matches documents where (field % Divisor == Remainder) is true. This is false for any non-integer field -* "$regex" - string, a regular expression pattern to match against the document field. Only matches when the field is a string value and matches the supplied matches +* "$mod" - [Divisor, Remainder], where Divisor and Remainder are both + positive integers (ie, greater than 0). Matches documents where + (field % Divisor == Remainder) is true. This is false for any + non-integer field +* "$regex" - string, a regular expression pattern to match against the + document field. 
Only matches when the field is a string value and + matches the supplied matches Update Syntax @@ -315,19 +470,30 @@ Need to describe the syntax for update operators. Sort Syntax ----------- -The sort syntax is a basic array of field name and direction pairs. It looks like such: +The sort syntax is a basic array of field name and direction pairs. It +looks like such: [{field1: dir1} | ...] -Where field1 can be any field (dotted notation is available for sub-document fields) and dir1 can be "asc" or "desc". +Where field1 can be any field (dotted notation is available for +sub-document fields) and dir1 can be "asc" or "desc". -Note that it is highly recommended that you specify a single key per object in your sort ordering so that the order is not dependent on the combination of JSON libraries between your application and the internals of Mango's indexing engine. +Note that it is highly recommended that you specify a single key per +object in your sort ordering so that the order is not dependent on the +combination of JSON libraries between your application and the +internals of Mango's indexing engine. Fields Syntax ------------- -When retrieving documents from the database you can specify that only a subset of the fields are returned. This allows you to limit your results strictly to the parts of the document that are interesting for the local application logic. The fields returned are specified as an array. Unlike MongoDB only the fields specified are included, there is no automatic inclusion of the "\_id" or other metadata fields when a field list is included. +When retrieving documents from the database you can specify that only +a subset of the fields are returned. This allows you to limit your +results strictly to the parts of the document that are interesting for +the local application logic. The fields returned are specified as an +array. 
Unlike MongoDB only the fields specified are included, there is +no automatic inclusion of the "\_id" or other metadata fields when a +field list is included. A trivial example: @@ -344,16 +510,20 @@ POST /dbname/\_find Issue a query. -Request body is a JSON object that has the selector and the various options like limit/skip etc. Or we could post the selector and put the other options into the query string. Though I'd probably prefer to have it all in the body for consistency. +Request body is a JSON object that has the selector and the various +options like limit/skip etc. Or we could post the selector and put the +other options into the query string. Though I'd probably prefer to +have it all in the body for consistency. -Response is streamed out like a view. +Response is streamed out like a view. POST /dbname/\_index -------------------------- Request body contains the index definition. -Response body is empty and the result is returned as the status code (200 OK -> created, 3something for exists). +Response body is empty and the result is returned as the status code +(200 OK -> created, 3something for exists). GET /dbname/\_index ------------------------- diff --git a/src/mango/TODO.md b/src/mango/TODO.md index ce2d85f3dbc..95055dd0688 100644 --- a/src/mango/TODO.md +++ b/src/mango/TODO.md @@ -1,9 +1,18 @@ -* Patch the view engine to do alternative sorts. This will include both the lower level couch\_view* modules as well as the fabric coordinators. +* Patch the view engine to do alternative sorts. This will include + both the lower level couch\_view* modules as well as the fabric + coordinators. -* Patch the view engine so we can specify options when returning docs from cursors. We'll want this so that we can delete specific revisions from a document. +* Patch the view engine so we can specify options when returning docs + from cursors. We'll want this so that we can delete specific + revisions from a document. 
-* Need to figure out how to do raw collation on some indices because at -least the _id index uses it forcefully. +* Need to figure out how to do raw collation on some indices because + at least the _id index uses it forcefully. -* Add lots more to the update API. Mongo appears to be missing some pretty obvious easy functionality here. Things like managing values doing things like multiplying numbers, or common string mutations would be obvious examples. Also it could be interesting to add to the language so that you can do conditional updates based on other document attributes. Definitely not a V1 endeavor. \ No newline at end of file +* Add lots more to the update API. Mongo appears to be missing some + pretty obvious easy functionality here. Things like managing values + doing things like multiplying numbers, or common string mutations + would be obvious examples. Also it could be interesting to add to + the language so that you can do conditional updates based on other + document attributes. Definitely not a V1 endeavor. diff --git a/src/mango/rebar.config b/src/mango/rebar.config new file mode 100644 index 00000000000..e0d18443bce --- /dev/null +++ b/src/mango/rebar.config @@ -0,0 +1,2 @@ +{cover_enabled, true}. +{cover_print_enabled, true}. diff --git a/src/mango/src/mango_cursor_text.erl b/src/mango/src/mango_cursor_text.erl index 43ef84e4ce5..5989f342e02 100644 --- a/src/mango/src/mango_cursor_text.erl +++ b/src/mango/src/mango_cursor_text.erl @@ -43,7 +43,7 @@ }). 
-create(Db, Indexes, Selector, Opts0) -> +create(Db, Indexes, Selector, Opts) -> Index = case Indexes of [Index0] -> Index0; @@ -51,7 +51,7 @@ create(Db, Indexes, Selector, Opts0) -> ?MANGO_ERROR(multiple_text_indexes) end, - Opts = unpack_bookmark(couch_db:name(Db), Opts0), + Bookmark = unpack_bookmark(couch_db:name(Db), Opts), DreyfusLimit = get_dreyfus_limit(), Limit = erlang:min(DreyfusLimit, couch_util:get_value(limit, Opts, mango_opts:default_limit())), @@ -66,7 +66,8 @@ create(Db, Indexes, Selector, Opts0) -> opts = Opts, limit = Limit, skip = Skip, - fields = Fields + fields = Fields, + bookmark = Bookmark }}. @@ -90,7 +91,8 @@ execute(Cursor, UserFun, UserAcc) -> skip = Skip, selector = Selector, opts = Opts, - execution_stats = Stats + execution_stats = Stats, + bookmark = Bookmark } = Cursor, Query = mango_selector_text:convert(Selector), QueryArgs = #index_query_args{ @@ -104,7 +106,7 @@ execute(Cursor, UserFun, UserAcc) -> dbname = couch_db:name(Db), ddocid = ddocid(Idx), idx_name = mango_idx:name(Idx), - bookmark = get_bookmark(Opts), + bookmark = Bookmark, limit = Limit, skip = Skip, query_args = QueryArgs, @@ -282,7 +284,7 @@ pack_bookmark(Bookmark) -> unpack_bookmark(DbName, Opts) -> - NewBM = case lists:keyfind(bookmark, 1, Opts) of + case lists:keyfind(bookmark, 1, Opts) of {_, nil} -> []; {_, Bin} -> @@ -291,8 +293,7 @@ unpack_bookmark(DbName, Opts) -> catch _:_ -> ?MANGO_ERROR({invalid_bookmark, Bin}) end - end, - lists:keystore(bookmark, 1, Opts, {bookmark, NewBM}). + end. 
ddocid(Idx) -> diff --git a/src/mango/src/mango_cursor_view.erl b/src/mango/src/mango_cursor_view.erl index 240ef501da3..68d7c3b6228 100644 --- a/src/mango/src/mango_cursor_view.erl +++ b/src/mango/src/mango_cursor_view.erl @@ -116,7 +116,11 @@ base_args(#cursor{index = Idx, selector = Selector} = Cursor) -> start_key = StartKey, end_key = EndKey, include_docs = true, - extra = [{callback, {?MODULE, view_cb}}, {selector, Selector}] + extra = [ + {callback, {?MODULE, view_cb}}, + {selector, Selector}, + {ignore_partition_query_limit, true} + ] }. diff --git a/src/mango/src/mango_httpd.erl b/src/mango/src/mango_httpd.erl index 379d2e12713..a3f13212403 100644 --- a/src/mango/src/mango_httpd.erl +++ b/src/mango/src/mango_httpd.erl @@ -54,6 +54,8 @@ handle_req_int(#httpd{path_parts=[_, <<"_explain">> | _]} = Req, Db) -> handle_explain_req(Req, Db); handle_req_int(#httpd{path_parts=[_, <<"_find">> | _]} = Req, Db) -> handle_find_req(Req, Db); +handle_req_int(#httpd{path_parts=[_, <<"_update">> | _]} = Req, Db) -> + handle_update_req(Req, Db); handle_req_int(_, _) -> throw({not_found, missing}). @@ -198,6 +200,34 @@ handle_find_req(Req, _Db) -> chttpd:send_method_not_allowed(Req, "POST"). +handle_update_req(#httpd{method='GET'}=Req, Db) -> + % TODO retrieve vdus + ok. + +handle_update_req(#httpd{method='POST'}=Req, Db) -> + % submit a doc for validation + chttpd:validate_ctype(Req, "application/json"), + Doc = chttpd:json_body_obj(Req), + case mango_opts:validate_update(Req, Db, Doc) of + ok -> + couch_db:update_doc(Db, Doc, []), + chttpd:send_json(Req, 201, {[{ok, true}]}); + {error, Message} -> + chttpd:send_error(Req, 400, <<"bad_request">>, Message) + end; + +handle_update_req(#httpd{method='PUT'}=Req, Db) -> + % TODO add new vdu + ok. + +handle_update_req(#httpd{method='DELETE'}=Req, Db) -> + % TODO delete a vdu + ok. + +handle_update_req(Req, _Db) -> + chttpd:send_method_not_allowed(Req, "GET,PUT,POST,DELETE"). 
+ + set_user_ctx(#httpd{user_ctx=Ctx}, Db) -> {ok, NewDb} = couch_db:set_user_ctx(Db, Ctx), NewDb. diff --git a/src/mango/src/mango_httpd_handlers.erl b/src/mango/src/mango_httpd_handlers.erl index 80e5e277e2d..c8694b43aa4 100644 --- a/src/mango/src/mango_httpd_handlers.erl +++ b/src/mango/src/mango_httpd_handlers.erl @@ -19,6 +19,7 @@ url_handler(_) -> no_match. db_handler(<<"_index">>) -> fun mango_httpd:handle_req/2; db_handler(<<"_explain">>) -> fun mango_httpd:handle_req/2; db_handler(<<"_find">>) -> fun mango_httpd:handle_req/2; +db_handler(<<"_update">>) -> fun mango_httpd:handle_req/2; db_handler(_) -> no_match. design_handler(_) -> no_match. diff --git a/src/mango/src/mango_json_bookmark.erl b/src/mango/src/mango_json_bookmark.erl index 97f81cfb8c6..83fd00f2914 100644 --- a/src/mango/src/mango_json_bookmark.erl +++ b/src/mango/src/mango_json_bookmark.erl @@ -54,7 +54,7 @@ unpack(nil) -> nil; unpack(Packed) -> try - Bookmark = binary_to_term(couch_util:decodeBase64Url(Packed)), + Bookmark = binary_to_term(couch_util:decodeBase64Url(Packed), [safe]), verify(Bookmark) catch _:_ -> ?MANGO_ERROR({invalid_bookmark, Packed}) diff --git a/src/mango/src/mango_opts.erl b/src/mango/src/mango_opts.erl index 92c07f743b7..e7acce12970 100644 --- a/src/mango/src/mango_opts.erl +++ b/src/mango/src/mango_opts.erl @@ -14,7 +14,8 @@ -export([ validate_idx_create/1, - validate_find/1 + validate_find/1, + validate_update/3 ]). -export([ @@ -170,6 +171,10 @@ validate_find({Props}) -> validate(Props, Opts). +validate_update(Req, Db, Doc) -> + ok. 
% TODO actually validate + + validate_bulk_delete({Props}) -> Opts = [ {<<"docids">>, [ diff --git a/src/mango/src/mango_selector.erl b/src/mango/src/mango_selector.erl index 3ea83c22063..fc6a6d1a78d 100644 --- a/src/mango/src/mango_selector.erl +++ b/src/mango/src/mango_selector.erl @@ -138,6 +138,11 @@ norm_ops({[{<<"$allMatch">>, {_}=Arg}]}) -> norm_ops({[{<<"$allMatch">>, Arg}]}) -> ?MANGO_ERROR({bad_arg, '$allMatch', Arg}); +norm_ops({[{<<"$keyMapMatch">>, {_}=Arg}]}) -> + {[{<<"$keyMapMatch">>, norm_ops(Arg)}]}; +norm_ops({[{<<"$keyMapMatch">>, Arg}]}) -> + ?MANGO_ERROR({bad_arg, '$keyMapMatch', Arg}); + norm_ops({[{<<"$size">>, Arg}]}) when is_integer(Arg), Arg >= 0 -> {[{<<"$size">>, Arg}]}; norm_ops({[{<<"$size">>, Arg}]}) -> @@ -253,6 +258,10 @@ norm_fields({[{<<"$allMatch">>, Arg}]}, Path) -> Cond = {[{<<"$allMatch">>, norm_fields(Arg)}]}, {[{Path, Cond}]}; +norm_fields({[{<<"$keyMapMatch">>, Arg}]}, Path) -> + Cond = {[{<<"$keyMapMatch">>, norm_fields(Arg)}]}, + {[{Path, Cond}]}; + % The text operator operates against the internal % $default field. This also asserts that the $default @@ -334,6 +343,9 @@ norm_negations({[{<<"$elemMatch">>, Arg}]}) -> norm_negations({[{<<"$allMatch">>, Arg}]}) -> {[{<<"$allMatch">>, norm_negations(Arg)}]}; +norm_negations({[{<<"$keyMapMatch">>, Arg}]}) -> + {[{<<"$keyMapMatch">>, norm_negations(Arg)}]}; + % All other conditions can't introduce negations anywhere % further down the operator tree. norm_negations(Cond) -> @@ -421,7 +433,7 @@ match({[{<<"$not">>, Arg}]}, Value, Cmp) -> not match(Arg, Value, Cmp); match({[{<<"$all">>, []}]}, _, _) -> - true; + false; % All of the values in Args must exist in Values or % Values == hd(Args) if Args is a single element list % that contains a list. @@ -491,6 +503,26 @@ match({[{<<"$allMatch">>, Arg}]}, [_ | _] = Values, Cmp) -> match({[{<<"$allMatch">>, _Arg}]}, _Value, _Cmp) -> false; +% Matches when any key in the map value matches the +% sub-selector Arg. 
+match({[{<<"$keyMapMatch">>, Arg}]}, Value, Cmp) when is_tuple(Value) -> + try + lists:foreach(fun(V) -> + case match(Arg, V, Cmp) of + true -> throw(matched); + _ -> ok + end + end, [Key || {Key, _} <- element(1, Value)]), + false + catch + throw:matched -> + true; + _:_ -> + false + end; +match({[{<<"$keyMapMatch">>, _Arg}]}, _Value, _Cmp) -> + false; + % Our comparison operators are fairly straight forward match({[{<<"$lt">>, Arg}]}, Value, Cmp) -> Cmp(Value, Arg) < 0; @@ -506,7 +538,7 @@ match({[{<<"$gt">>, Arg}]}, Value, Cmp) -> Cmp(Value, Arg) > 0; match({[{<<"$in">>, []}]}, _, _) -> - true; + false; match({[{<<"$in">>, Args}]}, Values, Cmp) when is_list(Values)-> Pred = fun(Arg) -> lists:foldl(fun(Value,Match) -> diff --git a/src/mango/test/03-operator-test.py b/src/mango/test/03-operator-test.py index 935f470bb97..a67ef91f337 100644 --- a/src/mango/test/03-operator-test.py +++ b/src/mango/test/03-operator-test.py @@ -66,6 +66,15 @@ def test_empty_all_match(self): docs = self.db.find({"emptybang": {"$allMatch": {"foo": {"$eq": 2}}}}) self.assertEqual(len(docs), 0) + def test_keymap_match(self): + amdocs = [ + {"foo": {"aa": "bar", "bb": "bang"}}, + {"foo": {"cc": "bar", "bb": "bang"}}, + ] + self.db.save_docs(amdocs, w=3) + docs = self.db.find({"foo": {"$keyMapMatch": {"$eq": "aa"}}}) + self.assertEqual(len(docs), 1) + def test_in_operator_array(self): docs = self.db.find({"manager": True, "favorites": {"$in": ["Ruby", "Python"]}}) self.assertUserIds([2, 6, 7, 9, 11, 12], docs) diff --git a/src/mango/test/08-text-limit-test.py b/src/mango/test/08-text-limit-test.py index ae827813d7c..ef0509ff3a6 100644 --- a/src/mango/test/08-text-limit-test.py +++ b/src/mango/test/08-text-limit-test.py @@ -133,3 +133,13 @@ def run_bookmark_sort_check(self, size): assert json["bookmark"] != bm bm = json["bookmark"] assert len(seen_docs) == len(limit_docs.DOCS) + + def run_explain_check(self, size): + q = {"age": {"$gt": 0}} + seen_docs = set() + bm = None + results1 = 
self.db.find(q, limit=size, bookmark=bm, return_raw=True) + assert results1["bookmark"] != bm + bm = results1["bookmark"] + results2 = self.db.find(q, limit=size, bookmark=bm, explain=True) + assert results2["bookmark"] == bm diff --git a/src/mango/test/21-empty-selector-tests.py b/src/mango/test/21-empty-selector-tests.py index beb222c85b7..8fd76fcd5e0 100644 --- a/src/mango/test/21-empty-selector-tests.py +++ b/src/mango/test/21-empty-selector-tests.py @@ -35,14 +35,32 @@ def test_empty_array_or_with_age(self): docs = self.db.find({"age": 22, "$or": []}) assert len(docs) == 1 + def test_empty_array_in_with_age(self): + resp = self.db.find({"age": 22, "company": {"$in": []}}, explain=True) + self.assertEqual(resp["index"]["type"], klass.INDEX_TYPE) + docs = self.db.find({"age": 22, "company": {"$in": []}}) + assert len(docs) == 0 + def test_empty_array_and_with_age(self): - resp = self.db.find( - {"age": 22, "$and": [{"b": {"$all": []}}]}, explain=True - ) + resp = self.db.find({"age": 22, "$and": []}, explain=True) self.assertEqual(resp["index"]["type"], klass.INDEX_TYPE) docs = self.db.find({"age": 22, "$and": []}) assert len(docs) == 1 + def test_empty_array_all_age(self): + resp = self.db.find({"age": 22, "company": {"$all": []}}, explain=True) + self.assertEqual(resp["index"]["type"], klass.INDEX_TYPE) + docs = self.db.find({"age": 22, "company": {"$all": []}}) + assert len(docs) == 0 + + def test_empty_array_nested_all_with_age(self): + resp = self.db.find( + {"age": 22, "$and": [{"company": {"$all": []}}]}, explain=True + ) + self.assertEqual(resp["index"]["type"], klass.INDEX_TYPE) + docs = self.db.find({"age": 22, "$and": [{"company": {"$all": []}}]}) + assert len(docs) == 0 + def test_empty_arrays_complex(self): resp = self.db.find({"$or": [], "a": {"$in": []}}, explain=True) self.assertEqual(resp["index"]["type"], klass.INDEX_TYPE) diff --git a/src/mango/test/README.md b/src/mango/test/README.md index 509e32e4777..9eae278b142 100644 --- 
a/src/mango/test/README.md +++ b/src/mango/test/README.md @@ -11,7 +11,7 @@ To run these, do this in the Mango top level directory: $ venv/bin/nosetests To run an individual test suite: - nosetests --nocapture test/12-use-correct-index.py + nosetests --nocapture test/12-use-correct-index.py To run the tests with text index support: MANGO_TEXT_INDEXES=1 nosetests --nocapture test @@ -22,8 +22,13 @@ Test configuration The following environment variables can be used to configure the test fixtures: - * `COUCH_HOST` - root url (including port) of the CouchDB instance to run the tests against. Default is `"http://127.0.0.1:15984"`. - * `COUCH_USER` - CouchDB username (with admin premissions). Default is `"testuser"`. - * `COUCH_PASSWORD` - CouchDB password. Default is `"testpass"`. - * `COUCH_AUTH_HEADER` - Optional Authorization header value. If specified, this is used instead of basic authentication with the username/password variables above. - * `MANGO_TEXT_INDEXES` - Set to `"1"` to run the tests only applicable to text indexes. + * `COUCH_HOST` - root url (including port) of the CouchDB instance to + run the tests against. Default is `"http://127.0.0.1:15984"`. + * `COUCH_USER` - CouchDB username (with admin permissions). Default + is `"adm"`. + * `COUCH_PASSWORD` - CouchDB password. Default is `"pass"`. + * `COUCH_AUTH_HEADER` - Optional Authorization header value. If + specified, this is used instead of basic authentication with the + username/password variables above. + * `MANGO_TEXT_INDEXES` - Set to `"1"` to run the tests only + applicable to text indexes. 
diff --git a/src/mango/test/mango.py b/src/mango/test/mango.py index 03cb85f48f5..e78160f57ff 100644 --- a/src/mango/test/mango.py +++ b/src/mango/test/mango.py @@ -299,6 +299,10 @@ def setUpClass(klass): klass.db = Database("_users") user_docs.setup_users(klass.db) + @classmethod + def tearDownClass(klass): + user_docs.teardown_users(klass.db) + def setUp(self): self.db = self.__class__.db @@ -309,6 +313,10 @@ def setUpClass(klass): klass.db = Database(random_db_name()) klass.db.create(q=1, n=1) + @classmethod + def tearDownClass(klass): + klass.db.delete() + def setUp(self): self.db = self.__class__.db diff --git a/src/mango/test/user_docs.py b/src/mango/test/user_docs.py index 8f0ed2e0453..617b430c7a0 100644 --- a/src/mango/test/user_docs.py +++ b/src/mango/test/user_docs.py @@ -59,6 +59,10 @@ def setup_users(db, **kwargs): db.save_docs(copy.deepcopy(USERS_DOCS)) +def teardown_users(db): + [db.delete_doc(doc["_id"]) for doc in USERS_DOCS] + + def setup(db, index_type="view", **kwargs): db.recreate() db.save_docs(copy.deepcopy(DOCS)) diff --git a/src/mem3/src/mem3_reshard_dbdoc.erl b/src/mem3/src/mem3_reshard_dbdoc.erl index 7eb3e9f1366..4a0a35c1f7d 100644 --- a/src/mem3/src/mem3_reshard_dbdoc.erl +++ b/src/mem3/src/mem3_reshard_dbdoc.erl @@ -146,9 +146,8 @@ replicate_to_all_nodes(TimeoutMSec) -> write_shard_doc(#doc{id = Id} = Doc, Body) -> - DbName = ?l2b(config:get("mem3", "shards_db", "_dbs")), UpdatedDoc = Doc#doc{body = Body}, - couch_util:with_db(DbName, fun(Db) -> + couch_util:with_db(mem3_sync:shards_db(), fun(Db) -> try {ok, _} = couch_db:update_doc(Db, UpdatedDoc, []) catch diff --git a/src/mem3/src/mem3_rpc.erl b/src/mem3/src/mem3_rpc.erl index 0991aa745ad..5d1c62c065c 100644 --- a/src/mem3/src/mem3_rpc.erl +++ b/src/mem3/src/mem3_rpc.erl @@ -401,7 +401,7 @@ rexi_call(Node, MFA, Timeout) -> get_or_create_db(DbName, Options) -> - couch_db:open_int(DbName, [{create_if_missing, true} | Options]). + mem3_util:get_or_create_db(DbName, Options). 
-ifdef(TEST). diff --git a/src/mem3/src/mem3_shards.erl b/src/mem3/src/mem3_shards.erl index 110e227dd9e..4f332374048 100644 --- a/src/mem3/src/mem3_shards.erl +++ b/src/mem3/src/mem3_shards.erl @@ -20,6 +20,7 @@ -export([handle_config_change/5, handle_config_terminate/3]). -export([start_link/0]). +-export([opts_for_db/1]). -export([for_db/1, for_db/2, for_docid/2, for_docid/3, get/3, local/1, fold/2]). -export([for_shard_range/1]). -export([set_max_size/1]). @@ -45,6 +46,15 @@ start_link() -> gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). +opts_for_db(DbName) -> + {ok, Db} = mem3_util:ensure_exists(mem3_sync:shards_db()), + case couch_db:open_doc(Db, DbName, [ejson_body]) of + {ok, #doc{body = {Props}}} -> + mem3_util:get_shard_opts(Props); + {not_found, _} -> + erlang:error(database_does_not_exist, ?b2l(DbName)) + end. + for_db(DbName) -> for_db(DbName, []). @@ -144,8 +154,7 @@ local(DbName) -> lists:filter(Pred, for_db(DbName)). fold(Fun, Acc) -> - DbName = config:get("mem3", "shards_db", "_dbs"), - {ok, Db} = mem3_util:ensure_exists(DbName), + {ok, Db} = mem3_util:ensure_exists(mem3_sync:shards_db()), FAcc = {Db, Fun, Acc}, try {ok, LastAcc} = couch_db:fold_docs(Db, fun fold_fun/2, FAcc), @@ -309,15 +318,13 @@ fold_fun(#doc_info{}=DI, {Db, UFun, UAcc}) -> end. get_update_seq() -> - DbName = config:get("mem3", "shards_db", "_dbs"), - {ok, Db} = mem3_util:ensure_exists(DbName), + {ok, Db} = mem3_util:ensure_exists(mem3_sync:shards_db()), Seq = couch_db:get_update_seq(Db), couch_db:close(Db), Seq. 
listen_for_changes(Since) -> - DbName = config:get("mem3", "shards_db", "_dbs"), - {ok, Db} = mem3_util:ensure_exists(DbName), + {ok, Db} = mem3_util:ensure_exists(mem3_sync:shards_db()), Args = #changes_args{ feed = "continuous", since = Since, @@ -362,8 +369,7 @@ changes_callback(timeout, _) -> load_shards_from_disk(DbName) when is_binary(DbName) -> couch_stats:increment_counter([mem3, shard_cache, miss]), - X = ?l2b(config:get("mem3", "shards_db", "_dbs")), - {ok, Db} = mem3_util:ensure_exists(X), + {ok, Db} = mem3_util:ensure_exists(mem3_sync:shards_db()), try load_shards_from_db(Db, DbName) after diff --git a/src/mem3/src/mem3_sync_event_listener.erl b/src/mem3/src/mem3_sync_event_listener.erl index b6fbe32794f..cad34225db5 100644 --- a/src/mem3/src/mem3_sync_event_listener.erl +++ b/src/mem3/src/mem3_sync_event_listener.erl @@ -236,7 +236,7 @@ teardown_all(_) -> setup() -> {ok, Pid} = ?MODULE:start_link(), erlang:unlink(Pid), - meck:wait(config_notifier, subscribe, '_', 1000), + wait_config_subscribed(Pid), Pid. teardown(Pid) -> @@ -338,4 +338,16 @@ wait_state(Pid, Field, Val) when is_pid(Pid), is_integer(Field) -> end, test_util:wait(WaitFun). + +wait_config_subscribed(Pid) -> + WaitFun = fun() -> + Handlers = gen_event:which_handlers(config_event), + Pids = [Id || {config_notifier, Id} <- Handlers], + case lists:member(Pid, Pids) of + true -> true; + false -> wait + end + end, + test_util:wait(WaitFun). + -endif. diff --git a/src/mem3/src/mem3_util.erl b/src/mem3/src/mem3_util.erl index 3fc9b4f8eb7..28cb1777807 100644 --- a/src/mem3/src/mem3_util.erl +++ b/src/mem3/src/mem3_util.erl @@ -14,8 +14,10 @@ -export([name_shard/2, create_partition_map/5, build_shards/2, n_val/2, q_val/1, to_atom/1, to_integer/1, write_db_doc/1, delete_db_doc/1, - shard_info/1, ensure_exists/1, open_db_doc/1]). + shard_info/1, ensure_exists/1, open_db_doc/1, get_or_create_db/2]). -export([is_deleted/1, rotate_list/2]). 
+-export([get_shard_opts/1, get_engine_opt/1, get_props_opt/1]). +-export([get_shard_props/1, find_dirty_shards/0]). -export([ iso8601_timestamp/0, live_nodes/0, @@ -87,13 +89,11 @@ attach_nodes([S | Rest], Acc, [Node | Nodes], UsedNodes) -> attach_nodes(Rest, [S#shard{node=Node} | Acc], Nodes, [Node | UsedNodes]). open_db_doc(DocId) -> - DbName = ?l2b(config:get("mem3", "shards_db", "_dbs")), - {ok, Db} = couch_db:open(DbName, [?ADMIN_CTX]), + {ok, Db} = couch_db:open(mem3_sync:shards_db(), [?ADMIN_CTX]), try couch_db:open_doc(Db, DocId, [ejson_body]) after couch_db:close(Db) end. write_db_doc(Doc) -> - DbName = ?l2b(config:get("mem3", "shards_db", "_dbs")), - write_db_doc(DbName, Doc, true). + write_db_doc(mem3_sync:shards_db(), Doc, true). write_db_doc(DbName, #doc{id=Id, body=Body} = Doc, ShouldMutate) -> {ok, Db} = couch_db:open(DbName, [?ADMIN_CTX]), @@ -118,8 +118,7 @@ write_db_doc(DbName, #doc{id=Id, body=Body} = Doc, ShouldMutate) -> delete_db_doc(DocId) -> gen_server:cast(mem3_shards, {cache_remove, DocId}), - DbName = ?l2b(config:get("mem3", "shards_db", "_dbs")), - delete_db_doc(DbName, DocId, true). + delete_db_doc(mem3_sync:shards_db(), DocId, true). delete_db_doc(DbName, DocId, ShouldMutate) -> {ok, Db} = couch_db:open(DbName, [?ADMIN_CTX]), @@ -324,7 +323,7 @@ live_nodes() -> % which could be a while. % replicate_dbs_to_all_nodes(Timeout) -> - DbName = ?l2b(config:get("mem3", "shards_db", "_dbs")), + DbName = mem3_sync:shards_db(), Targets= mem3_util:live_nodes() -- [node()], Res = [start_replication(node(), T, DbName, Timeout) || T <- Targets], collect_replication_results(Res, Timeout). @@ -335,7 +334,7 @@ replicate_dbs_to_all_nodes(Timeout) -> % them until they are all done. 
% replicate_dbs_from_all_nodes(Timeout) -> - DbName = ?l2b(config:get("mem3", "shards_db", "_dbs")), + DbName = mem3_sync:shards_db(), Sources = mem3_util:live_nodes() -- [node()], Res = [start_replication(S, node(), DbName, Timeout) || S <- Sources], collect_replication_results(Res, Timeout). @@ -509,6 +508,75 @@ sort_ranges_fun({B1, _}, {B2, _}) -> B1 =< B2. +get_or_create_db(DbName, Options) -> + case couch_db:open_int(DbName, Options) of + {ok, _} = OkDb -> + OkDb; + {not_found, no_db_file} -> + try + DbOpts = case mem3:dbname(DbName) of + DbName -> []; + MDbName -> mem3_shards:opts_for_db(MDbName) + end, + Options1 = [{create_if_missing, true} | Options], + Options2 = merge_opts(DbOpts, Options1), + couch_db:open_int(DbName, Options2) + catch error:database_does_not_exist -> + throw({error, missing_target}) + end; + Else -> + Else + end. + + +%% merge two proplists, atom options only valid in Old +merge_opts(New, Old) -> + lists:foldl(fun({Key, Val}, Acc) -> + lists:keystore(Key, 1, Acc, {Key, Val}) + end, Old, New). + + +get_shard_props(ShardName) -> + case couch_db:open_int(ShardName, []) of + {ok, Db} -> + Props = case couch_db_engine:get_props(Db) of + undefined -> []; + Else -> Else + end, + %% We don't normally store the default engine name + EngineProps = case couch_db_engine:get_engine(Db) of + couch_bt_engine -> + []; + EngineName -> + [{engine, EngineName}] + end, + [{props, Props} | EngineProps]; + {not_found, _} -> + not_found; + Else -> + Else + end. + + +find_dirty_shards() -> + mem3_shards:fold(fun(#shard{node=Node, name=Name, opts=Opts}=Shard, Acc) -> + case Opts of + [] -> + Acc; + [{props, []}] -> + Acc; + _ -> + Props = rpc:call(Node, ?MODULE, get_shard_props, [Name]), + case Props =:= Opts of + true -> + Acc; + false -> + [{Shard, Props} | Acc] + end + end + end, []). + + -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). 
diff --git a/src/setup/src/setup.erl b/src/setup/src/setup.erl index 3d23229b82c..e681864c7ce 100644 --- a/src/setup/src/setup.erl +++ b/src/setup/src/setup.erl @@ -198,6 +198,9 @@ setup_node(NewCredentials, NewBindAddress, NodeCount, Port) -> finish_cluster(Options) -> + % ensure that uuid is set + couch_server:get_uuid(), + ok = wait_connected(), ok = sync_admins(), ok = sync_uuid(), @@ -262,7 +265,7 @@ sync_config(Section, Key, Value) -> ok -> ok; error -> - log:error("~p sync_admin results ~p errors ~p", + couch_log:error("~p sync_admin results ~p errors ~p", [?MODULE, Results, Errors]), Reason = "Cluster setup unable to sync admin passwords", throw({setup_error, Reason}) diff --git a/src/setup/src/setup_httpd.erl b/src/setup/src/setup_httpd.erl index f4e05ce09a6..949675b6a1d 100644 --- a/src/setup/src/setup_httpd.erl +++ b/src/setup/src/setup_httpd.erl @@ -31,24 +31,30 @@ handle_setup_req(#httpd{method='GET'}=Req) -> ok = chttpd:verify_is_server_admin(Req), Dbs = chttpd:qs_json_value(Req, "ensure_dbs_exist", setup:cluster_system_dbs()), couch_log:notice("Dbs: ~p~n", [Dbs]), - case erlang:list_to_integer(config:get("cluster", "n", undefined)) of - 1 -> - case setup:is_single_node_enabled(Dbs) of - false -> - chttpd:send_json(Req, 200, {[{state, single_node_disabled}]}); - true -> - chttpd:send_json(Req, 200, {[{state, single_node_enabled}]}) - end; + SingleNodeConfig = config:get_boolean("couchdb", "single_node", false), + case SingleNodeConfig of + true -> + chttpd:send_json(Req, 200, {[{state, single_node_enabled}]}); _ -> - case setup:is_cluster_enabled() of - false -> - chttpd:send_json(Req, 200, {[{state, cluster_disabled}]}); - true -> - case setup:has_cluster_system_dbs(Dbs) of + case config:get("cluster", "n", undefined) of + "1" -> + case setup:is_single_node_enabled(Dbs) of false -> - chttpd:send_json(Req, 200, {[{state, cluster_enabled}]}); + chttpd:send_json(Req, 200, {[{state, single_node_disabled}]}); true -> - chttpd:send_json(Req, 200, {[{state, 
cluster_finished}]}) + chttpd:send_json(Req, 200, {[{state, single_node_enabled}]}) + end; + _ -> + case setup:is_cluster_enabled() of + false -> + chttpd:send_json(Req, 200, {[{state, cluster_disabled}]}); + true -> + case setup:has_cluster_system_dbs(Dbs) of + false -> + chttpd:send_json(Req, 200, {[{state, cluster_enabled}]}); + true -> + chttpd:send_json(Req, 200, {[{state, cluster_finished}]}) + end end end end; diff --git a/src/smoosh/src/smoosh_channel.erl b/src/smoosh/src/smoosh_channel.erl index d8a8d14a97c..2bc98be9ded 100644 --- a/src/smoosh/src/smoosh_channel.erl +++ b/src/smoosh/src/smoosh_channel.erl @@ -122,10 +122,9 @@ handle_info({'DOWN', Ref, _, Job, Reason}, State0) -> #state{active=Active0, starting=Starting0} = State, case lists:keytake(Job, 2, Active0) of {value, {Key, _Pid}, Active1} -> - couch_log:warning("exit for compaction of ~p: ~p", [ - smoosh_utils:stringify(Key), Reason]), - {ok, _} = timer:apply_after(5000, smoosh_server, enqueue, [Key]), - {noreply, maybe_start_compaction(State#state{active=Active1})}; + State1 = maybe_remonitor_cpid(State#state{active=Active1}, Key, + Reason), + {noreply, maybe_start_compaction(State1)}; false -> case lists:keytake(Ref, 1, Starting0) of {value, {_, Key}, Starting1} -> @@ -281,8 +280,7 @@ start_compact(State, Db) -> Ref = erlang:monitor(process, DbPid), DbPid ! {'$gen_call', {self(), Ref}, start_compact}, State#state{starting=[{Ref, Key}|State#state.starting]}; - % database is still compacting so we can just monitor the existing - % compaction pid + % Compaction is already running, so monitor existing compaction pid. CPid -> couch_log:notice("Db ~s continuing compaction", [smoosh_utils:stringify(Key)]), @@ -293,6 +291,27 @@ start_compact(State, Db) -> false end. 
+maybe_remonitor_cpid(State, DbName, Reason) when is_binary(DbName) -> + {ok, Db} = couch_db:open_int(DbName, []), + case couch_db:get_compactor_pid_sync(Db) of + nil -> + couch_log:warning("exit for compaction of ~p: ~p", + [smoosh_utils:stringify(DbName), Reason]), + {ok, _} = timer:apply_after(5000, smoosh_server, enqueue, [DbName]), + State; + CPid -> + couch_log:notice("~s compaction already running. Re-monitor Pid ~p", + [smoosh_utils:stringify(DbName), CPid]), + erlang:monitor(process, CPid), + State#state{active=[{DbName, CPid}|State#state.active]} + end; +% not a database compaction, so ignore the pid check +maybe_remonitor_cpid(State, Key, Reason) -> + couch_log:warning("exit for compaction of ~p: ~p", + [smoosh_utils:stringify(Key), Reason]), + {ok, _} = timer:apply_after(5000, smoosh_server, enqueue, [Key]), + State. + schedule_unpause() -> WaitSecs = list_to_integer(config:get("smoosh", "wait_secs", "30")), erlang:send_after(WaitSecs * 1000, self(), unpause). diff --git a/support/build_js.escript b/support/build_js.escript index 90ad3168f27..2d9de611211 100644 --- a/support/build_js.escript +++ b/support/build_js.escript @@ -66,6 +66,12 @@ main([]) -> "share/server/rewrite_fun.js" ]; "60" -> + [ + "share/server/60/esprima.js", + "share/server/60/escodegen.js", + "share/server/60/rewrite_fun.js" + ]; + "68" -> [ "share/server/60/esprima.js", "share/server/60/escodegen.js", diff --git a/test/elixir/README.md b/test/elixir/README.md index 90b2fd6019c..13d74a46346 100644 --- a/test/elixir/README.md +++ b/test/elixir/README.md @@ -42,34 +42,35 @@ X means done, - means partially - [X] Port conflicts.js - [X] Port cookie_auth.js - [X] Port copy_doc.js - - [ ] Port design_docs.js - - [ ] Port design_options.js - - [ ] Port design_paths.js + - [X] Port design_docs.js + - [X] Port design_docs_query.js + - [X] Port design_options.js + - [X] Port design_paths.js - [X] Port erlang_views.js - [X] Port etags_head.js - [ ] ~~Port etags_views.js~~ (skipped in js test 
suite) - - [ ] Port form_submit.js - - [ ] Port http.js + - [X] Port form_submit.js + - [X] Port http.js - [X] Port invalid_docids.js - - [ ] Port jsonp.js + - [X] Port jsonp.js - [X] Port large_docs.js - - [ ] Port list_views.js + - [X] Port list_views.js - [X] Port lorem_b64.txt - [X] Port lorem.txt - [X] Port lots_of_docs.js - - [ ] Port method_override.js + - [X] Port method_override.js - [X] Port multiple_rows.js - - [ ] Port proxyauth.js - - [ ] Port purge.js - - [ ] Port reader_acl.js - - [ ] Port recreate_doc.js - - [ ] Port reduce_builtin.js - - [ ] Port reduce_false.js - - [ ] Port reduce_false_temp.js + - [X] Port proxyauth.js + - [X] Port purge.js + - [X] Port reader_acl.js + - [X] Port recreate_doc.js + - [X] Port reduce_builtin.js + - [X] Port reduce_false.js + - [ ] ~~Port reduce_false_temp.js~~ - [X] Port reduce.js - [X] Port replication.js - - [ ] Port replicator_db_bad_rep_id.js - - [ ] Port replicator_db_by_doc_id.js + - [X] Port replicator_db_bad_rep_id.js + - [X] Port replicator_db_by_doc_id.js - [ ] Port replicator_db_compact_rep_db.js - [ ] Port replicator_db_continuous.js - [ ] Port replicator_db_credential_delegation.js @@ -86,30 +87,30 @@ X means done, - means partially - [ ] Port replicator_db_update_security.js - [ ] Port replicator_db_user_ctx.js - [ ] Port replicator_db_write_auth.js - - [ ] Port rev_stemming.js + - [X] Port rev_stemming.js - [X] Port rewrite.js - - [ ] Port rewrite_js.js + - [X] Port rewrite_js.js - [X] Port security_validation.js - - [ ] Port show_documents.js + - [X] Port show_documents.js - [ ] Port stats.js - - [ ] Port update_documents.js + - [X] Port update_documents.js - [X] Port users_db.js - - [ ] Port users_db_security.js + - [X] Port users_db_security.js - [X] Port utf8.js - [X] Port uuids.js - [X] Port view_collation.js - - [ ] Port view_collation_raw.js - - [ ] Port view_compaction.js - - [ ] Port view_conflicts.js - - [ ] Port view_errors.js - - [ ] Port view_include_docs.js - - [ ] Port 
view_multi_key_all_docs.js - - [ ] Port view_multi_key_design.js - - [ ] Port view_multi_key_temp.js - - [ ] Port view_offsets.js - - [ ] Port view_pagination.js - - [ ] Port view_sandboxing.js - - [ ] Port view_update_seq.js + - [X] Port view_collation_raw.js + - [X] Port view_compaction.js + - [X] Port view_conflicts.js + - [X] Port view_errors.js + - [X] Port view_include_docs.js + - [X] Port view_multi_key_all_docs.js + - [X] Port view_multi_key_design.js + - [ ] ~~Port view_multi_key_temp.js~~ + - [X] Port view_offsets.js + - [X] Port view_pagination.js + - [X] Port view_sandboxing.js + - [X] Port view_update_seq.js # Using ExUnit to write unit tests @@ -119,8 +120,8 @@ Bellow we present a few use cases where code-generation is really helpful. ## How to write ExUnit tests -1. Create new file in test/exunit/ directory (the file name should match *_test.exs) -2. In case it is a first file in the directory create test_helper.exs (look at src/couch/test/exunit/test_helper.exs to get an idea) +1. Create new file in test/exunit/ directory (the file name should match `*_test.exs`) +2. In case it is a first file in the directory create `test_helper.exs` (look at `src/couch/test/exunit/test_helper.exs` to get an idea) 3. define test module which does `use Couch.Test.ExUnit.Case` 4. Define test cases in the module diff --git a/test/elixir/lib/couch.ex b/test/elixir/lib/couch.ex index 3aef07f01a0..d9751c416d3 100644 --- a/test/elixir/lib/couch.ex +++ b/test/elixir/lib/couch.ex @@ -40,15 +40,28 @@ defmodule Couch.Session do # Skipping head/patch/options for YAGNI. Feel free to add # if the need arises. 
- def go(%Couch.Session{} = sess, method, url, opts) do - opts = Keyword.merge(opts, cookie: sess.cookie) - Couch.request(method, url, opts) + parse_response = Keyword.get(opts, :parse_response, true) + opts = opts + |> Keyword.merge(cookie: sess.cookie) + |> Keyword.delete(:parse_response) + if parse_response do + Couch.request(method, url, opts) + else + Rawresp.request(method, url, opts) + end end def go!(%Couch.Session{} = sess, method, url, opts) do - opts = Keyword.merge(opts, cookie: sess.cookie) - Couch.request!(method, url, opts) + parse_response = Keyword.get(opts, :parse_response, true) + opts = opts + |> Keyword.merge(cookie: sess.cookie) + |> Keyword.delete(:parse_response) + if parse_response do + Couch.request!(method, url, opts) + else + Rawresp.request!(method, url, opts) + end end end @@ -127,8 +140,8 @@ defmodule Couch do def set_auth_options(options) do if Keyword.get(options, :cookie) == nil do headers = Keyword.get(options, :headers, []) - - if headers[:basic_auth] != nil or headers[:authorization] != nil do + if headers[:basic_auth] != nil or headers[:authorization] != nil + or List.keymember?(headers, :"X-Auth-CouchDB-UserName", 0) do options else username = System.get_env("EX_USERNAME") || "adm" diff --git a/test/elixir/lib/couch/db_test.ex b/test/elixir/lib/couch/db_test.ex index 0a091c667de..652fa6bb68d 100644 --- a/test/elixir/lib/couch/db_test.ex +++ b/test/elixir/lib/couch/db_test.ex @@ -192,6 +192,13 @@ defmodule Couch.DBTest do resp.body end + def save(db_name, document) do + resp = Couch.put("/#{db_name}/#{document["_id"]}", body: document) + assert resp.status_code in [201, 202] + assert resp.body["ok"] + Map.put(document, "_rev", resp.body["rev"]) + end + def bulk_save(db_name, docs) do resp = Couch.post( @@ -202,6 +209,7 @@ defmodule Couch.DBTest do ) assert resp.status_code in [201, 202] + resp end def query( @@ -271,6 +279,78 @@ defmodule Couch.DBTest do resp.body end + def compact(db_name) do + resp = 
Couch.post("/#{db_name}/_compact") + assert resp.status_code == 202 + + retry_until( + fn -> Map.get(info(db_name), "compact_running") == false end, + 200, + 10_000 + ) + + resp.body + end + + def replicate(src, tgt, options \\ []) do + username = System.get_env("EX_USERNAME") || "adm" + password = System.get_env("EX_PASSWORD") || "pass" + + {userinfo, options} = Keyword.pop(options, :userinfo) + + userinfo = + if userinfo == nil do + "#{username}:#{password}" + else + userinfo + end + + src = set_user(src, userinfo) + tgt = set_user(tgt, userinfo) + + defaults = [headers: [], body: %{}, timeout: 30_000] + options = defaults |> Keyword.merge(options) |> Enum.into(%{}) + + %{body: body} = options + body = [source: src, target: tgt] |> Enum.into(body) + options = Map.put(options, :body, body) + + resp = Couch.post("/_replicate", Enum.to_list(options)) + assert HTTPotion.Response.success?(resp), "#{inspect(resp)}" + resp.body + end + + defp set_user(uri, userinfo) do + case URI.parse(uri) do + %{scheme: nil} -> + uri + + %{userinfo: nil} = uri -> + URI.to_string(Map.put(uri, :userinfo, userinfo)) + + _ -> + uri + end + end + + def view(db_name, view_name, options \\ nil, keys \\ nil) do + [view_root, view_name] = String.split(view_name, "/") + + resp = + case keys do + nil -> + Couch.get("/#{db_name}/_design/#{view_root}/_view/#{view_name}", query: options) + + _ -> + Couch.post("/#{db_name}/_design/#{view_root}/_view/#{view_name}", query: options, + body: %{"keys" => keys} + ) + end + + assert resp.status_code in [200, 201] + resp + end + def sample_doc_foo do %{ _id: "foo", @@ -300,7 +380,6 @@ defmodule Couch.DBTest do end end - def request_stats(path_steps, is_test) do path = List.foldl( @@ -400,7 +479,7 @@ defmodule Couch.DBTest do node = elem(node_value, 0) value = elem(node_value, 1) - if value == ~s(""\\n) do + if value == ~s(""\\n) or value == "" or value == nil do resp = Couch.delete( "/_node/#{node}/_config/#{setting.section}/#{setting.key}", diff --git 
a/test/elixir/lib/couch_raw.ex b/test/elixir/lib/couch_raw.ex new file mode 100644 index 00000000000..62a0bbd0ed2 --- /dev/null +++ b/test/elixir/lib/couch_raw.ex @@ -0,0 +1,105 @@ +defmodule Rawresp do + use HTTPotion.Base + + @moduledoc """ + HTTP client that provides raw response as result + """ + @request_timeout 60_000 + @inactivity_timeout 55_000 + + def process_url("http://" <> _ = url) do + url + end + + def process_url(url) do + base_url = System.get_env("EX_COUCH_URL") || "http://127.0.0.1:15984" + base_url <> url + end + + def process_request_headers(headers, _body, options) do + headers = + headers + |> Keyword.put(:"User-Agent", "couch-potion") + + headers = + if headers[:"Content-Type"] do + headers + else + Keyword.put(headers, :"Content-Type", "application/json") + end + + case Keyword.get(options, :cookie) do + nil -> + headers + + cookie -> + Keyword.put(headers, :Cookie, cookie) + end + end + + def process_options(options) do + options + |> set_auth_options() + |> set_inactivity_timeout() + |> set_request_timeout() + end + + def process_request_body(body) do + if is_map(body) do + :jiffy.encode(body) + else + body + end + end + + def set_auth_options(options) do + if Keyword.get(options, :cookie) == nil do + headers = Keyword.get(options, :headers, []) + + if headers[:basic_auth] != nil or headers[:authorization] != nil do + options + else + username = System.get_env("EX_USERNAME") || "adm" + password = System.get_env("EX_PASSWORD") || "pass" + Keyword.put(options, :basic_auth, {username, password}) + end + else + options + end + end + + def set_inactivity_timeout(options) do + Keyword.update( + options, + :ibrowse, + [{:inactivity_timeout, @inactivity_timeout}], + fn ibrowse -> + Keyword.put_new(ibrowse, :inactivity_timeout, @inactivity_timeout) + end + ) + end + + def set_request_timeout(options) do + timeout = Application.get_env(:httpotion, :default_timeout, @request_timeout) + Keyword.put_new(options, :timeout, timeout) + end + + def 
login(userinfo) do + [user, pass] = String.split(userinfo, ":", parts: 2) + login(user, pass) + end + + def login(user, pass, expect \\ :success) do + resp = Couch.post("/_session", body: %{:username => user, :password => pass}) + + if expect == :success do + true = resp.body["ok"] + cookie = resp.headers[:"set-cookie"] + [token | _] = String.split(cookie, ";") + %Couch.Session{cookie: token} + else + true = Map.has_key?(resp.body, "error") + %Couch.Session{error: resp.body["error"]} + end + end +end diff --git a/test/elixir/test/all_docs_test.exs b/test/elixir/test/all_docs_test.exs index a091dce555b..b9fb6f24192 100644 --- a/test/elixir/test/all_docs_test.exs +++ b/test/elixir/test/all_docs_test.exs @@ -2,6 +2,7 @@ defmodule AllDocsTest do use CouchTestCase @moduletag :all_docs + @moduletag kind: :single_node @moduledoc """ Test CouchDB _all_docs diff --git a/test/elixir/test/attachment_names_test.exs b/test/elixir/test/attachment_names_test.exs index 4593a850462..66596c865ca 100644 --- a/test/elixir/test/attachment_names_test.exs +++ b/test/elixir/test/attachment_names_test.exs @@ -2,6 +2,7 @@ defmodule AttachmentNamesTest do use CouchTestCase @moduletag :attachments + @moduletag kind: :single_node @good_doc """ { diff --git a/test/elixir/test/attachment_paths_test.exs b/test/elixir/test/attachment_paths_test.exs index b776feabf57..4c79eca1c03 100644 --- a/test/elixir/test/attachment_paths_test.exs +++ b/test/elixir/test/attachment_paths_test.exs @@ -2,6 +2,7 @@ defmodule AttachmentPathsTest do use CouchTestCase @moduletag :attachments + @moduletag kind: :single_node @bin_att_doc """ { diff --git a/test/elixir/test/attachment_ranges_test.exs b/test/elixir/test/attachment_ranges_test.exs index 01c1239bc52..de39836b02f 100644 --- a/test/elixir/test/attachment_ranges_test.exs +++ b/test/elixir/test/attachment_ranges_test.exs @@ -2,6 +2,7 @@ defmodule AttachmentRangesTest do use CouchTestCase @moduletag :attachments + @moduletag kind: :single_node @moduledoc """ 
Test CouchDB attachment range requests diff --git a/test/elixir/test/attachment_views_test.exs b/test/elixir/test/attachment_views_test.exs index 3da62f04216..044008531e5 100644 --- a/test/elixir/test/attachment_views_test.exs +++ b/test/elixir/test/attachment_views_test.exs @@ -2,6 +2,7 @@ defmodule AttachmentViewTest do use CouchTestCase @moduletag :attachments + @moduletag kind: :single_node @moduledoc """ Test CouchDB attachment views requests diff --git a/test/elixir/test/attachments_multipart_test.exs b/test/elixir/test/attachments_multipart_test.exs index f7d5d9519f9..f635377a101 100644 --- a/test/elixir/test/attachments_multipart_test.exs +++ b/test/elixir/test/attachments_multipart_test.exs @@ -2,6 +2,7 @@ defmodule AttachmentMultipartTest do use CouchTestCase @moduletag :attachments + @moduletag kind: :single_node @moduledoc """ Test CouchDB attachment multipart requests diff --git a/test/elixir/test/attachments_test.exs b/test/elixir/test/attachments_test.exs index 02000337727..8e7f7d3529b 100644 --- a/test/elixir/test/attachments_test.exs +++ b/test/elixir/test/attachments_test.exs @@ -2,6 +2,7 @@ defmodule AttachmentsTest do use CouchTestCase @moduletag :attachments + @moduletag kind: :single_node # MD5 Digests of compressible attachments and therefore Etags # will vary depending on platform gzip implementation. 
diff --git a/test/elixir/test/auth_cache_test.exs b/test/elixir/test/auth_cache_test.exs index 2ba396de71c..5c446f039d5 100644 --- a/test/elixir/test/auth_cache_test.exs +++ b/test/elixir/test/auth_cache_test.exs @@ -2,6 +2,7 @@ defmodule AuthCacheTest do use CouchTestCase @moduletag :authentication + @moduletag kind: :single_node @tag :pending @tag :with_db @@ -66,14 +67,6 @@ defmodule AuthCacheTest do sess end - defp wait_until_compact_complete(db_name) do - retry_until( - fn -> Map.get(info(db_name), "compact_running") == false end, - 200, - 10_000 - ) - end - defp assert_cache(event, user, password, expect \\ :expect_login_success) do hits_before = hits() misses_before = misses() @@ -112,12 +105,6 @@ defmodule AuthCacheTest do end end - defp compact(db_name) do - resp = Couch.post("/#{db_name}/_compact") - assert resp.status_code == 202 - resp.body - end - def save_doc(db_name, body) do resp = Couch.put("/#{db_name}/#{body["_id"]}", body: body) assert resp.status_code in [201, 202] @@ -206,7 +193,6 @@ defmodule AuthCacheTest do # there was a cache hit assert_cache(:expect_hit, "johndoe", "123456") compact(db_name) - wait_until_compact_complete(db_name) assert_cache(:expect_hit, "johndoe", "123456") end end diff --git a/test/elixir/test/basics_test.exs b/test/elixir/test/basics_test.exs index a03fa2922cd..359ba6040f1 100644 --- a/test/elixir/test/basics_test.exs +++ b/test/elixir/test/basics_test.exs @@ -2,6 +2,7 @@ defmodule BasicsTest do use CouchTestCase @moduletag :basics + @moduletag kind: :single_node @moduledoc """ Test CouchDB basics. 
diff --git a/test/elixir/test/batch_save_test.exs b/test/elixir/test/batch_save_test.exs index 030fcdfba87..f13fcdc9f8f 100644 --- a/test/elixir/test/batch_save_test.exs +++ b/test/elixir/test/batch_save_test.exs @@ -2,6 +2,7 @@ defmodule BatchSaveTest do use CouchTestCase @moduletag :batch_save + @moduletag kind: :performance @moduledoc """ Test CouchDB batch save diff --git a/test/elixir/test/bulk_docs_test.exs b/test/elixir/test/bulk_docs_test.exs index 1a7c1104581..cbbc533405d 100644 --- a/test/elixir/test/bulk_docs_test.exs +++ b/test/elixir/test/bulk_docs_test.exs @@ -2,6 +2,7 @@ defmodule BulkDocsTest do use CouchTestCase @moduletag :bulk_docs + @moduletag kind: :single_node @moduledoc """ Test CouchDB bulk docs diff --git a/test/elixir/test/changes_async_test.exs b/test/elixir/test/changes_async_test.exs new file mode 100644 index 00000000000..001c5d58c83 --- /dev/null +++ b/test/elixir/test/changes_async_test.exs @@ -0,0 +1,443 @@ +defmodule ChangesAsyncTest do + use CouchTestCase + + @moduletag :changes + @moduletag kind: :single_node + + @moduledoc """ + Test CouchDB /{db}/_changes + """ + + @tag :with_db + test "live changes", context do + db_name = context[:db_name] + test_changes(db_name, "live") + end + + @tag :with_db + test "continuous changes", context do + db_name = context[:db_name] + test_changes(db_name, "continuous") + end + + @tag :with_db + test "longpoll changes", context do + db_name = context[:db_name] + + check_empty_db(db_name) + + create_doc(db_name, sample_doc_foo()) + + req_id = + Couch.get("/#{db_name}/_changes?feed=longpoll", + stream_to: self() + ) + + changes = process_response(req_id.id, &parse_chunk/1) + {changes_length, last_seq_prefix} = parse_changes_response(changes) + assert changes_length == 1, "db should not be empty" + assert last_seq_prefix == "1-", "seq must start with 1-" + + last_seq = changes["last_seq"] + {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url("")) + + req_id = + 
Couch.get("/#{db_name}/_changes?feed=longpoll&since=#{last_seq}", + stream_to: self(), + direct: worker_pid + ) + + :ok = wait_for_headers(req_id.id, 200) + + create_doc_bar(db_name, "bar") + + {changes_length, last_seq_prefix} = + req_id.id + |> process_response(&parse_chunk/1) + |> parse_changes_response() + + assert changes_length == 1, "should return one change" + assert last_seq_prefix == "2-", "seq must start with 2-" + + req_id = + Couch.get("/#{db_name}/_changes?feed=longpoll&since=now", + stream_to: self(), + direct: worker_pid + ) + + :ok = wait_for_headers(req_id.id, 200) + + create_doc_bar(db_name, "barzzzz") + + changes = process_response(req_id.id, &parse_chunk/1) + {changes_length, last_seq_prefix} = parse_changes_response(changes) + assert changes_length == 1, "should return one change" + assert Enum.at(changes["results"], 0)["id"] == "barzzzz" + assert last_seq_prefix == "3-", "seq must start with 3-" + end + + @tag :with_db + test "eventsource changes", context do + db_name = context[:db_name] + + check_empty_db(db_name) + + create_doc(db_name, sample_doc_foo()) + {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url("")) + + req_id = + Rawresp.get("/#{db_name}/_changes?feed=eventsource&timeout=500", + stream_to: self(), + direct: worker_pid + ) + + :ok = wait_for_headers(req_id.id, 200) + + create_doc_bar(db_name, "bar") + + changes = process_response(req_id.id, &parse_event/1) + + assert length(changes) == 2 + assert Enum.at(changes, 0)["id"] == "foo" + assert Enum.at(changes, 1)["id"] == "bar" + + HTTPotion.stop_worker_process(worker_pid) + end + + @tag :with_db + test "eventsource heartbeat", context do + db_name = context[:db_name] + + {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url("")) + + req_id = + Rawresp.get("/#{db_name}/_changes?feed=eventsource&heartbeat=10", + stream_to: {self(), :once}, + direct: worker_pid + ) + + :ok = wait_for_headers(req_id.id, 200) + beats = 
wait_for_heartbeats(req_id.id, 0, 3) + assert beats == 3 + HTTPotion.stop_worker_process(worker_pid) + end + + @tag :with_db + test "longpoll filtered changes", context do + db_name = context[:db_name] + create_filters_view(db_name) + + create_doc(db_name, %{bop: "foom"}) + create_doc(db_name, %{bop: false}) + + req_id = + Couch.get("/#{db_name}/_changes?feed=longpoll&filter=changes_filter/bop", + stream_to: self() + ) + + changes = process_response(req_id.id, &parse_chunk/1) + {changes_length, last_seq_prefix} = parse_changes_response(changes) + assert changes_length == 1, "db should not be empty" + assert last_seq_prefix == "3-", "seq must start with 3-" + + last_seq = changes["last_seq"] + # longpoll waits until a matching change before returning + {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url("")) + + req_id = + Couch.get( + "/#{db_name}/_changes?feed=longpoll&filter=changes_filter/bop&since=#{last_seq}", + stream_to: self(), + direct: worker_pid + ) + + :ok = wait_for_headers(req_id.id, 200) + create_doc(db_name, %{_id: "falsy", bop: ""}) + # Doc doesn't match the filter + changes = process_response(req_id.id, &parse_chunk/1) + assert changes == :timeout + + # Doc matches the filter + create_doc(db_name, %{_id: "bingo", bop: "bingo"}) + changes = process_response(req_id.id, &parse_chunk/1) + {changes_length, last_seq_prefix} = parse_changes_response(changes) + assert changes_length == 1, "db should not be empty" + assert last_seq_prefix == "5-", "seq must start with 5-" + assert Enum.at(changes["results"], 0)["id"] == "bingo" + end + + @tag :with_db + test "continuous filtered changes", context do + db_name = context[:db_name] + create_filters_view(db_name) + + create_doc(db_name, %{bop: false}) + create_doc(db_name, %{_id: "bingo", bop: "bingo"}) + + {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url("")) + + req_id = + Rawresp.get( + 
"/#{db_name}/_changes?feed=continuous&filter=changes_filter/bop&timeout=500", + stream_to: self(), + direct: worker_pid + ) + + :ok = wait_for_headers(req_id.id, 200) + create_doc(db_name, %{_id: "rusty", bop: "plankton"}) + + changes = process_response(req_id.id, &parse_changes_line_chunk/1) + + changes_ids = + changes + |> Enum.filter(fn p -> Map.has_key?(p, "id") end) + |> Enum.map(fn p -> p["id"] end) + + assert Enum.member?(changes_ids, "bingo") + assert Enum.member?(changes_ids, "rusty") + assert length(changes_ids) == 2 + end + + @tag :with_db + test "continuous filtered changes with doc ids", context do + db_name = context[:db_name] + doc_ids = %{doc_ids: ["doc1", "doc3", "doc4"]} + + create_doc(db_name, %{_id: "doc1", value: 1}) + create_doc(db_name, %{_id: "doc2", value: 2}) + + {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url("")) + + req_id = + Rawresp.post( + "/#{db_name}/_changes?feed=continuous&timeout=500&filter=_doc_ids", + body: doc_ids, + headers: ["Content-Type": "application/json"], + stream_to: self(), + direct: worker_pid + ) + + :ok = wait_for_headers(req_id.id, 200) + create_doc(db_name, %{_id: "doc3", value: 3}) + + changes = process_response(req_id.id, &parse_changes_line_chunk/1) + + changes_ids = + changes + |> Enum.filter(fn p -> Map.has_key?(p, "id") end) + |> Enum.map(fn p -> p["id"] end) + + assert Enum.member?(changes_ids, "doc1") + assert Enum.member?(changes_ids, "doc3") + assert length(changes_ids) == 2 + end + + @tag :with_db + test "COUCHDB-1852", context do + db_name = context[:db_name] + + create_doc(db_name, %{bop: "foom"}) + create_doc(db_name, %{bop: "foom"}) + create_doc(db_name, %{bop: "foom"}) + create_doc(db_name, %{bop: "foom"}) + + resp = Couch.get("/#{db_name}/_changes") + assert length(resp.body["results"]) == 4 + seq = Enum.at(resp.body["results"], 1)["seq"] + + {:ok, worker_pid} = HTTPotion.spawn_link_worker_process(Couch.process_url("")) + + # simulate an EventSource request with a 
Last-Event-ID header + req_id = + Rawresp.get( + "/#{db_name}/_changes?feed=eventsource&timeout=100&since=0", + headers: [Accept: "text/event-stream", "Last-Event-ID": seq], + stream_to: self(), + direct: worker_pid + ) + + changes = process_response(req_id.id, &parse_event/1) + assert length(changes) == 2 + end + + defp wait_for_heartbeats(id, beats, expexted_beats) do + if beats < expexted_beats do + :ibrowse.stream_next(id) + is_heartbeat = process_response(id, &parse_heartbeat/1) + + case is_heartbeat do + :heartbeat -> wait_for_heartbeats(id, beats + 1, expexted_beats) + :timeout -> beats + _ -> wait_for_heartbeats(id, beats, expexted_beats) + end + else + beats + end + end + + defp wait_for_headers(id, status, timeout \\ 1000) do + receive do + %HTTPotion.AsyncHeaders{id: ^id, status_code: ^status} -> + :ok + + _ -> + wait_for_headers(id, status, timeout) + after + timeout -> :timeout + end + end + + defp process_response(id, chunk_parser, timeout \\ 1000) do + receive do + %HTTPotion.AsyncChunk{id: ^id} = msg -> + chunk_parser.(msg) + + _ -> + process_response(id, chunk_parser, timeout) + after + timeout -> :timeout + end + end + + defp parse_chunk(msg) do + msg.chunk |> IO.iodata_to_binary() |> :jiffy.decode([:return_maps]) + end + + defp parse_event(msg) do + captures = Regex.scan(~r/data: (.*)/, msg.chunk) + + captures + |> Enum.map(fn p -> Enum.at(p, 1) end) + |> Enum.filter(fn p -> String.trim(p) != "" end) + |> Enum.map(fn p -> + p + |> IO.iodata_to_binary() + |> :jiffy.decode([:return_maps]) + end) + end + + defp parse_heartbeat(msg) do + is_heartbeat = Regex.match?(~r/event: heartbeat/, msg.chunk) + + if is_heartbeat do + :heartbeat + else + :other + end + end + + defp parse_changes_response(changes) do + {length(changes["results"]), String.slice(changes["last_seq"], 0..1)} + end + + defp check_empty_db(db_name) do + resp = Couch.get("/#{db_name}/_changes") + assert resp.body["results"] == [], "db must be empty" + assert 
String.at(resp.body["last_seq"], 0) == "0", "seq must start with 0" + end + + defp test_changes(db_name, feed) do + check_empty_db(db_name) + {_, resp} = create_doc(db_name, sample_doc_foo()) + rev = resp.body["rev"] + + # TODO: retry_part + resp = Couch.get("/#{db_name}/_changes") + assert length(resp.body["results"]) == 1, "db must not be empty" + assert String.at(resp.body["last_seq"], 0) == "1", "seq must start with 1" + + # increase timeout to 100 to have enough time 2 assemble + # (seems like too little timeouts kill + resp = Rawresp.get("/#{db_name}/_changes?feed=#{feed}&timeout=100") + changes = parse_changes_line(resp.body) + + change = Enum.at(changes, 0) + assert Enum.at(change["changes"], 0)["rev"] == rev + + # the sequence is not fully ordered and a complex structure now + change = Enum.at(changes, 1) + assert String.at(change["last_seq"], 0) == "1" + + # create_doc_bar(db_name,"bar") + {:ok, worker_pid} = HTTPotion.spawn_worker_process(Couch.process_url("")) + + %HTTPotion.AsyncResponse{id: req_id} = + Rawresp.get("/#{db_name}/_changes?feed=#{feed}&timeout=500", + stream_to: self(), + direct: worker_pid + ) + + :ok = wait_for_headers(req_id, 200) + create_doc_bar(db_name, "bar") + + changes = process_response(req_id, &parse_changes_line_chunk/1) + assert length(changes) == 3 + + HTTPotion.stop_worker_process(worker_pid) + end + + def create_doc_bar(db_name, id) do + create_doc(db_name, %{:_id => id, :bar => 1}) + end + + defp parse_changes_line_chunk(msg) do + parse_changes_line(msg.chunk) + end + + defp parse_changes_line(body) do + body_lines = String.split(body, "\n") + + body_lines + |> Enum.filter(fn line -> line != "" end) + |> Enum.map(fn line -> + line |> IO.iodata_to_binary() |> :jiffy.decode([:return_maps]) + end) + end + + defp create_filters_view(db_name) do + dynamic_fun = """ + function(doc, req) { + var field = req.query.field; + return doc[field]; + } + """ + + userctx_fun = """ + function(doc, req) { + var field = req.query.field; + 
return doc[field]; + } + """ + + blah_fun = """ + function(doc) { + if (doc._id == "blah") { + emit(null, null); + } + } + """ + + ddoc = %{ + _id: "_design/changes_filter", + filters: %{ + bop: "function(doc, req) { return (doc.bop);}", + dynamic: dynamic_fun, + userCtx: userctx_fun, + conflicted: "function(doc, req) { return (doc._conflicts);}" + }, + options: %{ + local_seq: true + }, + views: %{ + local_seq: %{ + map: "function(doc) {emit(doc._local_seq, null)}" + }, + blah: %{ + map: blah_fun + } + } + } + + create_doc(db_name, ddoc) + end +end diff --git a/test/elixir/test/changes_test.exs b/test/elixir/test/changes_test.exs index b5545087b24..ad579a99ae6 100644 --- a/test/elixir/test/changes_test.exs +++ b/test/elixir/test/changes_test.exs @@ -2,6 +2,7 @@ defmodule ChangesTest do use CouchTestCase @moduletag :changes + @moduletag kind: :single_node @moduledoc """ Test CouchDB /{db}/_changes @@ -11,33 +12,441 @@ defmodule ChangesTest do test "Changes feed negative heartbeat", context do db_name = context[:db_name] - resp = Couch.get( - "/#{db_name}/_changes", - query: %{ - :feed => "continuous", - :heartbeat => -1000 - } - ) + resp = + Couch.get( + "/#{db_name}/_changes", + query: %{ + :feed => "continuous", + :heartbeat => -1000 + } + ) assert resp.status_code == 400 assert resp.body["error"] == "bad_request" - assert resp.body["reason"] == "The heartbeat value should be a positive integer (in milliseconds)." + + assert resp.body["reason"] == + "The heartbeat value should be a positive integer (in milliseconds)." 
end @tag :with_db test "Changes feed non-integer heartbeat", context do db_name = context[:db_name] - resp = Couch.get( - "/#{db_name}/_changes", - query: %{ - :feed => "continuous", - :heartbeat => "a1000" - } - ) + resp = + Couch.get( + "/#{db_name}/_changes", + query: %{ + :feed => "continuous", + :heartbeat => "a1000" + } + ) assert resp.status_code == 400 assert resp.body["error"] == "bad_request" - assert resp.body["reason"] == "Invalid heartbeat value. Expecting a positive integer value (in milliseconds)." + + assert resp.body["reason"] == + "Invalid heartbeat value. Expecting a positive integer value (in milliseconds)." + end + + @tag :with_db + test "function filtered changes", context do + db_name = context[:db_name] + create_filters_view(db_name) + + resp = Couch.get("/#{db_name}/_changes?filter=changes_filter/bop") + assert Enum.empty?(resp.body["results"]), "db must be empty" + + {:ok, doc_resp} = create_doc(db_name, %{bop: "foom"}) + rev = doc_resp.body["rev"] + id = doc_resp.body["id"] + create_doc(db_name, %{bop: false}) + + resp = Couch.get("/#{db_name}/_changes?filter=changes_filter/bop") + assert length(resp.body["results"]) == 1 + change_rev = get_change_rev_at(resp.body["results"], 0) + assert change_rev == rev + + doc = open_doc(db_name, id) + doc = Map.put(doc, "newattr", "a") + + doc = save_doc(db_name, doc) + + resp = Couch.get("/#{db_name}/_changes?filter=changes_filter/bop") + assert length(resp.body["results"]) == 1 + new_change_rev = get_change_rev_at(resp.body["results"], 0) + assert new_change_rev == doc["_rev"] + assert new_change_rev != change_rev + + resp = Couch.get("/#{db_name}/_changes?filter=changes_filter/dynamic&field=woox") + assert Enum.empty?(resp.body["results"]), "db must be empty" + + resp = Couch.get("/#{db_name}/_changes?filter=changes_filter/dynamic&field=bop") + assert length(resp.body["results"]) == 1, "db must have one change" + new_change_rev = get_change_rev_at(resp.body["results"], 0) + assert new_change_rev == 
doc["_rev"] + end + + @tag :with_db + test "non-existing desing doc for filtered changes", context do + db_name = context[:db_name] + resp = Couch.get("/#{db_name}/_changes?filter=nothingtosee/bop") + assert resp.status_code == 404 + end + + @tag :with_db + test "non-existing function for filtered changes", context do + db_name = context[:db_name] + create_filters_view(db_name) + resp = Couch.get("/#{db_name}/_changes?filter=changes_filter/movealong") + assert resp.status_code == 404 + end + + @tag :with_db + test "non-existing desing doc and funcion for filtered changes", context do + db_name = context[:db_name] + resp = Couch.get("/#{db_name}/_changes?filter=nothingtosee/movealong") + assert resp.status_code == 404 + end + + @tag :with_db + test "map function filtered changes", context do + db_name = context[:db_name] + create_filters_view(db_name) + create_doc(db_name, %{_id: "blah", bop: "plankton"}) + resp = Couch.get("/#{db_name}/_changes?filter=_view&view=changes_filter/blah") + assert length(resp.body["results"]) == 1 + assert Enum.at(resp.body["results"], 0)["id"] == "blah" + end + + @tag :with_db + test "changes limit", context do + db_name = context[:db_name] + + create_doc(db_name, %{_id: "blah", bop: "plankton"}) + create_doc(db_name, %{_id: "blah2", bop: "plankton"}) + create_doc(db_name, %{_id: "blah3", bop: "plankton"}) + + resp = Couch.get("/#{db_name}/_changes?limit=1") + assert length(resp.body["results"]) == 1 + + resp = Couch.get("/#{db_name}/_changes?limit=2") + assert length(resp.body["results"]) == 2 + end + + @tag :with_db + test "erlang function filtered changes", context do + db_name = context[:db_name] + create_erlang_filters_view(db_name) + + resp = Couch.get("/#{db_name}/_changes?filter=erlang/foo") + assert Enum.empty?(resp.body["results"]) + + create_doc(db_name, %{_id: "doc1", value: 1}) + create_doc(db_name, %{_id: "doc2", value: 2}) + create_doc(db_name, %{_id: "doc3", value: 3}) + create_doc(db_name, %{_id: "doc4", value: 4}) + + 
resp = Couch.get("/#{db_name}/_changes?filter=erlang/foo") + + changes_ids = + resp.body["results"] + |> Enum.map(fn p -> p["id"] end) + + assert Enum.member?(changes_ids, "doc2") + assert Enum.member?(changes_ids, "doc4") + assert length(resp.body["results"]) == 2 + end + + @tag :with_db + test "changes filtering on docids", context do + db_name = context[:db_name] + doc_ids = %{doc_ids: ["doc1", "doc3", "doc4"]} + + resp = + Couch.post("/#{db_name}/_changes?filter=_doc_ids", + body: doc_ids, + headers: ["Content-Type": "application/json"] + ) + + assert Enum.empty?(resp.body["results"]) + + create_doc(db_name, %{_id: "doc1", value: 1}) + create_doc(db_name, %{_id: "doc2", value: 2}) + + resp = + Couch.post("/#{db_name}/_changes?filter=_doc_ids", + body: doc_ids, + headers: ["Content-Type": "application/json"] + ) + + assert length(resp.body["results"]) == 1 + assert Enum.at(resp.body["results"], 0)["id"] == "doc1" + + create_doc(db_name, %{_id: "doc3", value: 3}) + + resp = + Couch.post("/#{db_name}/_changes?filter=_doc_ids", + body: doc_ids, + headers: ["Content-Type": "application/json"] + ) + + assert length(resp.body["results"]) == 2 + + changes_ids = + resp.body["results"] + |> Enum.map(fn p -> p["id"] end) + + assert Enum.member?(changes_ids, "doc1") + assert Enum.member?(changes_ids, "doc3") + + encoded_doc_ids = doc_ids.doc_ids |> :jiffy.encode() + + resp = + Couch.get("/#{db_name}/_changes", + query: %{filter: "_doc_ids", doc_ids: encoded_doc_ids} + ) + + assert length(resp.body["results"]) == 2 + + changes_ids = + resp.body["results"] + |> Enum.map(fn p -> p["id"] end) + + assert Enum.member?(changes_ids, "doc1") + assert Enum.member?(changes_ids, "doc3") + end + + @tag :with_db + test "changes filtering on design docs", context do + db_name = context[:db_name] + + create_erlang_filters_view(db_name) + create_doc(db_name, %{_id: "doc1", value: 1}) + + resp = Couch.get("/#{db_name}/_changes?filter=_design") + assert length(resp.body["results"]) == 1 + 
assert Enum.at(resp.body["results"], 0)["id"] == "_design/erlang" + end + + @tag :with_db + test "COUCHDB-1037-empty result for ?limit=1&filter=foo/bar in some cases", + context do + db_name = context[:db_name] + + filter_fun = """ + function(doc, req) { + return (typeof doc.integer === "number"); + } + """ + + ddoc = %{ + _id: "_design/testdocs", + language: "javascript", + filters: %{ + testdocsonly: filter_fun + } + } + + create_doc(db_name, ddoc) + + ddoc = %{ + _id: "_design/foobar", + foo: "bar" + } + + create_doc(db_name, ddoc) + bulk_save(db_name, make_docs(0..4)) + + resp = Couch.get("/#{db_name}/_changes") + assert length(resp.body["results"]) == 7 + + resp = Couch.get("/#{db_name}/_changes?limit=1&filter=testdocs/testdocsonly") + assert length(resp.body["results"]) == 1 + # we can't guarantee ordering + assert Regex.match?(~r/[0-4]/, Enum.at(resp.body["results"], 0)["id"]) + + resp = Couch.get("/#{db_name}/_changes?limit=2&filter=testdocs/testdocsonly") + assert length(resp.body["results"]) == 2 + # we can't guarantee ordering + assert Regex.match?(~r/[0-4]/, Enum.at(resp.body["results"], 0)["id"]) + assert Regex.match?(~r/[0-4]/, Enum.at(resp.body["results"], 1)["id"]) + end + + @tag :with_db + test "COUCHDB-1256", context do + db_name = context[:db_name] + {:ok, resp} = create_doc(db_name, %{_id: "foo", a: 123}) + create_doc(db_name, %{_id: "bar", a: 456}) + foo_rev = resp.body["rev"] + + Couch.put("/#{db_name}/foo?new_edits=false", + headers: ["Content-Type": "application/json"], + body: %{_rev: foo_rev, a: 456} + ) + + resp = Couch.get("/#{db_name}/_changes?style=all_docs") + assert length(resp.body["results"]) == 2 + + resp = + Couch.get("/#{db_name}/_changes", + query: %{style: "all_docs", since: Enum.at(resp.body["results"], 0)["seq"]} + ) + + assert length(resp.body["results"]) == 1 + end + + @tag :with_db + test "COUCHDB-1923", context do + db_name = context[:db_name] + attachment_data = "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ=" + + docs = + 
make_docs(20..29, %{ + _attachments: %{ + "foo.txt": %{ + content_type: "text/plain", + data: attachment_data + }, + "bar.txt": %{ + content_type: "text/plain", + data: attachment_data + } + } + }) + + bulk_save(db_name, docs) + + resp = Couch.get("/#{db_name}/_changes?include_docs=true") + assert length(resp.body["results"]) == 10 + + first_doc = Enum.at(resp.body["results"], 0)["doc"] + + assert first_doc["_attachments"]["foo.txt"]["stub"] + assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "data") + assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "encoding") + assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "encoded_length") + assert first_doc["_attachments"]["bar.txt"]["stub"] + assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "data") + assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "encoding") + assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "encoded_length") + + resp = Couch.get("/#{db_name}/_changes?include_docs=true&attachments=true") + assert length(resp.body["results"]) == 10 + + first_doc = Enum.at(resp.body["results"], 0)["doc"] + + assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "stub") + assert first_doc["_attachments"]["foo.txt"]["data"] == attachment_data + assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "encoding") + assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "encoded_length") + + assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "stub") + assert first_doc["_attachments"]["bar.txt"]["data"] == attachment_data + assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "encoding") + assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "encoded_length") + + resp = Couch.get("/#{db_name}/_changes?include_docs=true&att_encoding_info=true") + assert length(resp.body["results"]) == 10 + + first_doc = Enum.at(resp.body["results"], 0)["doc"] + + assert first_doc["_attachments"]["foo.txt"]["stub"] + 
assert not Enum.member?(first_doc["_attachments"]["foo.txt"], "data") + assert first_doc["_attachments"]["foo.txt"]["encoding"] == "gzip" + assert first_doc["_attachments"]["foo.txt"]["encoded_length"] == 47 + assert first_doc["_attachments"]["bar.txt"]["stub"] + assert not Enum.member?(first_doc["_attachments"]["bar.txt"], "data") + assert first_doc["_attachments"]["bar.txt"]["encoding"] == "gzip" + assert first_doc["_attachments"]["bar.txt"]["encoded_length"] == 47 + end + + defp create_erlang_filters_view(db_name) do + erlang_fun = """ + fun({Doc}, Req) -> + case couch_util:get_value(<<"value">>, Doc) of + undefined -> false; + Value -> (Value rem 2) =:= 0; + _ -> false + end + end. + """ + + ddoc = %{ + _id: "_design/erlang", + language: "erlang", + filters: %{ + foo: erlang_fun + } + } + + create_doc(db_name, ddoc) + end + + defp create_filters_view(db_name) do + dynamic_fun = """ + function(doc, req) { + var field = req.query.field; + return doc[field]; + } + """ + + userctx_fun = """ + function(doc, req) { + var field = req.query.field; + return doc[field]; + } + """ + + blah_fun = """ + function(doc) { + if (doc._id == "blah") { + emit(null, null); + } + } + """ + + ddoc = %{ + _id: "_design/changes_filter", + filters: %{ + bop: "function(doc, req) { return (doc.bop);}", + dynamic: dynamic_fun, + userCtx: userctx_fun, + conflicted: "function(doc, req) { return (doc._conflicts);}" + }, + options: %{ + local_seq: true + }, + views: %{ + local_seq: %{ + map: "function(doc) {emit(doc._local_seq, null)}" + }, + blah: %{ + map: blah_fun + } + } + } + + create_doc(db_name, ddoc) + end + + defp get_change_rev_at(results, idx) do + results + |> Enum.at(idx) + |> Map.fetch!("changes") + |> Enum.at(0) + |> Map.fetch!("rev") + end + + defp open_doc(db_name, id) do + resp = Couch.get("/#{db_name}/#{id}") + assert resp.status_code == 200 + resp.body + end + + defp save_doc(db_name, body) do + resp = Couch.put("/#{db_name}/#{body["_id"]}", body: body) + assert 
resp.status_code in [201, 202] + assert resp.body["ok"] + Map.put(body, "_rev", resp.body["rev"]) end end diff --git a/test/elixir/test/cluster_with_quorum_test.exs b/test/elixir/test/cluster_with_quorum_test.exs index fc3b28a0b69..dc3d66be355 100644 --- a/test/elixir/test/cluster_with_quorum_test.exs +++ b/test/elixir/test/cluster_with_quorum_test.exs @@ -2,6 +2,7 @@ defmodule WithQuorumTest do use CouchTestCase @moduletag :with_quorum_test + @moduletag kind: :cluster @moduledoc """ Test CouchDB API in a cluster without quorum. diff --git a/test/elixir/test/cluster_without_quorum_test.exs b/test/elixir/test/cluster_without_quorum_test.exs index e0095c351ce..63371f1a60b 100644 --- a/test/elixir/test/cluster_without_quorum_test.exs +++ b/test/elixir/test/cluster_without_quorum_test.exs @@ -2,6 +2,7 @@ defmodule WithoutQuorumTest do use CouchTestCase @moduletag :without_quorum_test + @moduletag kind: :degraded_cluster @moduledoc """ Test CouchDB API in a cluster without quorum. diff --git a/test/elixir/test/coffee_test.exs b/test/elixir/test/coffee_test.exs index 3b26f5e59e7..3c7a1052bc9 100644 --- a/test/elixir/test/coffee_test.exs +++ b/test/elixir/test/coffee_test.exs @@ -2,6 +2,7 @@ defmodule CoffeeTest do use CouchTestCase @moduletag :coffee + @moduletag kind: :single_node @moduledoc """ Test basic coffeescript functionality. 
diff --git a/test/elixir/test/compact_test.exs b/test/elixir/test/compact_test.exs index d99a7a78ee3..18aeab2de7a 100644 --- a/test/elixir/test/compact_test.exs +++ b/test/elixir/test/compact_test.exs @@ -2,6 +2,7 @@ defmodule CompactTest do use CouchTestCase @moduletag :compact + @moduletag kind: :single_node @moduledoc """ Test CouchDB compaction @@ -82,18 +83,6 @@ defmodule CompactTest do assert Couch.post("/#{db}/_ensure_full_commit").body["ok"] == true end - defp compact(db) do - assert Couch.post("/#{db}/_compact").status_code == 202 - - retry_until( - fn -> - Couch.get("/#{db}").body["compact_running"] == false - end, - 200, - 20_000 - ) - end - defp get_info(db) do Couch.get("/#{db}").body end diff --git a/test/elixir/test/config/test-config.ini b/test/elixir/test/config/test-config.ini new file mode 100644 index 00000000000..1980139d12b --- /dev/null +++ b/test/elixir/test/config/test-config.ini @@ -0,0 +1,2 @@ +[chttpd] +authentication_handlers = {chttpd_auth, jwt_authentication_handler}, {chttpd_auth, proxy_authentication_handler}, {chttpd_auth, cookie_authentication_handler}, {chttpd_auth, default_authentication_handler} diff --git a/test/elixir/test/config_test.exs b/test/elixir/test/config_test.exs index 53c5bc82e58..bb89d8683e4 100644 --- a/test/elixir/test/config_test.exs +++ b/test/elixir/test/config_test.exs @@ -2,6 +2,7 @@ defmodule ConfigTest do use CouchTestCase @moduletag :config + @moduletag kind: :single_node @moduledoc """ Test CouchDB config API diff --git a/test/elixir/test/conflicts_test.exs b/test/elixir/test/conflicts_test.exs index a45f5c4ed36..adf16bc4d42 100644 --- a/test/elixir/test/conflicts_test.exs +++ b/test/elixir/test/conflicts_test.exs @@ -2,6 +2,7 @@ defmodule RevisionTest do use CouchTestCase @moduletag :conflicts + @moduletag kind: :single_node @moduledoc """ Test CouchDB conflicts diff --git a/test/elixir/test/cookie_auth_test.exs b/test/elixir/test/cookie_auth_test.exs index b10ee84f124..87de1abd3c3 100644 --- 
a/test/elixir/test/cookie_auth_test.exs +++ b/test/elixir/test/cookie_auth_test.exs @@ -2,6 +2,7 @@ defmodule CookieAuthTest do use CouchTestCase @moduletag :authentication + @moduletag kind: :single_node @users_db "_users" @@ -34,13 +35,14 @@ defmodule CookieAuthTest do # Create db if not exists Couch.put("/#{@users_db}") - resp = - Couch.get( - "/#{@users_db}/_changes", - query: [feed: "longpoll", timeout: 5000, filter: "_design"] - ) - - assert resp.body + retry_until(fn -> + resp = + Couch.get( + "/#{@users_db}/_changes", + query: [feed: "longpoll", timeout: 5000, filter: "_design"] + ) + length(resp.body["results"]) > 0 + end) on_exit(&tear_down/0) diff --git a/test/elixir/test/copy_doc_test.exs b/test/elixir/test/copy_doc_test.exs index 4641ff6ea12..f227fdac594 100644 --- a/test/elixir/test/copy_doc_test.exs +++ b/test/elixir/test/copy_doc_test.exs @@ -2,6 +2,7 @@ defmodule CopyDocTest do use CouchTestCase @moduletag :copy_doc + @moduletag kind: :single_node @moduledoc """ Test CouchDB Copy Doc diff --git a/test/elixir/test/design_docs_query_test.exs b/test/elixir/test/design_docs_query_test.exs new file mode 100644 index 00000000000..9784f8debb3 --- /dev/null +++ b/test/elixir/test/design_docs_query_test.exs @@ -0,0 +1,274 @@ +defmodule DesignDocsQueryTest do + use CouchTestCase + + @moduletag :design_docs + @moduletag kind: :single_node + + @moduledoc """ + Test CouchDB /{db}/_design_docs + """ + + setup_all do + db_name = random_db_name() + {:ok, _} = create_db(db_name) + on_exit(fn -> delete_db(db_name) end) + + bulk_save(db_name, make_docs(1..5)) + + Enum.each(1..5, fn x -> create_ddoc(db_name, x) end) + + {:ok, [db_name: db_name]} + end + + defp create_ddoc(db_name, idx) do + ddoc = %{ + _id: "_design/ddoc0#{idx}", + views: %{ + testing: %{ + map: "function(){emit(1,1)}" + } + } + } + + create_doc(db_name, ddoc) + end + + test "query _design_docs (GET with no parameters)", context do + db_name = context[:db_name] + resp = 
Couch.get("/#{db_name}/_design_docs") + assert resp.status_code == 200, "standard get should be 200" + assert resp.body["total_rows"] == 5, "total_rows mismatch" + assert length(resp.body["rows"]) == 5, "amount of rows mismatch" + end + + test "query _design_docs with single key", context do + db_name = context[:db_name] + resp = Couch.get("/#{db_name}/_design_docs?key=\"_design/ddoc03\"") + + assert resp.status_code == 200, "standard get should be 200" + assert length(resp.body["rows"]) == 1, "amount of rows mismatch" + assert Enum.at(resp.body["rows"], 0)["key"] == "_design/ddoc03" + end + + test "query _design_docs with multiple key", context do + resp = + Couch.get( + "/#{context[:db_name]}/_design_docs", + query: %{ + :keys => "[\"_design/ddoc02\", \"_design/ddoc03\"]" + } + ) + + assert resp.status_code == 200 + assert length(Map.get(resp, :body)["rows"]) == 2 + end + + test "POST with empty body", context do + resp = + Couch.post( + "/#{context[:db_name]}/_design_docs", + body: %{} + ) + + assert resp.status_code == 200 + assert length(Map.get(resp, :body)["rows"]) == 5 + end + + test "POST with keys and limit", context do + resp = + Couch.post( + "/#{context[:db_name]}/_design_docs", + body: %{ + :keys => ["_design/ddoc02", "_design/ddoc03"], + :limit => 1 + } + ) + + assert resp.status_code == 200 + assert length(Map.get(resp, :body)["rows"]) == 1 + end + + test "POST with query parameter and JSON body", context do + resp = + Couch.post( + "/#{context[:db_name]}/_design_docs", + query: %{ + :limit => 1 + }, + body: %{ + :keys => ["_design/ddoc02", "_design/ddoc03"] + } + ) + + assert resp.status_code == 200 + assert length(Map.get(resp, :body)["rows"]) == 1 + end + + test "POST edge case with colliding parameters - query takes precedence", context do + resp = + Couch.post( + "/#{context[:db_name]}/_design_docs", + query: %{ + :limit => 0 + }, + body: %{ + :keys => ["_design/ddoc02", "_design/ddoc03"], + :limit => 2 + } + ) + + assert resp.status_code == 
200 + assert Enum.empty?(Map.get(resp, :body)["rows"]) + end + + test "query _design_docs descending=true", context do + db_name = context[:db_name] + resp = Couch.get("/#{db_name}/_design_docs?descending=true") + + assert resp.status_code == 200, "standard get should be 200" + assert length(resp.body["rows"]) == 5, "amount of rows mismatch" + assert Enum.at(resp.body["rows"], 0)["key"] == "_design/ddoc05" + end + + test "query _design_docs descending=false", context do + db_name = context[:db_name] + resp = Couch.get("/#{db_name}/_design_docs?descending=false") + + assert resp.status_code == 200, "standard get should be 200" + assert length(resp.body["rows"]) == 5, "amount of rows mismatch" + assert Enum.at(resp.body["rows"], 0)["key"] == "_design/ddoc01" + end + + test "query _design_docs end_key", context do + db_name = context[:db_name] + resp = Couch.get("/#{db_name}/_design_docs?end_key=\"_design/ddoc03\"") + + assert resp.status_code == 200, "standard get should be 200" + assert length(resp.body["rows"]) == 3, "amount of rows mismatch" + assert Enum.at(resp.body["rows"], 2)["key"] == "_design/ddoc03" + end + + test "query _design_docs endkey", context do + db_name = context[:db_name] + resp = Couch.get("/#{db_name}/_design_docs?endkey=\"_design/ddoc03\"") + + assert resp.status_code == 200, "standard get should be 200" + assert length(resp.body["rows"]) == 3, "amount of rows mismatch" + assert Enum.at(resp.body["rows"], 2)["key"] == "_design/ddoc03" + end + + test "query _design_docs start_key", context do + db_name = context[:db_name] + resp = Couch.get("/#{db_name}/_design_docs?start_key=\"_design/ddoc03\"") + + assert resp.status_code == 200, "standard get should be 200" + assert length(resp.body["rows"]) == 3, "amount of rows mismatch" + assert Enum.at(resp.body["rows"], 0)["key"] == "_design/ddoc03" + end + + test "query _design_docs startkey", context do + db_name = context[:db_name] + resp = 
Couch.get("/#{db_name}/_design_docs?startkey=\"_design/ddoc03\"") + + assert resp.status_code == 200, "standard get should be 200" + assert length(resp.body["rows"]) == 3, "amount of rows mismatch" + assert Enum.at(resp.body["rows"], 0)["key"] == "_design/ddoc03" + end + + test "query _design_docs end_key inclusive_end=true", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_design_docs", + query: [end_key: "\"_design/ddoc03\"", inclusive_end: true] + ) + + assert resp.status_code == 200, "standard get should be 200" + assert length(resp.body["rows"]) == 3, "amount of rows mismatch" + assert Enum.at(resp.body["rows"], 2)["key"] == "_design/ddoc03" + end + + test "query _design_docs end_key inclusive_end=false", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_design_docs", + query: [end_key: "\"_design/ddoc03\"", inclusive_end: false] + ) + + assert resp.status_code == 200, "standard get should be 200" + assert length(resp.body["rows"]) == 2, "amount of rows mismatch" + assert Enum.at(resp.body["rows"], 1)["key"] == "_design/ddoc02" + end + + test "query _design_docs end_key inclusive_end=false descending", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_design_docs", + query: [end_key: "\"_design/ddoc03\"", inclusive_end: false, descending: true] + ) + + assert resp.status_code == 200, "standard get should be 200" + assert length(resp.body["rows"]) == 2, "amount of rows mismatch" + assert Enum.at(resp.body["rows"], 1)["key"] == "_design/ddoc04" + end + + test "query _design_docs end_key limit", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_design_docs", + query: [end_key: "\"_design/ddoc05\"", limit: 2] + ) + + assert resp.status_code == 200, "standard get should be 200" + assert length(resp.body["rows"]) == 2, "amount of rows mismatch" + assert Enum.at(resp.body["rows"], 1)["key"] == "_design/ddoc02" + end + + test "query _design_docs end_key 
skip", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_design_docs", + query: [end_key: "\"_design/ddoc05\"", skip: 2] + ) + + assert resp.status_code == 200, "standard get should be 200" + assert length(resp.body["rows"]) == 3, "amount of rows mismatch" + assert Enum.at(resp.body["rows"], 0)["key"] == "_design/ddoc03" + assert Enum.at(resp.body["rows"], 2)["key"] == "_design/ddoc05" + end + + test "query _design_docs update_seq", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_design_docs", + query: [end_key: "\"_design/ddoc05\"", update_seq: true] + ) + + assert resp.status_code == 200, "standard get should be 200" + assert Map.has_key?(resp.body, "update_seq") + end + + test "query _design_docs post with keys", context do + db_name = context[:db_name] + + resp = + Couch.post("/#{db_name}/_design_docs", + headers: ["Content-Type": "application/json"], + body: %{keys: ["_design/ddoc02", "_design/ddoc03"]} + ) + + keys = + resp.body["rows"] + |> Enum.map(fn p -> p["key"] end) + + assert resp.status_code == 200, "standard get should be 200" + assert length(resp.body["rows"]) == 2, "amount of rows mismatch" + assert Enum.member?(keys, "_design/ddoc03") + assert Enum.member?(keys, "_design/ddoc02") + end +end diff --git a/test/elixir/test/design_docs_test.exs b/test/elixir/test/design_docs_test.exs index ed0a0dfb527..86bdd5aa58c 100644 --- a/test/elixir/test/design_docs_test.exs +++ b/test/elixir/test/design_docs_test.exs @@ -2,107 +2,479 @@ defmodule DesignDocsTest do use CouchTestCase @moduletag :design_docs + @moduletag kind: :single_node - @moduledoc """ - Test CouchDB /{db}/_design_docs - """ + @design_doc %{ + _id: "_design/test", + language: "javascript", + autoupdate: false, + whatever: %{ + stringzone: "exports.string = 'plankton';", + commonjs: %{ + whynot: """ + exports.test = require('../stringzone'); + exports.foo = require('whatever/stringzone'); + """, + upper: """ + exports.testing = 
require('./whynot').test.string.toUpperCase()+ + module.id+require('./whynot').foo.string + """, + circular_one: "require('./circular_two'); exports.name = 'One';", + circular_two: "require('./circular_one'); exports.name = 'Two';" + }, + # paths relative to parent + idtest1: %{ + a: %{ + b: %{d: "module.exports = require('../c/e').id;"}, + c: %{e: "exports.id = module.id;"} + } + }, + # multiple paths relative to parent + idtest2: %{ + a: %{ + b: %{d: "module.exports = require('../../a/c/e').id;"}, + c: %{e: "exports.id = module.id;"} + } + }, + # paths relative to module + idtest3: %{ + a: %{ + b: "module.exports = require('./c/d').id;", + c: %{ + d: "module.exports = require('./e');", + e: "exports.id = module.id;" + } + } + }, + # paths relative to module and parent + idtest4: %{ + a: %{ + b: "module.exports = require('../a/./c/d').id;", + c: %{ + d: "module.exports = require('./e');", + e: "exports.id = module.id;" + } + } + }, + # paths relative to root + idtest5: %{ + a: "module.exports = require('whatever/idtest5/b').id;", + b: "exports.id = module.id;" + } + }, + views: %{ + all_docs_twice: %{ + map: """ + function(doc) { + emit(doc.integer, null); + emit(doc.integer, null); + } + """ + }, + no_docs: %{ + map: """ + function(doc) {} + """ + }, + single_doc: %{ + map: """ + function(doc) { + if (doc._id === "1") { + emit(1, null); + } + } + """ + }, + summate: %{ + map: """ + function(doc) { + emit(doc.integer, doc.integer); + } + """, + reduce: """ + function(keys, values) { + return sum(values); + } + """ + }, + summate2: %{ + map: """ + function(doc) { + emit(doc.integer, doc.integer); + } + """, + reduce: """ + function(keys, values) { + return sum(values); + } + """ + }, + huge_src_and_results: %{ + map: """ + function(doc) { + if (doc._id === "1") { + emit("#{String.duplicate("a", 16)}", null); + } + } + """, + reduce: """ + function(keys, values) { + return "#{String.duplicate("a", 16)}"; + } + """ + }, + lib: %{ + baz: "exports.baz = 'bam';", + foo: 
%{ + foo: "exports.foo = 'bar';", + boom: "exports.boom = 'ok';", + zoom: "exports.zoom = 'yeah';" + } + }, + commonjs: %{ + map: """ + function(doc) { + emit(null, require('views/lib/foo/boom').boom); + } + """ + } + }, + shows: %{ + simple: """ + function() { + return 'ok'; + } + """, + requirey: """ + function() { + var lib = require('whatever/commonjs/upper'); + return lib.testing; + } + """, + circular: """ + function() { + var lib = require('whatever/commonjs/upper'); + return JSON.stringify(this); + } + """, + circular_require: """ + function() { + return require('whatever/commonjs/circular_one').name; + } + """, + idtest1: """ + function() { + return require('whatever/idtest1/a/b/d'); + } + """, + idtest2: """ + function() { + return require('whatever/idtest2/a/b/d'); + } + """, + idtest3: """ + function() { + return require('whatever/idtest3/a/b'); + } + """, + idtest4: """ + function() { + return require('whatever/idtest4/a/b'); + } + """, + idtest5: """ + function() { + return require('whatever/idtest5/a'); + } + """ + } + } setup_all do db_name = random_db_name() {:ok, _} = create_db(db_name) on_exit(fn -> delete_db(db_name) end) - {:ok, _} = create_doc( - db_name, - %{ - _id: "_design/foo", - bar: "baz" - } - ) + {:ok, _} = create_doc(db_name, @design_doc) + {:ok, _} = create_doc(db_name, %{}) + {:ok, [db_name: db_name]} + end - {:ok, _} = create_doc( - db_name, - %{ - _id: "_design/foo2", - bar: "baz2" - } - ) + test "consistent _rev for design docs", context do + resp = Couch.get("/#{context[:db_name]}/_design/test") + assert resp.status_code == 200 + first_db_rev = resp.body["_rev"] - {:ok, [db_name: db_name]} + second_db_name = random_db_name() + create_db(second_db_name) + {:ok, resp2} = create_doc(second_db_name, @design_doc) + assert first_db_rev == resp2.body["rev"] end - test "GET with no parameters", context do - resp = Couch.get( - "/#{context[:db_name]}/_design_docs" - ) + test "commonjs require", context do + db_name = context[:db_name] + 
resp = Couch.get("/#{db_name}/_design/test/_show/requirey") + assert resp.status_code == 200 + assert resp.body == "PLANKTONwhatever/commonjs/upperplankton" + + resp = Couch.get("/#{db_name}/_design/test/_show/circular") + assert resp.status_code == 200 + + result = + resp.body + |> IO.iodata_to_binary() + |> :jiffy.decode([:return_maps]) + assert result["language"] == "javascript" + end + + test "circular commonjs dependencies", context do + db_name = context[:db_name] + resp = Couch.get("/#{db_name}/_design/test/_show/circular_require") assert resp.status_code == 200 - assert length(Map.get(resp, :body)["rows"]) == 2 + assert resp.body == "One" end - test "GET with multiple keys", context do - resp = Couch.get( - "/#{context[:db_name]}/_design_docs", - query: %{ - :keys => "[\"_design/foo\", \"_design/foo2\"]", - } - ) + test "module id values are as expected", context do + db_name = context[:db_name] + check_id_value(db_name, "idtest1", "whatever/idtest1/a/c/e") + check_id_value(db_name, "idtest2", "whatever/idtest2/a/c/e") + check_id_value(db_name, "idtest3", "whatever/idtest3/a/c/e") + check_id_value(db_name, "idtest4", "whatever/idtest4/a/c/e") + check_id_value(db_name, "idtest5", "whatever/idtest5/b") + end + + defp check_id_value(db_name, id, expected) do + resp = Couch.get("/#{db_name}/_design/test/_show/#{id}") assert resp.status_code == 200 - assert length(Map.get(resp, :body)["rows"]) == 2 + assert resp.body == expected end - test "POST with empty body", context do - resp = Couch.post( - "/#{context[:db_name]}/_design_docs", - body: %{} - ) + @tag :with_db + test "test that we get correct design doc info back", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, @design_doc) + + resp = Couch.get("/#{db_name}/_design/test/_info") + prev_view_sig = resp.body["view_index"]["signature"] + prev_view_size = resp.body["view_index"]["sizes"]["file"] + num_docs = 500 + bulk_save(db_name, make_docs(1..(num_docs + 1))) + + 
Couch.get("/#{db_name}/_design/test/_view/summate", query: [stale: "ok"]) + + for _x <- 0..1 do + resp = Couch.get("/#{db_name}/_design/test/_info") + assert resp.body["name"] == "test" + assert resp.body["view_index"]["sizes"]["file"] == prev_view_size + assert resp.body["view_index"]["compact_running"] == false + assert resp.body["view_index"]["signature"] == prev_view_sig + end + end + + test "commonjs in map functions", context do + db_name = context[:db_name] + + resp = Couch.get("/#{db_name}/_design/test/_view/commonjs", query: [limit: 1]) assert resp.status_code == 200 - assert length(Map.get(resp, :body)["rows"]) == 2 + assert Enum.at(resp.body["rows"], 0)["value"] == "ok" end - test "POST with keys and limit", context do - resp = Couch.post( - "/#{context[:db_name]}/_design_docs", - body: %{ - :keys => ["_design/foo", "_design/foo2"], - :limit => 1 - } - ) + test "_all_docs view returns correctly with keys", context do + db_name = context[:db_name] + resp = + Couch.get("/#{db_name}/_all_docs", + query: [startkey: :jiffy.encode("_design"), endkey: :jiffy.encode("_design0")] + ) + + assert length(resp.body["rows"]) == 1 + end + + @tag :with_db + test "all_docs_twice", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, @design_doc) + + num_docs = 500 + bulk_save(db_name, make_docs(1..(2 * num_docs))) + + for _x <- 0..1 do + test_all_docs_twice(db_name, num_docs) + end + end + + defp test_all_docs_twice(db_name, num_docs) do + resp = Couch.get("/#{db_name}/_design/test/_view/all_docs_twice") assert resp.status_code == 200 - assert length(Map.get(resp, :body)["rows"]) == 1 + rows = resp.body["rows"] + + for x <- 0..num_docs do + assert Map.get(Enum.at(rows, 2 * x), "key") == x + 1 + assert Map.get(Enum.at(rows, 2 * x + 1), "key") == x + 1 + end + + resp = Couch.get("/#{db_name}/_design/test/_view/no_docs") + assert resp.body["total_rows"] == 0 + + resp = Couch.get("/#{db_name}/_design/test/_view/single_doc") + assert 
resp.body["total_rows"] == 1 end - test "POST with query parameter and JSON body", context do - resp = Couch.post( - "/#{context[:db_name]}/_design_docs", - query: %{ - :limit => 1 - }, - body: %{ - :keys => ["_design/foo", "_design/foo2"] + @tag :with_db + test "language not specified, Javascript is implied", context do + db_name = context[:db_name] + bulk_save(db_name, make_docs(1..2)) + + design_doc_2 = %{ + _id: "_design/test2", + views: %{ + single_doc: %{ + map: """ + function(doc) { + if (doc._id === "1") { + emit(1, null); + } + } + """ + } } - ) + } + + {:ok, _} = create_doc(db_name, design_doc_2) + resp = Couch.get("/#{db_name}/_design/test2/_view/single_doc") assert resp.status_code == 200 - assert length(Map.get(resp, :body)["rows"]) == 1 + assert length(resp.body["rows"]) == 1 end - test "POST edge case with colliding parameters - query takes precedence", context do - resp = Couch.post( - "/#{context[:db_name]}/_design_docs", - query: %{ - :limit => 0 - }, - body: %{ - :keys => ["_design/foo", "_design/foo2"], - :limit => 2 + @tag :with_db + test "startkey and endkey", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, @design_doc) + + num_docs = 500 + bulk_save(db_name, make_docs(1..(2 * num_docs))) + + resp = Couch.get("/#{db_name}/_design/test/_view/summate") + assert Enum.at(resp.body["rows"], 0)["value"] == summate(num_docs * 2) + + resp = + Couch.get("/#{db_name}/_design/test/_view/summate", + query: [startkey: 4, endkey: 4] + ) + + assert Enum.at(resp.body["rows"], 0)["value"] == 4 + + resp = + Couch.get("/#{db_name}/_design/test/_view/summate", + query: [startkey: 4, endkey: 5] + ) + + assert Enum.at(resp.body["rows"], 0)["value"] == 9 + + resp = + Couch.get("/#{db_name}/_design/test/_view/summate", + query: [startkey: 4, endkey: 6] + ) + + assert Enum.at(resp.body["rows"], 0)["value"] == 15 + + # test start_key and end_key aliases + resp = + Couch.get("/#{db_name}/_design/test/_view/summate", + query: [start_key: 4, 
end_key: 6] + ) + + assert Enum.at(resp.body["rows"], 0)["value"] == 15 + + # Verify that a shared index (view def is an exact copy of "summate") + # does not confuse the reduce stage + resp = + Couch.get("/#{db_name}/_design/test/_view/summate2", + query: [startkey: 4, endkey: 6] + ) + + assert Enum.at(resp.body["rows"], 0)["value"] == 15 + + for x <- 0..Integer.floor_div(num_docs, 60) do + resp = + Couch.get("/#{db_name}/_design/test/_view/summate", + query: [startkey: x * 30, endkey: num_docs - x * 30] + ) + + assert Enum.at(resp.body["rows"], 0)["value"] == + summate(num_docs - x * 30) - summate(x * 30 - 1) + end + end + + defp summate(n) do + (n + 1) * (n / 2) + end + + @tag :with_db + test "design doc deletion", context do + db_name = context[:db_name] + {:ok, resp} = create_doc(db_name, @design_doc) + + del_resp = + Couch.delete("/#{db_name}/#{resp.body["id"]}", query: [rev: resp.body["rev"]]) + + assert del_resp.status_code == 200 + + resp = Couch.get("/#{db_name}/#{resp.body["id"]}") + assert resp.status_code == 404 + + resp = Couch.get("/#{db_name}/_design/test/_view/no_docs") + assert resp.status_code == 404 + end + + @tag :with_db + test "validate doc update", context do + db_name = context[:db_name] + + # COUCHDB-1227 - if a design document is deleted, by adding a "_deleted" + # field with the boolean value true, its validate_doc_update functions + # should no longer have effect. 
+ + ddoc = %{ + _id: "_design/test", + language: "javascript", + validate_doc_update: """ + function(newDoc, oldDoc, userCtx, secObj) { + if (newDoc.value % 2 == 0) { + throw({forbidden: "dont like even numbers"}); + } + return true; } - ) + """ + } - assert resp.status_code == 200 - assert Enum.empty?(Map.get(resp, :body)["rows"]) + {:ok, resp_ddoc} = create_doc(db_name, ddoc) + + resp = + Couch.post("/#{db_name}", + body: %{_id: "doc1", value: 4} + ) + + assert resp.status_code == 403 + assert resp.body["reason"] == "dont like even numbers" + + ddoc_resp = Couch.get("/#{db_name}/#{resp_ddoc.body["id"]}") + + ddoc = + ddoc_resp.body + |> Map.put("_deleted", true) + + del_resp = + Couch.post("/#{db_name}", + body: ddoc + ) + + assert del_resp.status_code in [201, 202] + + {:ok, _} = create_doc(db_name, %{_id: "doc1", value: 4}) end end diff --git a/test/elixir/test/design_options_test.exs b/test/elixir/test/design_options_test.exs new file mode 100644 index 00000000000..feb47714dda --- /dev/null +++ b/test/elixir/test/design_options_test.exs @@ -0,0 +1,75 @@ +defmodule DesignOptionsTest do + use CouchTestCase + + @moduletag :design_docs + @moduletag kind: :single_node + + @moduledoc """ + Test CouchDB design documents options include_design and local_seq + """ + @tag :with_db + test "design doc options - include_desing=true", context do + db_name = context[:db_name] + + create_test_view(db_name, "_design/fu", %{include_design: true}) + + resp = Couch.get("/#{db_name}/_design/fu/_view/data") + assert resp.status_code == 200 + assert length(Map.get(resp, :body)["rows"]) == 1 + assert Enum.at(resp.body["rows"], 0)["value"] == "_design/fu" + end + + @tag :with_db + test "design doc options - include_desing=false", context do + db_name = context[:db_name] + + create_test_view(db_name, "_design/bingo", %{include_design: false}) + + resp = Couch.get("/#{db_name}/_design/bingo/_view/data") + assert resp.status_code == 200 + assert Enum.empty?(Map.get(resp, :body)["rows"]) 
+ end + + @tag :with_db + test "design doc options - include_design default value", context do + db_name = context[:db_name] + + create_test_view(db_name, "_design/bango", %{}) + + resp = Couch.get("/#{db_name}/_design/bango/_view/data") + assert resp.status_code == 200 + assert Enum.empty?(Map.get(resp, :body)["rows"]) + end + + @tag :with_db + test "design doc options - local_seq=true", context do + db_name = context[:db_name] + + create_test_view(db_name, "_design/fu", %{include_design: true, local_seq: true}) + create_doc(db_name, %{}) + resp = Couch.get("/#{db_name}/_design/fu/_view/with_seq") + + row_with_key = + resp.body["rows"] + |> Enum.filter(fn p -> p["key"] != :null end) + + assert length(row_with_key) == 2 + end + + defp create_test_view(db_name, id, options) do + map = "function (doc) {emit(null, doc._id);}" + withseq = "function(doc) {emit(doc._local_seq, null)}" + + design_doc = %{ + _id: id, + language: "javascript", + options: options, + views: %{ + data: %{map: map}, + with_seq: %{map: withseq} + } + } + + create_doc(db_name, design_doc) + end +end diff --git a/test/elixir/test/design_paths_test.exs b/test/elixir/test/design_paths_test.exs new file mode 100644 index 00000000000..f90172a0800 --- /dev/null +++ b/test/elixir/test/design_paths_test.exs @@ -0,0 +1,77 @@ +defmodule DesignPathTest do + use CouchTestCase + + @moduletag :design_docs + @moduletag kind: :single_node + + @moduledoc """ + Test CouchDB design documents path + """ + @tag :with_db + test "design doc path", context do + db_name = context[:db_name] + ddoc_path_test(db_name) + end + + @tag :with_db_name + test "design doc path with slash in db name", context do + db_name = URI.encode_www_form(context[:db_name] <> "/with_slashes") + create_db(db_name) + ddoc_path_test(db_name) + end + + defp ddoc_path_test(db_name) do + create_test_view(db_name, "_design/test") + + resp = Couch.get("/#{db_name}/_design/test") + assert resp.body["_id"] == "_design/test" + + resp = + 
Couch.get(Couch.process_url("/#{db_name}/_design%2Ftest"), + follow_redirects: true + ) + + assert resp.body["_id"] == "_design/test" + + resp = Couch.get("/#{db_name}/_design/test/_view/testing") + assert Enum.empty?(Map.get(resp, :body)["rows"]) + + design_doc2 = %{ + _id: "_design/test2", + views: %{ + testing: %{ + map: "function(){emit(1,1)}" + } + } + } + + resp = Couch.put("/#{db_name}/_design/test2", body: design_doc2) + assert resp.status_code == 201 + + resp = Couch.get("/#{db_name}/_design/test2") + assert resp.body["_id"] == "_design/test2" + + resp = + Couch.get(Couch.process_url("/#{db_name}/_design%2Ftest2"), + follow_redirects: true + ) + + assert resp.body["_id"] == "_design/test2" + + resp = Couch.get("/#{db_name}/_design/test2/_view/testing") + assert Enum.empty?(Map.get(resp, :body)["rows"]) + end + + defp create_test_view(db_name, id) do + design_doc = %{ + _id: id, + views: %{ + testing: %{ + map: "function(){emit(1,1)}" + } + } + } + + create_doc(db_name, design_doc) + end +end diff --git a/test/elixir/test/erlang_views_test.exs b/test/elixir/test/erlang_views_test.exs index 3346c22748f..afe9d6ccb3e 100644 --- a/test/elixir/test/erlang_views_test.exs +++ b/test/elixir/test/erlang_views_test.exs @@ -2,6 +2,7 @@ defmodule ErlangViewsTest do use CouchTestCase @moduletag :erlang_views + @moduletag kind: :single_node @moduledoc """ basic 'smoke tests' of erlang views. 
diff --git a/test/elixir/test/etags_head_test.exs b/test/elixir/test/etags_head_test.exs index 9b9ff8bb04a..beb12bdd05e 100644 --- a/test/elixir/test/etags_head_test.exs +++ b/test/elixir/test/etags_head_test.exs @@ -2,6 +2,7 @@ defmodule EtagsHeadTest do use CouchTestCase @moduletag :etags + @moduletag kind: :single_node @tag :with_db test "etag header on creation", context do diff --git a/test/elixir/test/form_submit_test.exs b/test/elixir/test/form_submit_test.exs new file mode 100644 index 00000000000..099f395fccb --- /dev/null +++ b/test/elixir/test/form_submit_test.exs @@ -0,0 +1,30 @@ +defmodule FormSubmitTest do + use CouchTestCase + + @moduletag :form_submit + @moduletag kind: :single_node + + @moduledoc """ + Test that form submission is invalid + This is a port of form_submit.js + """ + + @tag :with_db + test "form submission gives back invalid content-type", context do + headers = [ + Referer: "http://127.0.0.1:15984", + "Content-Type": "application/x-www-form-urlencoded" + ] + + body = %{} + + %{:body => response_body, :status_code => status_code} = + Couch.post("/#{context[:db_name]}/baz", headers: headers, body: body) + + %{"error" => error, "reason" => reason} = response_body + + assert status_code == 415 + assert error == "bad_content_type" + assert reason == "Content-Type must be multipart/form-data" + end +end diff --git a/test/elixir/test/helper_test.exs b/test/elixir/test/helper_test.exs index 19d70eac8ee..1e498a15c30 100644 --- a/test/elixir/test/helper_test.exs +++ b/test/elixir/test/helper_test.exs @@ -5,6 +5,9 @@ defmodule HelperTest do Test helper code """ + @moduletag :helper + @moduletag kind: :single_node + test "retry_until handles boolean conditions", _context do retry_until(fn -> true diff --git a/test/elixir/test/http_test.exs b/test/elixir/test/http_test.exs new file mode 100644 index 00000000000..14cecfe7b7f --- /dev/null +++ b/test/elixir/test/http_test.exs @@ -0,0 +1,82 @@ +defmodule HttpTest do + use CouchTestCase + + 
@moduletag :http + @moduletag kind: :single_node + + @tag :with_db + test "location header", context do + db_name = context[:db_name] + resp = Couch.put("/#{db_name}/test", body: %{}) + db_url = Couch.process_url("/" <> db_name) + assert resp.headers.hdrs["location"] == db_url <> "/test" + end + + @tag :with_db + test "location header should include X-Forwarded-Host", context do + db_name = context[:db_name] + + resp = + Couch.put("/#{db_name}/test2", + body: %{}, + headers: ["X-Forwarded-Host": "mysite.com"] + ) + + assert resp.headers.hdrs["location"] == "http://mysite.com/#{db_name}/test2" + end + + @tag :with_db + test "location header should include custom header", context do + db_name = context[:db_name] + + server_config = [ + %{ + :section => "httpd", + :key => "x_forwarded_host", + :value => "X-Host" + } + ] + + run_on_modified_server(server_config, fn -> + resp = + Couch.put("/#{db_name}/test3", + body: %{}, + headers: ["X-Host": "mysite2.com"] + ) + + assert resp.headers.hdrs["location"] == "http://mysite2.com/#{db_name}/test3" + end) + end + + @tag :with_db + test "COUCHDB-708: newlines document names", context do + db_name = context[:db_name] + + resp = + Couch.put("/#{db_name}/docid%0A/attachment.txt", + body: %{}, + headers: ["Content-Type": "text/plain;charset=utf-8"] + ) + + db_url = Couch.process_url("/" <> db_name) + assert resp.headers.hdrs["location"] == db_url <> "/docid%0A/attachment.txt" + + resp = + Couch.put("/#{db_name}/docidtest%0A", + body: %{}, + headers: ["Content-Type": "text/plain;charset=utf-8"] + ) + + db_url = Couch.process_url("/" <> db_name) + assert resp.headers.hdrs["location"] == db_url <> "/docidtest%0A" + + resp = + Couch.post("/#{db_name}/", + body: %{_id: "docidtestpost%0A"}, + headers: ["Content-Type": "application/json"] + ) + + db_url = Couch.process_url("/" <> db_name) + assert resp.headers.hdrs["location"] == db_url <> "/docidtestpost%250A" + end +end diff --git a/test/elixir/test/invalid_docids_test.exs 
b/test/elixir/test/invalid_docids_test.exs index edce5cc6595..c1d30c5fc53 100644 --- a/test/elixir/test/invalid_docids_test.exs +++ b/test/elixir/test/invalid_docids_test.exs @@ -2,6 +2,7 @@ defmodule InvalidDocIDsTest do use CouchTestCase @moduletag :invalid_doc_ids + @moduletag kind: :single_node @moduledoc """ Test invalid document ids diff --git a/test/elixir/test/jsonp_test.exs b/test/elixir/test/jsonp_test.exs new file mode 100644 index 00000000000..2e1934a22d9 --- /dev/null +++ b/test/elixir/test/jsonp_test.exs @@ -0,0 +1,117 @@ +defmodule JsonpTest do + use CouchTestCase + + @moduletag :jsonp + @moduletag kind: :single_node + + @tag :with_db + test "jsonp not configured callbacks", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, %{_id: "0", a: 0, b: 0}) + + resp = Couch.get("/#{db_name}/0?callback=jsonp_no_chunk") + assert resp.status_code == 200 + assert resp.headers.hdrs["content-type"] == "application/json" + end + + @tag :with_db + test "jsonp unchunked callbacks", context do + db_name = context[:db_name] + + server_config = [ + %{ + :section => "httpd", + :key => "allow_jsonp", + :value => "true" + } + ] + + {:ok, create_resp} = create_doc(db_name, %{_id: "0", a: 0, b: 0}) + + run_on_modified_server(server_config, fn -> + resp = Couch.get("/#{db_name}/0?callback=jsonp_no_chunk") + + assert resp.status_code == 200 + assert resp.headers.hdrs["content-type"] == "application/javascript" + + {callback_fun, callback_param} = parse_callback(resp.body) + + assert callback_fun == "jsonp_no_chunk" + assert create_resp.body["id"] == callback_param["_id"] + assert create_resp.body["rev"] == callback_param["_rev"] + + resp = Couch.get("/#{db_name}/0?callback=jsonp_no_chunk\"") + assert resp.status_code == 400 + end) + end + + @tag :with_db + test "jsonp chunked callbacks", context do + db_name = context[:db_name] + + server_config = [ + %{ + :section => "httpd", + :key => "allow_jsonp", + :value => "true" + } + ] + + design_doc = %{ + _id: 
"_design/test", + language: "javascript", + views: %{ + all_docs: %{map: "function(doc) {if(doc.a) emit(null, doc.a);}"} + } + } + + {:ok, _} = create_doc(db_name, design_doc) + {:ok, _} = create_doc(db_name, %{_id: "0", a: 0, b: 0}) + {:ok, _} = create_doc(db_name, %{_id: "1", a: 1, b: 1}) + + run_on_modified_server(server_config, fn -> + resp = Couch.get("/#{db_name}/_design/test/_view/all_docs?callback=jsonp_chunk") + assert resp.status_code == 200 + assert resp.headers.hdrs["content-type"] == "application/javascript" + + {callback_fun, callback_param} = parse_callback(resp.body) + + assert callback_fun == "jsonp_chunk" + assert callback_param["total_rows"] == 1 + + resp = Couch.get("/#{db_name}/_design/test/_view/all_docs?callback=jsonp_chunk'") + assert resp.status_code == 400 + + resp = Couch.get("/#{db_name}/_changes?callback=jsonp_chunk") + assert resp.status_code == 200 + assert resp.headers.hdrs["content-type"] == "application/javascript" + + {callback_fun, callback_param} = parse_callback(resp.body) + assert callback_fun == "jsonp_chunk" + assert length(callback_param["results"]) == 3 + + end) + end + + defp parse_callback(msg) do + captures = Regex.scan(~r/\/\* CouchDB \*\/(\w+)\((.*)\)/s, msg) + + callback_fun = + captures + |> Enum.map(fn p -> Enum.at(p, 1) end) + |> Enum.at(0) + + param = + captures + |> Enum.map(fn p -> Enum.at(p, 2) end) + |> Enum.filter(fn p -> String.trim(p) != "" end) + |> Enum.map(fn p -> + p + |> IO.iodata_to_binary() + |> :jiffy.decode([:return_maps]) + end) + |> Enum.at(0) + + {callback_fun, param} + end +end diff --git a/test/elixir/test/jwtauth_test.exs b/test/elixir/test/jwtauth_test.exs new file mode 100644 index 00000000000..7b6fe4a33b4 --- /dev/null +++ b/test/elixir/test/jwtauth_test.exs @@ -0,0 +1,218 @@ +defmodule JwtAuthTest do + use CouchTestCase + + @moduletag :authentication + @moduletag kind: :single_node + + test "jwt auth with HMAC secret", _context do + + secret = "zxczxc12zxczxc12" + + server_config = [ + 
%{ + :section => "jwt_keys", + :key => "hmac:_default", + :value => :base64.encode(secret) + }, + %{ + :section => "jwt_auth", + :key => "allowed_algorithms", + :value => "HS256, HS384, HS512" + } + ] + + run_on_modified_server(server_config, fn -> test_fun("HS256", secret) end) + run_on_modified_server(server_config, fn -> test_fun("HS384", secret) end) + run_on_modified_server(server_config, fn -> test_fun("HS512", secret) end) + end + + defmodule RSA do + require Record + Record.defrecord :public, :RSAPublicKey, + Record.extract(:RSAPublicKey, from_lib: "public_key/include/public_key.hrl") + Record.defrecord :private, :RSAPrivateKey, + Record.extract(:RSAPrivateKey, from_lib: "public_key/include/public_key.hrl") + end + + test "jwt auth with RSA secret", _context do + require JwtAuthTest.RSA + + private_key = :public_key.generate_key({:rsa, 2048, 17}) + public_key = RSA.public( + modulus: RSA.private(private_key, :modulus), + publicExponent: RSA.private(private_key, :publicExponent)) + + public_pem = :public_key.pem_encode( + [:public_key.pem_entry_encode( + :SubjectPublicKeyInfo, public_key)]) + public_pem = String.replace(public_pem, "\n", "\\n") + + server_config = [ + %{ + :section => "jwt_keys", + :key => "rsa:_default", + :value => public_pem + }, + %{ + :section => "jwt_auth", + :key => "allowed_algorithms", + :value => "RS256, RS384, RS512" + } + ] + + run_on_modified_server(server_config, fn -> test_fun("RS256", private_key) end) + run_on_modified_server(server_config, fn -> test_fun("RS384", private_key) end) + run_on_modified_server(server_config, fn -> test_fun("RS512", private_key) end) + end + + defmodule EC do + require Record + Record.defrecord :point, :ECPoint, + Record.extract(:ECPoint, from_lib: "public_key/include/public_key.hrl") + Record.defrecord :private, :ECPrivateKey, + Record.extract(:ECPrivateKey, from_lib: "public_key/include/public_key.hrl") + end + + test "jwt auth with EC secret", _context do + require JwtAuthTest.EC + + 
private_key = :public_key.generate_key({:namedCurve, :secp384r1}) + point = EC.point(point: EC.private(private_key, :publicKey)) + public_key = {point, EC.private(private_key, :parameters)} + + public_pem = :public_key.pem_encode( + [:public_key.pem_entry_encode( + :SubjectPublicKeyInfo, public_key)]) + public_pem = String.replace(public_pem, "\n", "\\n") + + server_config = [ + %{ + :section => "jwt_keys", + :key => "ec:_default", + :value => public_pem + }, + %{ + :section => "jwt_auth", + :key => "allowed_algorithms", + :value => "ES256, ES384, ES512" + } + ] + + run_on_modified_server(server_config, fn -> test_fun("ES256", private_key) end) + run_on_modified_server(server_config, fn -> test_fun("ES384", private_key) end) + run_on_modified_server(server_config, fn -> test_fun("ES512", private_key) end) + end + + def test_fun(alg, key) do + now = DateTime.to_unix(DateTime.utc_now()) + {:ok, token} = :jwtf.encode( + { + [ + {"alg", alg}, + {"typ", "JWT"} + ] + }, + { + [ + {"nbf", now - 60}, + {"exp", now + 60}, + {"sub", "couch@apache.org"}, + {"_couchdb.roles", ["testing"] + } + ] + }, key) + + resp = Couch.get("/_session", + headers: [authorization: "Bearer #{token}"] + ) + + assert resp.body["userCtx"]["name"] == "couch@apache.org" + assert resp.body["userCtx"]["roles"] == ["testing"] + assert resp.body["info"]["authenticated"] == "jwt" + end + + test "jwt auth without secret", _context do + + resp = Couch.get("/_session") + + assert resp.body["userCtx"]["name"] == "adm" + assert resp.body["info"]["authenticated"] == "default" + end + + test "jwt auth with required iss claim", _context do + + secret = "zxczxc12zxczxc12" + + server_config = [ + %{ + :section => "jwt_auth", + :key => "required_claims", + :value => "{iss, \"hello\"}" + }, + %{ + :section => "jwt_keys", + :key => "hmac:_default", + :value => :base64.encode(secret) + }, + %{ + :section => "jwt_auth", + :key => "allowed_algorithms", + :value => "HS256, HS384, HS512" + } + ] + + 
run_on_modified_server(server_config, fn -> good_iss("HS256", secret) end) + run_on_modified_server(server_config, fn -> bad_iss("HS256", secret) end) + end + + def good_iss(alg, key) do + {:ok, token} = :jwtf.encode( + { + [ + {"alg", alg}, + {"typ", "JWT"} + ] + }, + { + [ + {"iss", "hello"}, + {"sub", "couch@apache.org"}, + {"_couchdb.roles", ["testing"] + } + ] + }, key) + + resp = Couch.get("/_session", + headers: [authorization: "Bearer #{token}"] + ) + + assert resp.body["userCtx"]["name"] == "couch@apache.org" + assert resp.body["userCtx"]["roles"] == ["testing"] + assert resp.body["info"]["authenticated"] == "jwt" + end + + def bad_iss(alg, key) do + {:ok, token} = :jwtf.encode( + { + [ + {"alg", alg}, + {"typ", "JWT"} + ] + }, + { + [ + {"iss", "goodbye"}, + {"sub", "couch@apache.org"}, + {"_couchdb.roles", ["testing"] + } + ] + }, key) + + resp = Couch.get("/_session", + headers: [authorization: "Bearer #{token}"] + ) + + assert resp.status_code == 400 + end + +end diff --git a/test/elixir/test/large_docs_text.exs b/test/elixir/test/large_docs_text.exs index 4d2c5dedeb8..b80add60eb7 100644 --- a/test/elixir/test/large_docs_text.exs +++ b/test/elixir/test/large_docs_text.exs @@ -2,6 +2,8 @@ defmodule LargeDocsTest do use CouchTestCase @moduletag :large_docs + @moduletag kind: :single_node + @long_string "0123456789\n" @moduledoc """ diff --git a/test/elixir/test/list_views_test.exs b/test/elixir/test/list_views_test.exs new file mode 100644 index 00000000000..8e6314dfbc4 --- /dev/null +++ b/test/elixir/test/list_views_test.exs @@ -0,0 +1,581 @@ +defmodule ListViewsTest do + use CouchTestCase + + @moduletag kind: :single_node + + @ddoc %{ + _id: "_design/lists", + language: "javascript", + views: %{ + basicView: %{ + map: """ + function(doc) { + emit(doc.integer, doc.string); + } + """ + }, + withReduce: %{ + map: """ + function(doc) { + emit(doc.integer, doc.string); + } + """, + reduce: """ + function(keys, values, rereduce) { + if (rereduce) { + return 
sum(values); + } else { + return values.length; + } + } + """ + } + }, + lists: %{ + basicBasic: """ + function(head, req) { + send("head"); + var row; + while(row = getRow()) { + send(row.key); + }; + return "tail"; + } + """, + basicJSON: """ + function(head, req) { + start({"headers":{"Content-Type" : "application/json"}}); + send('{"head":'+toJSON(head)+', '); + send('"req":'+toJSON(req)+', '); + send('"rows":['); + var row, sep = ''; + while (row = getRow()) { + send(sep + toJSON(row)); + sep = ', '; + } + return "]}"; + } + """, + simpleForm: """ + function(head, req) { + send('
    '); + var row, row_number = 0, prevKey, firstKey = null; + while (row = getRow()) { + row_number += 1; + if (!firstKey) firstKey = row.key; + prevKey = row.key; + send('\\n
  • Key: '+row.key + +' Value: '+row.value + +' LineNo: '+row_number+'
  • '); + } + return '

FirstKey: '+ firstKey + ' LastKey: '+ prevKey+'

'; + } + """, + acceptSwitch: """ + function(head, req) { + // respondWith takes care of setting the proper headers + provides("html", function() { + send("HTML
    "); + + var row, num = 0; + while (row = getRow()) { + num ++; + send('\\n
  • Key: ' + +row.key+' Value: '+row.value + +' LineNo: '+num+'
  • '); + } + + // tail + return '
'; + }); + } + """, + qsParams: """ + function(head, req) { + return toJSON(req.query) + "\\n"; + } + """, + stopIter: """ + function(req) { + send("head"); + var row, row_number = 0; + while(row = getRow()) { + if(row_number > 2) break; + send(" " + row_number); + row_number += 1; + }; + return " tail"; + } + """, + stopIter2: """ + function(head, req) { + provides("html", function() { + send("head"); + var row, row_number = 0; + while(row = getRow()) { + if(row_number > 2) break; + send(" " + row_number); + row_number += 1; + }; + return " tail"; + }); + } + """, + tooManyGetRows: """ + function() { + send("head"); + var row; + while(row = getRow()) { + send(row.key); + }; + getRow(); + getRow(); + getRow(); + row = getRow(); + return "after row: "+toJSON(row); + } + """, + emptyList: """ + function() { + return " "; + } + """, + rowError: """ + function(head, req) { + send("head"); + var row = getRow(); + send(fooBarBam); // intentional error + return "tail"; + } + """, + docReference: """ + function(head, req) { + send("head"); + var row = getRow(); + send(row.doc.integer); + return "tail"; + } + """, + secObj: """ + function(head, req) { + return toJSON(req.secObj); + } + """, + setHeaderAfterGotRow: """ + function(head, req) { + getRow(); + start({ + code: 400, + headers: { + "X-My-Header": "MyHeader" + } + }); + send("bad request"); + } + """, + allDocs: """ + function(head, req){ + start({'headers': {'Content-Type': 'application/json'}}); + var resp = head; + var rows = []; + while(row=getRow()){ + rows.push(row); + } + resp.rows = rows; + return toJSON(resp); + } + """ + } + } + + @view_only_design_doc %{ + _id: "_design/views", + language: "javascript", + views: %{ + basicView: %{ + map: """ + function(doc) { + emit(-doc.integer, doc.string); + } + """ + } + } + } + + @erl_list_doc %{ + _id: "_design/erlang", + language: "erlang", + lists: %{ + simple: """ + fun(Head, {Req}) -> + Send(<<"[">>), + Fun = fun({Row}, Sep) -> + Val = 
couch_util:get_value(<<"key">>, Row, 23), + Send(list_to_binary(Sep ++ integer_to_list(Val))), + {ok, ","} + end, + {ok, _} = FoldRows(Fun, ""), + Send(<<"]">>) + end. + """ + } + } + + setup_all do + db_name = random_db_name() + {:ok, _} = create_db(db_name) + on_exit(fn -> delete_db(db_name) end) + + {:ok, _} = create_doc(db_name, @ddoc) + bulk_save(db_name, make_docs(0..9)) + + # Check setup + resp = view(db_name, "lists/basicView") + assert resp.body["total_rows"] == 10 + + db_name_cross = "#{db_name}_cross" + {:ok, _} = create_db(db_name_cross) + on_exit(fn -> delete_db(db_name_cross) end) + + {:ok, _} = create_doc(db_name_cross, @ddoc) + {:ok, _} = create_doc(db_name_cross, @view_only_design_doc) + bulk_save(db_name_cross, make_docs(0..9)) + + db_name_erlang = "#{db_name}_erlang" + {:ok, _} = create_db(db_name_erlang) + on_exit(fn -> delete_db(db_name_erlang) end) + + {:ok, _} = create_doc(db_name_erlang, @erl_list_doc) + {:ok, _} = create_doc(db_name_erlang, @view_only_design_doc) + bulk_save(db_name_erlang, make_docs(0..9)) + + {:ok, + [db_name: db_name, db_name_cross: db_name_cross, db_name_erlang: db_name_erlang]} + end + + test "standard GET", context do + db_name = context[:db_name] + resp = Rawresp.get("/#{db_name}/_design/lists/_list/basicBasic/basicView") + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/head0123456789tail/) + end + + test "standard OPTIONS", context do + db_name = context[:db_name] + resp = Rawresp.options("/#{db_name}/_design/lists/_list/basicBasic/basicView") + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/head0123456789tail/) + end + + test "the richness of the arguments", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_design/lists/_list/basicJSON/basicView?update_seq=true") + + assert resp.status_code == 200 + assert resp.body["head"]["total_rows"] == 10 + assert resp.body["head"]["offset"] == 0 + assert length(resp.body["rows"]) == 10 + assert 
Enum.at(resp.body["rows"], 0) == %{"id" => "0", "key" => 0, "value" => "0"} + assert resp.body["req"]["info"]["db_name"] == db_name + assert resp.body["req"]["method"] == "GET" + + assert resp.body["req"]["path"] == [ + db_name, + "_design", + "lists", + "_list", + "basicJSON", + "basicView" + ] + + assert Map.has_key?(resp.body["req"]["headers"], "Host") == true + assert Map.has_key?(resp.body["req"]["headers"], "User-Agent") == true + assert Map.has_key?(resp.body["req"], "cookie") + + assert resp.body["req"]["raw_path"] == + "/#{db_name}/_design/lists/_list/basicJSON/basicView?update_seq=true" + end + + test "get with query params", context do + db_name = context[:db_name] + + resp = + Rawresp.get( + "/#{db_name}/_design/lists/_list/simpleForm/basicView?startkey=3&endkey=8" + ) + + assert resp.status_code == 200 + assert not String.match?(resp.body, ~r/Key: 1/) + assert String.match?(resp.body, ~r/FirstKey: 3/) + assert String.match?(resp.body, ~r/LastKey: 8/) + end + + test "with 0 rows", context do + db_name = context[:db_name] + + resp = Rawresp.get("/#{db_name}/_design/lists/_list/simpleForm/basicView?startkey=30") + + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/<\/ul>/) + end + + test "too many Get Rows", context do + db_name = context[:db_name] + + resp = Rawresp.get("/#{db_name}/_design/lists/_list/tooManyGetRows/basicView") + + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/9after row: null/) + end + + test "reduce with 0 rows", context do + db_name = context[:db_name] + + resp = + Rawresp.get("/#{db_name}/_design/lists/_list/simpleForm/withReduce?startkey=30") + + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/LastKey: undefined/) + end + + test "when there is a reduce present, but not used", context do + db_name = context[:db_name] + + resp = + Rawresp.get("/#{db_name}/_design/lists/_list/simpleForm/withReduce?reduce=false") + + assert resp.status_code == 200 + assert 
String.match?(resp.body, ~r/Key: 1/) + end + + test "when there is a reduce present, and used", context do + db_name = context[:db_name] + + resp = Rawresp.get("/#{db_name}/_design/lists/_list/simpleForm/withReduce?group=true") + + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/Key: 1/) + end + + test "empty list", context do + db_name = context[:db_name] + + resp = Rawresp.get("/#{db_name}/_design/lists/_list/emptyList/basicView") + assert String.match?(resp.body, ~r/^ $/) + + resp = Rawresp.get("/#{db_name}/_design/lists/_list/emptyList/withReduce?group=true") + assert String.match?(resp.body, ~r/^ $/) + end + + test "multi-key fetch with POST", context do + db_name = context[:db_name] + + resp = + Rawresp.post("/#{db_name}/_design/lists/_list/simpleForm/basicView", + body: %{keys: [2, 4, 5, 7]} + ) + + assert resp.status_code == 200 + assert not String.match?(resp.body, ~r/Key: 1/) + assert String.match?(resp.body, ~r/Key: 2/) + assert String.match?(resp.body, ~r/FirstKey: 2/) + assert String.match?(resp.body, ~r/LastKey: 7/) + end + + test "multi-key fetch with GET", context do + db_name = context[:db_name] + + resp = + Rawresp.get("/#{db_name}/_design/lists/_list/simpleForm/basicView?keys=[2,4,5,7]") + + assert resp.status_code == 200 + assert not String.match?(resp.body, ~r/Key: 1/) + assert String.match?(resp.body, ~r/Key: 2/) + assert String.match?(resp.body, ~r/FirstKey: 2/) + assert String.match?(resp.body, ~r/LastKey: 7/) + end + + test "no multi-key fetch allowed when group=false", context do + db_name = context[:db_name] + + resp = + Rawresp.post("/#{db_name}/_design/lists/_list/simpleForm/withReduce?group=false", + body: %{keys: [2, 4, 5, 7]} + ) + + assert resp.status_code == 400 + assert String.match?(resp.body, ~r/query_parse_error/) + + resp = Rawresp.get("/#{db_name}/_design/lists/_list/rowError/basicView") + assert String.match?(resp.body, ~r/ReferenceError/) + end + + test "with include_docs and a reference to the doc", 
context do + db_name = context[:db_name] + + resp = + Rawresp.get( + "/#{db_name}/_design/lists/_list/docReference/basicView?include_docs=true" + ) + + assert String.match?(resp.body, ~r/head0tail/) + end + + test "extra qs params", context do + db_name = context[:db_name] + resp = Rawresp.get("/#{db_name}/_design/lists/_list/qsParams/basicView?foo=blam") + assert String.match?(resp.body, ~r/blam/) + end + + test "stop iteration", context do + db_name = context[:db_name] + resp = Rawresp.get("/#{db_name}/_design/lists/_list/stopIter/basicView") + assert String.match?(resp.body, ~r/^head 0 1 2 tail$/) + + resp = + Rawresp.get("/#{db_name}/_design/lists/_list/stopIter2/basicView", + headers: [Accept: "text/html"] + ) + + assert String.match?(resp.body, ~r/^head 0 1 2 tail$/) + end + + test "abort iteration with reduce", context do + db_name = context[:db_name] + + resp = Rawresp.get("/#{db_name}/_design/lists/_list/stopIter/withReduce?group=true") + assert String.match?(resp.body, ~r/^head 0 1 2 tail$/) + + resp = + Rawresp.get("/#{db_name}/_design/lists/_list/stopIter2/withReduce?group=true", + headers: [Accept: "text/html"] + ) + + assert String.match?(resp.body, ~r/^head 0 1 2 tail$/) + end + + test "with accept headers for HTML", context do + db_name = context[:db_name] + + resp = + Rawresp.get("/#{db_name}/_design/lists/_list/acceptSwitch/basicView", + headers: [Accept: "text/html"] + ) + + assert resp.headers["Content-Type"] == "text/html; charset=utf-8" + assert String.match?(resp.body, ~r/HTML/) + assert String.match?(resp.body, ~r/Value/) + end + + test "we can run lists and views from separate docs", context do + db_name = context[:db_name_cross] + + resp = + Rawresp.get( + "/#{db_name}/_design/lists/_list/simpleForm/views/basicView?startkey=-3" + ) + + assert resp.status_code == 200 + assert not String.match?(resp.body, ~r/Key: -4/) + assert String.match?(resp.body, ~r/FirstKey: -3/) + assert String.match?(resp.body, ~r/LastKey: 0/) + end + + test "we do 
multi-key requests on lists and views in separate docs", context do + db_name = context[:db_name_cross] + + resp = + Rawresp.post( + "/#{db_name}/_design/lists/_list/simpleForm/views/basicView", + body: %{keys: [-2, -4, -5, -7]} + ) + + assert resp.status_code == 200 + assert not String.match?(resp.body, ~r/Key: -3/) + assert String.match?(resp.body, ~r/Key: -7/) + assert String.match?(resp.body, ~r/FirstKey: -2/) + assert String.match?(resp.body, ~r/LastKey: -7/) + end + + test "secObj is available", context do + db_name = context[:db_name] + + resp = Couch.get("/#{db_name}/_design/lists/_list/secObj/basicView") + assert resp.status_code == 200 + assert is_map(resp.body) + end + + test "multiple languages in design docs", context do + db_name = context[:db_name_erlang] + + resp = + Couch.get("/#{db_name}/_design/erlang/_list/simple/views/basicView?startkey=-3") + + assert resp.status_code == 200 + assert length(resp.body) == 4 + + for i <- 0..3 do + assert Enum.at(resp.body, i) + 3 == i + end + end + + @tag :with_db + test "COUCHDB-1113", context do + db_name = context[:db_name] + + ddoc = %{ + _id: "_design/test", + views: %{ + me: %{ + map: "function(doc) { emit(null,null)}" + } + }, + lists: %{ + you: """ + function(head, req) { + var row; + while(row = getRow()) { + send(row); + } + } + """ + } + } + + {:ok, _} = create_doc(db_name, ddoc) + + resp = + Couch.get("/#{db_name}/_design/test/_list/you/me", + headers: [ + "Content-Type": "application/x-www-form-urlencoded" + ] + ) + + assert resp.status_code == 200 + end + + test "HTTP header response set after getRow() called in _list function", context do + db_name = context[:db_name] + + resp = Rawresp.get("/#{db_name}/_design/lists/_list/setHeaderAfterGotRow/basicView") + assert resp.status_code == 400 + assert resp.headers["X-My-Header"] == "MyHeader" + assert String.match?(resp.body, ~r/^bad request$/) + end + + test "handling _all_docs by _list functions. 
the result should be equal", context do + db_name = context[:db_name] + + resp_list = Couch.get("/#{db_name}/_design/lists/_list/allDocs/_all_docs") + assert resp_list.status_code == 200 + + resp_alldocs = Couch.get("/#{db_name}/_all_docs") + + assert resp_list.body["total_rows"] == resp_alldocs.body["total_rows"] + assert resp_list.body["offset"] == resp_alldocs.body["offset"] + assert length(resp_list.body["rows"]) == length(resp_alldocs.body["rows"]) + assert resp_list.body["rows"] == resp_alldocs.body["rows"] + end +end diff --git a/test/elixir/test/local_docs_test.exs b/test/elixir/test/local_docs_test.exs index ff071f3e623..d7ed137c871 100644 --- a/test/elixir/test/local_docs_test.exs +++ b/test/elixir/test/local_docs_test.exs @@ -2,6 +2,7 @@ defmodule LocalDocsTest do use CouchTestCase @moduletag :local_docs + @moduletag kind: :single_node @moduledoc """ Test CouchDB _local_docs diff --git a/test/elixir/test/lots_of_docs_test.exs b/test/elixir/test/lots_of_docs_test.exs index c0cc9919811..6f2e9f7c8ba 100644 --- a/test/elixir/test/lots_of_docs_test.exs +++ b/test/elixir/test/lots_of_docs_test.exs @@ -2,6 +2,8 @@ defmodule LotsOfDocsTest do use CouchTestCase @moduletag :lots_of_docs + @moduletag kind: :performance + @docs_range 0..499 @moduledoc """ diff --git a/test/elixir/test/method_override_test.exs b/test/elixir/test/method_override_test.exs new file mode 100644 index 00000000000..e264a870a45 --- /dev/null +++ b/test/elixir/test/method_override_test.exs @@ -0,0 +1,56 @@ +defmodule MethodOverrideTest do + use CouchTestCase + + @moduletag :http + @moduletag kind: :single_node + + @moduledoc """ + Allow broken HTTP clients to fake a full method vocabulary with an + X-HTTP-METHOD-OVERRIDE header + """ + + @tag :with_db + test "method override PUT", context do + db_name = context[:db_name] + + resp = + Couch.post("/#{db_name}/fnord", + body: %{bob: "connie"}, + headers: ["X-HTTP-Method-Override": "PUT"] + ) + + assert resp.status_code == 201 + + resp = 
Couch.get("/#{db_name}/fnord") + assert resp.body["bob"] == "connie" + end + + @tag :with_db + test "method override DELETE", context do + db_name = context[:db_name] + {:ok, resp} = create_doc(db_name, %{_id: "fnord", bob: "connie"}) + + resp = + Couch.post("/#{db_name}/fnord?rev=#{resp.body["rev"]}", + headers: ["X-HTTP-Method-Override": "DELETE"] + ) + + assert resp.status_code == 200 + + resp = Couch.get("/#{db_name}/fnord") + assert resp.status_code == 404 + end + + @tag :with_db + test "Method Override is ignored when original Method isn't POST", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/fnord2", + body: %{bob: "connie"}, + headers: ["X-HTTP-Method-Override": "PUT"] + ) + + assert resp.status_code == 404 + end +end diff --git a/test/elixir/test/multiple_rows_test.exs b/test/elixir/test/multiple_rows_test.exs index 64668282321..422b254b0ae 100644 --- a/test/elixir/test/multiple_rows_test.exs +++ b/test/elixir/test/multiple_rows_test.exs @@ -2,6 +2,7 @@ defmodule MultipleRowsTest do use CouchTestCase @moduletag :multiple_rows + @moduletag kind: :single_node @north_carolina_cities ["Charlotte", "Raleigh"] @massachussets_cities ["Boston", "Lowell", "Worcester", "Cambridge", "Springfield"] diff --git a/test/elixir/test/partition_all_docs_test.exs b/test/elixir/test/partition_all_docs_test.exs index 816a8d6ed04..8abc635f60a 100644 --- a/test/elixir/test/partition_all_docs_test.exs +++ b/test/elixir/test/partition_all_docs_test.exs @@ -6,6 +6,9 @@ defmodule PartitionAllDocsTest do Test Partition functionality for for all_docs """ + @moduletag :partition + @moduletag kind: :cluster + setup_all do db_name = random_db_name() {:ok, _} = create_db(db_name, query: %{partitioned: true, q: 1}) diff --git a/test/elixir/test/partition_crud_test.exs b/test/elixir/test/partition_crud_test.exs index 7e32abbdc10..f4a23ed28a7 100644 --- a/test/elixir/test/partition_crud_test.exs +++ b/test/elixir/test/partition_crud_test.exs @@ -1,6 +1,9 @@ 
defmodule PartitionCrudTest do use CouchTestCase + @moduletag :partition + @moduletag kind: :cluster + @tag :with_partitioned_db test "Sets partition in db info", context do db_name = context[:db_name] diff --git a/test/elixir/test/partition_ddoc_test.exs b/test/elixir/test/partition_ddoc_test.exs index 9fdfb9260dd..353d52af145 100644 --- a/test/elixir/test/partition_ddoc_test.exs +++ b/test/elixir/test/partition_ddoc_test.exs @@ -4,6 +4,9 @@ defmodule PartitionDDocTest do @moduledoc """ Test partition design doc interactions """ + + @moduletag :partition + @moduletag kind: :cluster setup do db_name = random_db_name() diff --git a/test/elixir/test/partition_design_docs_test.exs b/test/elixir/test/partition_design_docs_test.exs index 4ccd63fe06a..0de95b1fc1e 100644 --- a/test/elixir/test/partition_design_docs_test.exs +++ b/test/elixir/test/partition_design_docs_test.exs @@ -5,6 +5,9 @@ defmodule PartitionDesignDocsTest do Test Partition functionality for partition design docs """ + @moduletag :partition + @moduletag kind: :cluster + @tag :with_partitioned_db test "/_partition/:pk/_design/doc 404", context do db_name = context[:db_name] diff --git a/test/elixir/test/partition_mango_test.exs b/test/elixir/test/partition_mango_test.exs index 992999fb920..e203bac5b65 100644 --- a/test/elixir/test/partition_mango_test.exs +++ b/test/elixir/test/partition_mango_test.exs @@ -5,6 +5,10 @@ defmodule PartitionMangoTest do @moduledoc """ Test Partition functionality for mango """ + + @moduletag :partition + @moduletag kind: :cluster + def create_index(db_name, fields \\ ["some"], opts \\ %{}) do default_index = %{ index: %{ @@ -546,6 +550,59 @@ defmodule PartitionMangoTest do assert_correct_partition(partitions, "foo") end + @tag :with_partitioned_db + test "partitioned query with query server config set", context do + db_name = context[:db_name] + create_partition_docs(db_name) + create_index(db_name, ["value"]) + + # this is to test that we bypass partition_query_limit for 
mango + set_config({"query_server_config", "partition_query_limit", "1"}) + + url = "/#{db_name}/_partition/foo/_find" + + resp = + Couch.post( + url, + body: %{ + selector: %{ + value: %{ + "$gte": 6, + "$lt": 16 + } + }, + limit: 3 + } + ) + + assert resp.status_code == 200 + partitions = get_partitions(resp) + assert length(partitions) == 3 + assert_correct_partition(partitions, "foo") + + %{:body => %{"bookmark" => bookmark}} = resp + + resp = + Couch.post( + url, + body: %{ + selector: %{ + value: %{ + "$gte": 6, + "$lt": 16 + } + }, + limit: 3, + bookmark: bookmark + } + ) + + assert resp.status_code == 200 + partitions = get_partitions(resp) + assert length(partitions) == 2 + assert_correct_partition(partitions, "foo") + end + @tag :with_partitioned_db test "global query uses global index", context do db_name = context[:db_name] diff --git a/test/elixir/test/partition_size_limit_test.exs b/test/elixir/test/partition_size_limit_test.exs index 5141d0d8ba3..de857bd3112 100644 --- a/test/elixir/test/partition_size_limit_test.exs +++ b/test/elixir/test/partition_size_limit_test.exs @@ -5,6 +5,9 @@ defmodule PartitionSizeLimitTest do Test Partition size limit functionality """ + @moduletag :partition + @moduletag kind: :cluster + @max_size 10_240 setup do @@ -68,18 +71,6 @@ defmodule PartitionSizeLimitTest do assert resp.status_code in [201, 202] end - defp compact(db) do - assert Couch.post("/#{db}/_compact").status_code == 202 - - retry_until( - fn -> - Couch.get("/#{db}").body["compact_running"] == false - end, - 200, - 20_000 - ) - end - test "fill partition manually", context do db_name = context[:db_name] partition = "foo" diff --git a/test/elixir/test/partition_size_test.exs b/test/elixir/test/partition_size_test.exs index 2ba8139fc65..5dfa5b5d80a 100644 --- a/test/elixir/test/partition_size_test.exs +++ b/test/elixir/test/partition_size_test.exs @@ -4,6 +4,9 @@ defmodule PartitionSizeTest do @moduledoc """ Test Partition size functionality """ + + 
@moduletag :partition + @moduletag kind: :cluster setup do db_name = random_db_name() diff --git a/test/elixir/test/partition_view_test.exs b/test/elixir/test/partition_view_test.exs index 0a55c24439d..bea5e1d8c11 100644 --- a/test/elixir/test/partition_view_test.exs +++ b/test/elixir/test/partition_view_test.exs @@ -5,6 +5,9 @@ defmodule ViewPartitionTest do @moduledoc """ Test Partition functionality for views """ + + @moduletag :partition + @moduletag kind: :cluster setup_all do db_name = random_db_name() diff --git a/test/elixir/test/partition_view_update_test.exs b/test/elixir/test/partition_view_update_test.exs index 5c1cb09f02d..390e141983a 100644 --- a/test/elixir/test/partition_view_update_test.exs +++ b/test/elixir/test/partition_view_update_test.exs @@ -5,6 +5,10 @@ defmodule PartitionViewUpdateTest do @moduledoc """ Test Partition view update functionality """ + + @moduletag :partition + @moduletag kind: :cluster + @tag :with_partitioned_db test "view updates properly remove old keys", context do db_name = context[:db_name] diff --git a/test/elixir/test/proxyauth_test.exs b/test/elixir/test/proxyauth_test.exs new file mode 100644 index 00000000000..b152e9bd5ce --- /dev/null +++ b/test/elixir/test/proxyauth_test.exs @@ -0,0 +1,164 @@ +defmodule ProxyAuthTest do + use CouchTestCase + + @moduletag :authentication + @moduletag kind: :single_node + + @tag :with_db + test "proxy auth with secret", context do + db_name = context[:db_name] + + design_doc = %{ + _id: "_design/test", + language: "javascript", + shows: %{ + welcome: """ + function(doc,req) { + return "Welcome " + req.userCtx["name"]; + } + """, + role: """ + function(doc, req) { + return req.userCtx['roles'][0]; + } + """ + } + } + + {:ok, _} = create_doc(db_name, design_doc) + + users_db_name = random_db_name() + create_db(users_db_name) + + secret = generate_secret(64) + + server_config = [ + %{ + :section => "chttpd_auth", + :key => "authentication_db", + :value => users_db_name + }, + %{ + 
:section => "couch_httpd_auth", + :key => "proxy_use_secret", + :value => "true" + }, + %{ + :section => "couch_httpd_auth", + :key => "secret", + :value => secret + } + ] + + run_on_modified_server(server_config, fn -> + test_fun(db_name, users_db_name, secret) + end) + delete_db(users_db_name) + end + + defp generate_secret(len) do + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/" + |> String.splitter("", trim: true) + |> Enum.take_random(len) + |> Enum.join("") + end + + defp hex_hmac_sha1(secret, message) do + signature = :crypto.hmac(:sha, secret, message) + Base.encode16(signature, case: :lower) + end + + def test_fun(db_name, users_db_name, secret) do + user = prepare_user_doc(name: "couch@apache.org", password: "test") + create_doc(users_db_name, user) + + resp = + Couch.get("/_session", + headers: [authorization: "Basic Y291Y2hAYXBhY2hlLm9yZzp0ZXN0"] + ) + + assert resp.body["userCtx"]["name"] == "couch@apache.org" + assert resp.body["info"]["authenticated"] == "default" + + headers = [ + "X-Auth-CouchDB-UserName": "couch@apache.org", + "X-Auth-CouchDB-Roles": "test", + "X-Auth-CouchDB-Token": hex_hmac_sha1(secret, "couch@apache.org") + ] + resp = Couch.get("/#{db_name}/_design/test/_show/welcome", headers: headers) + assert resp.body == "Welcome couch@apache.org" + + resp = Couch.get("/#{db_name}/_design/test/_show/role", headers: headers) + assert resp.body == "test" + end + + @tag :with_db + test "proxy auth without secret", context do + db_name = context[:db_name] + + design_doc = %{ + _id: "_design/test", + language: "javascript", + shows: %{ + welcome: """ + function(doc,req) { + return "Welcome " + req.userCtx["name"]; + } + """, + role: """ + function(doc, req) { + return req.userCtx['roles'][0]; + } + """ + } + } + + {:ok, _} = create_doc(db_name, design_doc) + + users_db_name = random_db_name() + create_db(users_db_name) + + server_config = [ + %{ + :section => "chttpd_auth", + :key => "authentication_db", + :value => 
users_db_name + }, + %{ + :section => "couch_httpd_auth", + :key => "proxy_use_secret", + :value => "false" + } + ] + + run_on_modified_server(server_config, fn -> + test_fun_no_secret(db_name, users_db_name) + end) + + delete_db(users_db_name) + end + + def test_fun_no_secret(db_name, users_db_name) do + user = prepare_user_doc(name: "couch@apache.org", password: "test") + create_doc(users_db_name, user) + + resp = + Couch.get("/_session", + headers: [authorization: "Basic Y291Y2hAYXBhY2hlLm9yZzp0ZXN0"] + ) + + assert resp.body["userCtx"]["name"] == "couch@apache.org" + assert resp.body["info"]["authenticated"] == "default" + + headers = [ + "X-Auth-CouchDB-UserName": "couch@apache.org", + "X-Auth-CouchDB-Roles": "test" + ] + + resp = Couch.get("/#{db_name}/_design/test/_show/welcome", headers: headers) + assert resp.body == "Welcome couch@apache.org" + + resp = Couch.get("/#{db_name}/_design/test/_show/role", headers: headers) + assert resp.body == "test" + end +end diff --git a/test/elixir/test/purge_test.exs b/test/elixir/test/purge_test.exs new file mode 100644 index 00000000000..1a069083b9c --- /dev/null +++ b/test/elixir/test/purge_test.exs @@ -0,0 +1,151 @@ +defmodule PurgeTest do + use CouchTestCase + + @moduletag :purge + @moduletag kind: :single_node + + @tag :with_db + test "purge documents", context do + db_name = context[:db_name] + + design_doc = %{ + _id: "_design/test", + language: "javascript", + views: %{ + all_docs_twice: %{ + map: "function(doc) { emit(doc.integer, null); emit(doc.integer, null) }" + }, + single_doc: %{ + map: "function(doc) { if (doc._id == \"1\") { emit(1, null) }}" + } + } + } + + {:ok, _} = create_doc(db_name, design_doc) + + num_docs = 10 + bulk_save(db_name, make_docs(1..(num_docs + 1))) + + test_all_docs_twice(db_name, num_docs, 1) + + info = info(db_name) + + doc1 = open_doc(db_name, 1) + doc2 = open_doc(db_name, 2) + + resp = + Couch.post("/#{db_name}/_purge", + body: %{"1": [doc1["_rev"]], "2": [doc2["_rev"]]} + ) + + 
assert resp.status_code == 201 + result = resp.body + + assert Enum.at(result["purged"]["1"], 0) == doc1["_rev"] + assert Enum.at(result["purged"]["2"], 0) == doc2["_rev"] + + open_doc(db_name, 1, 404) + open_doc(db_name, 2, 404) + + purged_info = info(db_name) + + assert purged_info["purge_seq"] != info["purge_seq"] + + test_all_docs_twice(db_name, num_docs, 0, 2) + + # purge sequences are preserved after compaction (COUCHDB-1021) + compact(db_name) + + compacted_info = info(db_name) + assert compacted_info["purge_seq"] == purged_info["purge_seq"] + + # purge documents twice in a row without loading views + # (causes full view rebuilds) + + doc3 = open_doc(db_name, 3) + doc4 = open_doc(db_name, 4) + + resp = + Couch.post("/#{db_name}/_purge", + body: %{"3": [doc3["_rev"]]} + ) + + assert resp.status_code == 201 + + resp = + Couch.post("/#{db_name}/_purge", + body: %{"4": [doc4["_rev"]]} + ) + + assert resp.status_code == 201 + + test_all_docs_twice(db_name, num_docs, 0, 4) + end + + @tag :with_db + test "COUCHDB-1065", context do + db_name_a = context[:db_name] + db_name_b = random_db_name() + {:ok, _} = create_db(db_name_b) + + {:ok, doc_a_resp} = create_doc(db_name_a, %{_id: "test", a: 1}) + {:ok, doc_b_resp} = create_doc(db_name_b, %{_id: "test", a: 2}) + replicate(db_name_a, db_name_b) + + open_rev(db_name_b, "test", doc_a_resp.body["rev"], 200) + open_rev(db_name_b, "test", doc_b_resp.body["rev"], 200) + + resp = + Couch.post("/#{db_name_b}/_purge", + body: %{test: [doc_a_resp.body["rev"]]} + ) + + assert resp.status_code == 201 + + open_rev(db_name_b, "test", doc_a_resp.body["rev"], 404) + + resp = + Couch.post("/#{db_name_b}/_purge", + body: %{test: [doc_b_resp.body["rev"]]} + ) + + assert resp.status_code == 201 + + open_rev(db_name_b, "test", doc_b_resp.body["rev"], 404) + + resp = + Couch.post("/#{db_name_b}/_purge", + body: %{test: [doc_a_resp.body["rev"], doc_b_resp.body["rev"]]} + ) + + assert resp.status_code == 201 + + delete_db(db_name_b) + end + + 
defp open_doc(db_name, id, expect \\ 200) do + resp = Couch.get("/#{db_name}/#{id}") + assert resp.status_code == expect + resp.body + end + + defp open_rev(db_name, id, rev, expect) do + resp = Couch.get("/#{db_name}/#{id}?rev=#{rev}") + assert resp.status_code == expect + resp.body + end + + defp test_all_docs_twice(db_name, num_docs, sigle_doc_expect, offset \\ 0) do + resp = Couch.get("/#{db_name}/_design/test/_view/all_docs_twice") + assert resp.status_code == 200 + rows = resp.body["rows"] + + for x <- 0..(num_docs - offset) do + assert Map.get(Enum.at(rows, 2 * x), "key") == x + offset + 1 + assert Map.get(Enum.at(rows, 2 * x + 1), "key") == x + offset + 1 + end + + resp = Couch.get("/#{db_name}/_design/test/_view/single_doc") + assert resp.body["total_rows"] == sigle_doc_expect + end +end diff --git a/test/elixir/test/reader_acl_test.exs b/test/elixir/test/reader_acl_test.exs new file mode 100644 index 00000000000..3cbd5c8866d --- /dev/null +++ b/test/elixir/test/reader_acl_test.exs @@ -0,0 +1,255 @@ +defmodule ReaderACLTest do + use CouchTestCase + + @moduletag :authentication + @moduletag kind: :single_node + + @users_db_name "custom-users" + @password "funnybone" + + @moduletag config: [ + { + "chttpd_auth", + "authentication_db", + @users_db_name + }, + { + "couch_httpd_auth", + "authentication_db", + @users_db_name + } + ] + setup do + # Create db if not exists + Couch.put("/#{@users_db_name}") + + # create a user with top-secret-clearance + user_doc = + prepare_user_doc([ + {:name, "bond@apache.org"}, + {:password, @password}, + {:roles, ["top-secret"]} + ]) + + {:ok, _} = create_doc(@users_db_name, user_doc) + + # create a user with top-secret-clearance + user_doc = + prepare_user_doc([ + {:name, "juanjo@apache.org"}, + {:password, @password} + ]) + + {:ok, _} = create_doc(@users_db_name, user_doc) + + on_exit(&tear_down/0) + + :ok + end + + defp tear_down do + delete_db(@users_db_name) + end + + defp login(user, password) do + sess = 
Couch.login(user, password) + assert sess.cookie, "Login correct is expected" + sess + end + + defp logout(session) do + assert Couch.Session.logout(session).body["ok"] + end + + defp open_as(db_name, doc_id, options) do + use_session = Keyword.get(options, :use_session) + user = Keyword.get(options, :user) + expect_response = Keyword.get(options, :expect_response, 200) + expect_message = Keyword.get(options, :error_message) + + session = use_session || login(user, @password) + + resp = + Couch.Session.get( + session, + "/#{db_name}/#{URI.encode(doc_id)}" + ) + + if use_session == nil do + logout(session) + end + + assert resp.status_code == expect_response + + if expect_message != nil do + assert resp.body["error"] == expect_message + end + + resp.body + end + + defp set_security(db_name, security, expect_response \\ 200) do + resp = Couch.put("/#{db_name}/_security", body: security) + assert resp.status_code == expect_response + end + + @tag :with_db + test "unrestricted db can be read", context do + db_name = context[:db_name] + + doc = %{_id: "baz", foo: "bar"} + {:ok, _} = create_doc(db_name, doc) + + # any user can read unrestricted db + open_as(db_name, "baz", user: "juanjo@apache.org") + open_as(db_name, "baz", user: "bond@apache.org") + end + + @tag :with_db + test "restricted db can be read by authorized users", context do + db_name = context[:db_name] + + doc = %{_id: "baz", foo: "bar"} + {:ok, _} = create_doc(db_name, doc) + + security = %{ + members: %{ + roles: ["super-secret-club"], + names: ["joe", "barb"] + } + } + + set_security(db_name, security) + + # can't read it as bond is missing the needed role + open_as(db_name, "baz", user: "bond@apache.org", expect_response: 403) + + # make anyone with the top-secret role an admin + # db admins are automatically members + security = %{ + admins: %{ + roles: ["top-secret"], + names: [] + }, + members: %{ + roles: ["super-secret-club"], + names: ["joe", "barb"] + } + } + + set_security(db_name, security) + 
+ # db admin can read + open_as(db_name, "baz", user: "bond@apache.org") + + # admin now adds the top-secret role to the db's members + # and removes db-admins + security = %{ + admins: %{ + roles: [], + names: [] + }, + members: %{ + roles: ["super-secret-club", "top-secret"], + names: ["joe", "barb"] + } + } + + set_security(db_name, security) + + # server _admin can always read + resp = Couch.get("/#{db_name}/baz") + assert resp.status_code == 200 + + open_as(db_name, "baz", user: "bond@apache.org") + end + + @tag :with_db + test "works with readers (backwards compat with 1.0)", context do + db_name = context[:db_name] + + doc = %{_id: "baz", foo: "bar"} + {:ok, _} = create_doc(db_name, doc) + + security = %{ + admins: %{ + roles: [], + names: [] + }, + readers: %{ + roles: ["super-secret-club", "top-secret"], + names: ["joe", "barb"] + } + } + + set_security(db_name, security) + open_as(db_name, "baz", user: "bond@apache.org") + end + + @tag :with_db + test "can't set non string reader names or roles", context do + db_name = context[:db_name] + + security = %{ + members: %{ + roles: ["super-secret-club", %{"top-secret": "awesome"}], + names: ["joe", "barb"] + } + } + + set_security(db_name, security, 500) + + security = %{ + members: %{ + roles: ["super-secret-club", "top-secret"], + names: ["joe", 22] + } + } + + set_security(db_name, security, 500) + + security = %{ + members: %{ + roles: ["super-secret-club", "top-secret"], + names: "joe" + } + } + + set_security(db_name, security, 500) + end + + @tag :with_db + test "members can query views", context do + db_name = context[:db_name] + + doc = %{_id: "baz", foo: "bar"} + {:ok, _} = create_doc(db_name, doc) + + security = %{ + admins: %{ + roles: [], + names: [] + }, + members: %{ + roles: ["super-secret-club", "top-secret"], + names: ["joe", "barb"] + } + } + + set_security(db_name, security) + + view = %{ + _id: "_design/foo", + views: %{ + bar: %{ + map: "function(doc){emit(null, null)}" + } + } + } + + 
{:ok, _} = create_doc(db_name, view) + + # members can query views + open_as(db_name, "_design/foo/_view/bar", user: "bond@apache.org") + end +end diff --git a/test/elixir/test/recreate_doc_test.exs b/test/elixir/test/recreate_doc_test.exs new file mode 100644 index 00000000000..9ee914dc21e --- /dev/null +++ b/test/elixir/test/recreate_doc_test.exs @@ -0,0 +1,166 @@ +defmodule RecreateDocTest do + use CouchTestCase + + @moduletag :recreate_doc + @moduletag kind: :single_node + + @moduledoc """ + Test CouchDB document recreation + This is a port of the recreate_doc.js suite + """ + + @tag :with_db + test "recreate document", context do + db_name = context[:db_name] + + # First create a new document with the ID "foo", and delete it again + doc = %{_id: "foo", a: "bar", b: 42} + {:ok, resp} = create_doc(db_name, doc) + first_rev = resp.body["rev"] + + resp = Couch.delete("/#{db_name}/foo?rev=#{first_rev}") + assert resp.status_code == 200 + + # Now create a new document with the same ID, save it, and then modify it + doc = %{_id: "foo"} + + for _i <- 0..9 do + {:ok, _} = create_doc(db_name, doc) + resp = Couch.get("/#{db_name}/foo") + + updated_doc = + resp.body + |> Map.put("a", "baz") + + resp = Couch.put("/#{db_name}/foo", body: updated_doc) + assert resp.status_code == 201 + rev = resp.body["rev"] + resp = Couch.delete("/#{db_name}/foo?rev=#{rev}") + assert resp.status_code == 200 + end + end + + @tag :with_db + test "COUCHDB-292 - recreate a deleted document", context do + db_name = context[:db_name] + # First create a new document with the ID "foo", and delete it again + doc = %{_id: "foo", a: "bar", b: 42} + {:ok, resp} = create_doc(db_name, doc) + first_rev = resp.body["rev"] + + resp = Couch.delete("/#{db_name}/foo?rev=#{first_rev}") + assert resp.status_code == 200 + + # COUCHDB-292 now attempt to save the document with a prev that's since + # been deleted and this should generate a conflict exception + updated_doc = + doc + |> Map.put(:_rev, first_rev) + + 
resp = Couch.put("/#{db_name}/foo", body: updated_doc) + assert resp.status_code == 409 + + # same as before, but with binary + bin_att_doc = %{ + _id: "foo", + _rev: first_rev, + _attachments: %{ + "foo.txt": %{ + content_type: "text/plain", + data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ=" + } + } + } + + resp = Couch.put("/#{db_name}/foo", body: bin_att_doc) + assert resp.status_code == 409 + end + + @tag :with_db + test "Recreate a deleted document with non-exsistant rev", context do + db_name = context[:db_name] + + doc = %{_id: "foo", a: "bar", b: 42} + {:ok, resp} = create_doc(db_name, doc) + first_rev = resp.body["rev"] + + resp = Couch.delete("/#{db_name}/foo?rev=#{first_rev}") + assert resp.status_code == 200 + + # random non-existant prev rev + updated_doc = + doc + |> Map.put(:_rev, "1-asfafasdf") + + resp = Couch.put("/#{db_name}/foo", body: updated_doc) + assert resp.status_code == 409 + + # random non-existant prev rev with bin + bin_att_doc = %{ + _id: "foo", + _rev: "1-aasasfasdf", + _attachments: %{ + "foo.txt": %{ + content_type: "text/plain", + data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ=" + } + } + } + + resp = Couch.put("/#{db_name}/foo", body: bin_att_doc) + assert resp.status_code == 409 + end + + @tag :with_db + test "COUCHDB-1265 - changes feed after we try and break the update_seq tree", + context do + db_name = context[:db_name] + + # Test COUCHDB-1265 - Reinserting an old revision into the revision tree causes + # duplicates in the update_seq tree. 
+ revs = create_rev_doc(db_name, "a", 3) + + resp = + Couch.put("/#{db_name}/a", + body: Enum.at(revs, 0), + query: [new_edits: false] + ) + + assert resp.status_code == 201 + + resp = + Couch.put("/#{db_name}/a", + body: Enum.at(revs, -1) + ) + + assert resp.status_code == 201 + + resp = Couch.get("/#{db_name}/_changes") + assert resp.status_code == 200 + + assert length(resp.body["results"]) == 1 + end + + # function to create a doc with multiple revisions + defp create_rev_doc(db_name, id, num_revs) do + doc = %{_id: id, count: 0} + {:ok, resp} = create_doc(db_name, doc) + create_rev_doc(db_name, id, num_revs, [Map.put(doc, :_rev, resp.body["rev"])]) + end + + defp create_rev_doc(db_name, id, num_revs, revs) do + if length(revs) < num_revs do + doc = %{_id: id, _rev: Enum.at(revs, -1)[:_rev], count: length(revs)} + {:ok, resp} = create_doc(db_name, doc) + + create_rev_doc( + db_name, + id, + num_revs, + revs ++ [Map.put(doc, :_rev, resp.body["rev"])] + ) + else + revs + end + end +end diff --git a/test/elixir/test/reduce_builtin_test.exs b/test/elixir/test/reduce_builtin_test.exs new file mode 100644 index 00000000000..410ca59899c --- /dev/null +++ b/test/elixir/test/reduce_builtin_test.exs @@ -0,0 +1,283 @@ +defmodule ReduceBuiltinTest do + use CouchTestCase + + @moduletag :views + @moduletag kind: :single_node + + @moduledoc """ + Test CouchDB view builtin reduce functions + This is a port of the reduce_builtin.js suite + """ + + def random_ddoc(db_name) do + "/#{db_name}/_design/#{:erlang.monotonic_time()}" + end + + def summate(n) do + (n + 1) * n / 2 + end + + def sumsqr(n) do + 1..n |> Enum.reduce(0, fn i, acc -> acc + i * i end) + end + + def check_approx_distinct(expected, estimated) do + # see https://en.wikipedia.org/wiki/HyperLogLog + err = 1.04 / :math.sqrt(:math.pow(2, 11 - 1)) + abs(expected - estimated) < expected * err + end + + def query_rows(ddoc_url, builtin_fun, query \\ nil) do + http_opts = if query, do: [query: query], else: [] + 
Couch.get("#{ddoc_url}/_view/builtin#{builtin_fun}", http_opts).body["rows"] + end + + def query_value(ddoc_url, builtin_fun, query \\ nil) do + hd(query_rows(ddoc_url, builtin_fun, query))["value"] + end + + @tag :with_db + test "Builtin reduce functions", context do + db_name = context[:db_name] + num_docs = 500 + + docs = make_docs(1..num_docs) + + resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs}, query: %{w: 3}) + assert resp.status_code in [201, 202] + + ddoc_url = random_ddoc(db_name) + + map = ~s""" + function (doc) { + emit(doc.integer, doc.integer); + emit(doc.integer, doc.integer); + }; + """ + + design_doc = %{ + :views => %{ + :builtin_sum => %{:map => map, :reduce => "_sum"}, + :builtin_count => %{:map => map, :reduce => "_count"}, + :builtin_stats => %{:map => map, :reduce => "_stats"}, + :builtin_approx_count_distinct => %{ + :map => map, + :reduce => "_approx_count_distinct" + } + } + } + + assert Couch.put(ddoc_url, body: design_doc).body["ok"] + + value = ddoc_url |> query_value("_sum") + assert value == 2 * summate(num_docs) + value = ddoc_url |> query_value("_count") + assert value == 1000 + value = ddoc_url |> query_value("_stats") + assert value["sum"] == 2 * summate(num_docs) + assert value["count"] == 1000 + assert value["min"] == 1 + assert value["max"] == 500 + assert value["sumsqr"] == 2 * sumsqr(num_docs) + value = ddoc_url |> query_value("_approx_count_distinct") + assert check_approx_distinct(num_docs, value) + + value = ddoc_url |> query_value("_sum", %{startkey: 4, endkey: 4}) + assert value == 8 + value = ddoc_url |> query_value("_count", %{startkey: 4, endkey: 4}) + assert value == 2 + value = ddoc_url |> query_value("_approx_count_distinct", %{startkey: 4, endkey: 4}) + assert check_approx_distinct(1, value) + + value = ddoc_url |> query_value("_sum", %{startkey: 4, endkey: 5}) + assert value == 18 + value = ddoc_url |> query_value("_count", %{startkey: 4, endkey: 5}) + assert value == 4 + value = ddoc_url |> 
query_value("_approx_count_distinct", %{startkey: 4, endkey: 5}) + assert check_approx_distinct(2, value) + + value = ddoc_url |> query_value("_sum", %{startkey: 4, endkey: 6}) + assert value == 30 + value = ddoc_url |> query_value("_count", %{startkey: 4, endkey: 6}) + assert value == 6 + value = ddoc_url |> query_value("_approx_count_distinct", %{startkey: 4, endkey: 6}) + assert check_approx_distinct(3, value) + + assert [row0, row1, row2] = ddoc_url |> query_rows("_sum", %{group: true, limit: 3}) + assert row0["value"] == 2 + assert row1["value"] == 4 + assert row2["value"] == 6 + + assert [row0, row1, row2] = + ddoc_url |> query_rows("_approx_count_distinct", %{group: true, limit: 3}) + + assert check_approx_distinct(1, row0["value"]) + assert check_approx_distinct(1, row1["value"]) + assert check_approx_distinct(1, row2["value"]) + + 1..div(500, 2) + |> Enum.take_every(30) + |> Enum.each(fn i -> + value = ddoc_url |> query_value("_sum", %{startkey: i, endkey: num_docs - i}) + assert value == 2 * (summate(num_docs - i) - summate(i - 1)) + end) + end + + @tag :with_db + test "Builtin reduce functions with trailings", context do + db_name = context[:db_name] + num_docs = 500 + + docs = make_docs(1..num_docs) + + resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs}, query: %{w: 3}) + assert resp.status_code in [201, 202] + + # test for trailing characters after builtin functions, desired behaviour + # is to disregard any trailing characters + # I think the behavior should be a prefix test, so that even "_statsorama" + # or "_stats\nare\awesome" should work just as "_stats" does. 
- JChris + ["\n", "orama", "\nare\nawesome", " ", " \n "] + |> Enum.each(fn trailing -> + ddoc_url = random_ddoc(db_name) + + map = ~s""" + function (doc) { + emit(doc.integer, doc.integer); + emit(doc.integer, doc.integer); + }; + """ + + design_doc = %{ + :views => %{ + :builtin_sum => %{:map => map, :reduce => "_sum#{trailing}"}, + :builtin_count => %{:map => map, :reduce => "_count#{trailing}"}, + :builtin_stats => %{:map => map, :reduce => "_stats#{trailing}"}, + :builtin_approx_count_distinct => %{ + :map => map, + :reduce => "_approx_count_distinct#{trailing}" + } + } + } + + assert Couch.put(ddoc_url, body: design_doc).body["ok"] + + value = ddoc_url |> query_value("_sum") + assert value == 2 * summate(num_docs) + value = ddoc_url |> query_value("_count") + assert value == 1000 + value = ddoc_url |> query_value("_stats") + assert value["sum"] == 2 * summate(num_docs) + assert value["count"] == 1000 + assert value["min"] == 1 + assert value["max"] == 500 + assert value["sumsqr"] == 2 * sumsqr(num_docs) + end) + end + + @tag :with_db + test "Builtin count and sum reduce for key as array", context do + db_name = context[:db_name] + + ddoc_url = random_ddoc(db_name) + + map_one = ~s""" + function (doc) { + emit(doc.keys, 1); + }; + """ + + map_ones_array = ~s""" + function (doc) { + emit(doc.keys, [1, 1]); + }; + """ + + design_doc = %{ + :views => %{ + :builtin_one_sum => %{:map => map_one, :reduce => "_sum"}, + :builtin_one_count => %{:map => map_one, :reduce => "_count"}, + :builtin_ones_array_sum => %{:map => map_ones_array, :reduce => "_sum"} + } + } + + assert Couch.put(ddoc_url, body: design_doc).body["ok"] + + for i <- 1..5 do + for j <- 0..9 do + docs = [ + %{keys: ["a"]}, + %{keys: ["a"]}, + %{keys: ["a", "b"]}, + %{keys: ["a", "b"]}, + %{keys: ["a", "b", "c"]}, + %{keys: ["a", "b", "d"]}, + %{keys: ["a", "c", "d"]}, + %{keys: ["d"]}, + %{keys: ["d", "a"]}, + %{keys: ["d", "b"]}, + %{keys: ["d", "c"]} + ] + + resp = 
Couch.post("/#{db_name}/_bulk_docs", body: %{docs: docs}, query: %{w: 3}) + assert resp.status_code in [201, 202] + + total_docs = 1 + (i - 1) * 10 * 11 + (j + 1) * 11 + assert Couch.get("/#{db_name}").body["doc_count"] == total_docs + end + + ["_sum", "_count"] + |> Enum.each(fn builtin -> + builtin = "_one#{builtin}" + + # group by exact key match + rows = query_rows(ddoc_url, builtin, %{group: true}) + assert Enum.at(rows, 0) == %{"key" => ["a"], "value" => 20 * i} + assert Enum.at(rows, 1) == %{"key" => ["a", "b"], "value" => 20 * i} + assert Enum.at(rows, 2) == %{"key" => ["a", "b", "c"], "value" => 10 * i} + assert Enum.at(rows, 3) == %{"key" => ["a", "b", "d"], "value" => 10 * i} + + # make sure group reduce and limit params provide valid json + assert [row0, _] = query_rows(ddoc_url, builtin, %{group: true, limit: 2}) + assert row0 == %{"key" => ["a"], "value" => 20 * i} + + # group by the first element in the key array + rows = query_rows(ddoc_url, builtin, %{group_level: 1}) + assert Enum.at(rows, 0) == %{"key" => ["a"], "value" => 70 * i} + assert Enum.at(rows, 1) == %{"key" => ["d"], "value" => 40 * i} + + # group by the first 2 elements in the key array + rows = query_rows(ddoc_url, builtin, %{group_level: 2}) + assert Enum.at(rows, 0) == %{"key" => ["a"], "value" => 20 * i} + assert Enum.at(rows, 1) == %{"key" => ["a", "b"], "value" => 40 * i} + assert Enum.at(rows, 2) == %{"key" => ["a", "c"], "value" => 10 * i} + assert Enum.at(rows, 3) == %{"key" => ["d"], "value" => 10 * i} + assert Enum.at(rows, 4) == %{"key" => ["d", "a"], "value" => 10 * i} + assert Enum.at(rows, 5) == %{"key" => ["d", "b"], "value" => 10 * i} + assert Enum.at(rows, 6) == %{"key" => ["d", "c"], "value" => 10 * i} + end) + + rows = query_rows(ddoc_url, "_ones_array_sum", %{group: true}) + assert Enum.at(rows, 0) == %{"key" => ["a"], "value" => [20 * i, 20 * i]} + assert Enum.at(rows, 1) == %{"key" => ["a", "b"], "value" => [20 * i, 20 * i]} + assert Enum.at(rows, 2) == %{"key" 
=> ["a", "b", "c"], "value" => [10 * i, 10 * i]} + assert Enum.at(rows, 3) == %{"key" => ["a", "b", "d"], "value" => [10 * i, 10 * i]} + + assert [row0, _] = query_rows(ddoc_url, "_ones_array_sum", %{group: true, limit: 2}) + assert row0 == %{"key" => ["a"], "value" => [20 * i, 20 * i]} + + rows = query_rows(ddoc_url, "_ones_array_sum", %{group_level: 1}) + assert Enum.at(rows, 0) == %{"key" => ["a"], "value" => [70 * i, 70 * i]} + assert Enum.at(rows, 1) == %{"key" => ["d"], "value" => [40 * i, 40 * i]} + + rows = query_rows(ddoc_url, "_ones_array_sum", %{group_level: 2}) + assert Enum.at(rows, 0) == %{"key" => ["a"], "value" => [20 * i, 20 * i]} + assert Enum.at(rows, 1) == %{"key" => ["a", "b"], "value" => [40 * i, 40 * i]} + assert Enum.at(rows, 2) == %{"key" => ["a", "c"], "value" => [10 * i, 10 * i]} + assert Enum.at(rows, 3) == %{"key" => ["d"], "value" => [10 * i, 10 * i]} + assert Enum.at(rows, 4) == %{"key" => ["d", "a"], "value" => [10 * i, 10 * i]} + assert Enum.at(rows, 5) == %{"key" => ["d", "b"], "value" => [10 * i, 10 * i]} + assert Enum.at(rows, 6) == %{"key" => ["d", "c"], "value" => [10 * i, 10 * i]} + end + end +end diff --git a/test/elixir/test/reduce_false_test.exs b/test/elixir/test/reduce_false_test.exs new file mode 100644 index 00000000000..3cf4ccb49ef --- /dev/null +++ b/test/elixir/test/reduce_false_test.exs @@ -0,0 +1,51 @@ +defmodule ReduceFalseTest do + use CouchTestCase + + @moduletag :views + @moduletag kind: :single_node + + @moduledoc """ + Test CouchDB view without reduces + This is a port of the reduce_false.js suite + """ + + def summate(n) do + (n + 1) * n / 2 + end + + @tag :with_db + test "Basic reduce functions", context do + db_name = context[:db_name] + view_url = "/#{db_name}/_design/foo/_view/summate" + num_docs = 5 + + map = ~s""" + function (doc) { + emit(doc.integer, doc.integer); + }; + """ + + reduce = "function (keys, values) { return sum(values); };" + red_doc = %{:views => %{:summate => %{:map => map, :reduce => 
reduce}}} + assert Couch.put("/#{db_name}/_design/foo", body: red_doc).body["ok"] + + docs = make_docs(1..num_docs) + resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs}, query: %{w: 3}) + assert resp.status_code in [201, 202] + + # Test that the reduce works + rows = Couch.get(view_url).body["rows"] + assert length(rows) == 1 + assert hd(rows)["value"] == summate(num_docs) + + # Test that we got our docs back + rows = Couch.get(view_url, query: %{reduce: false}).body["rows"] + assert length(rows) == 5 + + rows + |> Enum.with_index(1) + |> Enum.each(fn {row, i} -> + assert i == row["value"] + end) + end +end diff --git a/test/elixir/test/reduce_test.exs b/test/elixir/test/reduce_test.exs index 22f2fa6f255..7b5641ccfde 100644 --- a/test/elixir/test/reduce_test.exs +++ b/test/elixir/test/reduce_test.exs @@ -2,6 +2,7 @@ defmodule ReduceTest do use CouchTestCase @moduletag :views + @moduletag kind: :single_node @moduledoc """ Test CouchDB view reduces diff --git a/test/elixir/test/replication_test.exs b/test/elixir/test/replication_test.exs index bdd683e9748..7b462bdfc42 100644 --- a/test/elixir/test/replication_test.exs +++ b/test/elixir/test/replication_test.exs @@ -5,9 +5,11 @@ defmodule ReplicationTest do Test CouchDB Replication Behavior This is a port of the view_collation.js suite """ + + @moduletag kind: :cluster + @moduletag :replication # TODO: Parameterize these - @admin_account "adm:pass" @db_pairs_prefixes [ {"remote-to-remote", "http://127.0.0.1:15984/", "http://127.0.0.1:15984/"} ] @@ -1584,30 +1586,6 @@ defmodule ReplicationTest do resp.body end - def replicate(src, tgt, options \\ []) do - {userinfo, options} = Keyword.pop(options, :userinfo) - - userinfo = - if userinfo == nil do - @admin_account - else - userinfo - end - - src = set_user(src, userinfo) - tgt = set_user(tgt, userinfo) - - defaults = [headers: [], body: %{}, timeout: 30_000] - options = defaults |> Keyword.merge(options) |> Enum.into(%{}) - - %{body: body} = options - 
body = [source: src, target: tgt] |> Enum.into(body) - options = Map.put(options, :body, body) - - resp = Couch.post("/_replicate", Enum.to_list(options)) - assert HTTPotion.Response.success?(resp), "#{inspect(resp)}" - resp.body - end def cancel_replication(src, tgt) do body = %{:cancel => true} @@ -1737,19 +1715,6 @@ defmodule ReplicationTest do end) end - def set_user(uri, userinfo) do - case URI.parse(uri) do - %{scheme: nil} -> - uri - - %{userinfo: nil} = uri -> - URI.to_string(Map.put(uri, :userinfo, userinfo)) - - _ -> - uri - end - end - def get_att1_data do File.read!(Path.expand("data/lorem.txt", __DIR__)) end diff --git a/test/elixir/test/replicator_db_bad_rep_id_test.exs b/test/elixir/test/replicator_db_bad_rep_id_test.exs new file mode 100644 index 00000000000..9477eb183d7 --- /dev/null +++ b/test/elixir/test/replicator_db_bad_rep_id_test.exs @@ -0,0 +1,84 @@ +defmodule ReplicationBadIdTest do + use CouchTestCase + + @moduledoc """ + This is a port of the replicator_db_bad_rep_id.js suite + """ + + @moduletag :replication + @moduletag kind: :cluster + + @docs [ + %{ + _id: "foo1", + value: 11 + }, + %{ + _id: "foo2", + value: 22 + }, + %{ + _id: "foo3", + value: 33 + } + ] + + test "replication doc with bad rep id" do + name = random_db_name() + src_db_name = name <> "_src" + tgt_db_name = name <> "_tgt" + + create_db(src_db_name) + bulk_save(src_db_name, @docs) + create_db(tgt_db_name) + delete_db_on_exit([src_db_name, tgt_db_name]) + + src_db_url = Couch.process_url("/#{src_db_name}") + tgt_db_url = Couch.process_url("/#{tgt_db_name}") + + replication_doc = %{ + _id: "foo_rep_#{name}", + source: src_db_url, + target: tgt_db_url, + replication_id: "1234abc" + } + + {:ok, repdoc} = create_doc("_replicator", replication_doc) + delete_doc_on_exit("_replicator", repdoc.body["id"]) + + retry_until(fn -> + resp = Couch.get("/_replicator/#{replication_doc[:_id]}") + assert resp.body["_replication_state"] == "completed" + resp + end) + + Enum.each(@docs, fn 
doc -> + copy_resp = Couch.get("/#{tgt_db_name}/#{doc[:_id]}") + assert copy_resp.status_code == 200 + assert copy_resp.body["value"] === doc.value + end) + + resp = Couch.get("/_replicator/#{replication_doc[:_id]}") + assert resp.status_code == 200 + assert resp.body["source"] == replication_doc.source + assert resp.body["target"] == replication_doc.target + assert resp.body["_replication_state"] == "completed" + {:ok, _, _} = DateTime.from_iso8601(resp.body["_replication_state_time"]) + assert resp.body["_replication_id"] == nil + end + + def delete_db_on_exit(db_names) when is_list(db_names) do + on_exit(fn -> + Enum.each(db_names, fn name -> + delete_db(name) + end) + end) + end + + def delete_doc_on_exit(db_name, doc_id) do + on_exit(fn -> + resp = Couch.get("/#{db_name}/#{doc_id}") + Couch.delete("/#{db_name}/#{doc_id}?rev=#{resp.body["_rev"]}") + end) + end +end diff --git a/test/elixir/test/replicator_db_by_doc_id_test.exs b/test/elixir/test/replicator_db_by_doc_id_test.exs new file mode 100644 index 00000000000..681ed02dfae --- /dev/null +++ b/test/elixir/test/replicator_db_by_doc_id_test.exs @@ -0,0 +1,124 @@ +defmodule ReplicatorDBByDocIdTest do + use CouchTestCase + + @moduledoc """ + This is a port of the replicator_db_by_doc_id.js suite + """ + + @moduletag :replication + @moduletag kind: :cluster + + @docs [ + %{ + _id: "foo1", + value: 11 + }, + %{ + _id: "foo2", + value: 22 + }, + %{ + _id: "foo3", + value: 33 + } + ] + + test "replicatior db by doc id" do + name = random_db_name() + src_db_name = name <> "_src" + tgt_db_name = name <> "_tgt" + + create_db(src_db_name) + create_db(tgt_db_name) + delete_db_on_exit([src_db_name, tgt_db_name]) + + # Populate src DB + ddocs = [ + %{ + _id: "_design/mydesign", + language: "javascript" + } + ] + + docs = @docs ++ ddocs + bulk_save(src_db_name, docs) + + src_db_url = Couch.process_url("/#{src_db_name}") + tgt_db_url = build_tgt_uri(tgt_db_name) + + replication_doc = %{ + _id: "foo_cont_rep_#{name}", + 
source: src_db_url, + target: tgt_db_url, + doc_ids: ["foo666", "foo3", "_design/mydesign", "foo999", "foo1"] + } + + {:ok, repdoc} = create_doc("_replicator", replication_doc) + delete_doc_on_exit("_replicator", repdoc.body["id"]) + + retry_until(fn -> + resp = Couch.get("/_replicator/#{replication_doc[:_id]}") + assert resp.body["_replication_state"] == "completed" + resp + end) + + copy_resp = Couch.get("/#{tgt_db_name}/foo1") + assert copy_resp.status_code == 200 + assert copy_resp.body["value"] === 11 + + copy_resp = Couch.get("/#{tgt_db_name}/foo2") + assert copy_resp.status_code == 404 + + copy_resp = Couch.get("/#{tgt_db_name}/foo3") + assert copy_resp.status_code == 200 + assert copy_resp.body["value"] === 33 + + copy_resp = Couch.get("/#{tgt_db_name}/foo666") + assert copy_resp.status_code == 404 + + copy_resp = Couch.get("/#{tgt_db_name}/foo999") + assert copy_resp.status_code == 404 + + # Javascript test suite was executed with admin party + # the design doc was created during replication. + # Elixir test suite is executed configuring an admin. 
+ # The auth info should be provided for the tgt db in order to + # create the design doc during replication + copy_resp = Couch.get("/#{tgt_db_name}/_design/mydesign") + assert copy_resp.status_code == 200 + + resp = Couch.get("/_replicator/#{replication_doc[:_id]}") + assert resp.status_code == 200 + assert resp.body["_replication_stats"]["revisions_checked"] == 3 + assert resp.body["_replication_stats"]["missing_revisions_found"] == 3 + assert resp.body["_replication_stats"]["docs_read"] == 3 + assert resp.body["_replication_stats"]["docs_written"] == 3 + assert resp.body["_replication_stats"]["doc_write_failures"] == 0 + end + + defp build_tgt_uri(db_name) do + username = System.get_env("EX_USERNAME") || "adm" + password = System.get_env("EX_PASSWORD") || "pass" + + "/#{db_name}" + |> Couch.process_url() + |> URI.parse() + |> Map.put(:userinfo, "#{username}:#{password}") + |> URI.to_string() + end + + def delete_db_on_exit(db_names) when is_list(db_names) do + on_exit(fn -> + Enum.each(db_names, fn name -> + delete_db(name) + end) + end) + end + + def delete_doc_on_exit(db_name, doc_id) do + on_exit(fn -> + resp = Couch.get("/#{db_name}/#{doc_id}") + Couch.delete("/#{db_name}/#{doc_id}?rev=#{resp.body["_rev"]}") + end) + end +end diff --git a/test/elixir/test/reshard_all_docs_test.exs b/test/elixir/test/reshard_all_docs_test.exs index ab8c6b75bae..bd3893dd591 100644 --- a/test/elixir/test/reshard_all_docs_test.exs +++ b/test/elixir/test/reshard_all_docs_test.exs @@ -6,6 +6,9 @@ defmodule ReshardAllDocsTest do Test _all_docs interaction with resharding """ + @moduletag :reshard_all_docs + @moduletag kind: :cluster + setup do db = random_db_name() {:ok, _} = create_db(db, query: %{q: 2}) diff --git a/test/elixir/test/reshard_basic_test.exs b/test/elixir/test/reshard_basic_test.exs index dcb198c4689..6d91818f289 100644 --- a/test/elixir/test/reshard_basic_test.exs +++ b/test/elixir/test/reshard_basic_test.exs @@ -5,6 +5,9 @@ defmodule ReshardBasicTest do 
@moduledoc """ Test resharding basic functionality """ + + @moduletag :reshard_basic + @moduletag kind: :cluster setup_all do db1 = random_db_name() diff --git a/test/elixir/test/reshard_changes_feed.exs b/test/elixir/test/reshard_changes_feed.exs index 5498ded7bf5..30862753e52 100644 --- a/test/elixir/test/reshard_changes_feed.exs +++ b/test/elixir/test/reshard_changes_feed.exs @@ -6,6 +6,9 @@ defmodule ReshardChangesFeedTest do Test _changes interaction with resharding """ + @moduletag :reshard_changes_feed + @moduletag kind: :cluster + setup do db = random_db_name() {:ok, _} = create_db(db, query: %{q: 2}) diff --git a/test/elixir/test/rev_stemming_test.exs b/test/elixir/test/rev_stemming_test.exs new file mode 100644 index 00000000000..1fb745eadf7 --- /dev/null +++ b/test/elixir/test/rev_stemming_test.exs @@ -0,0 +1,158 @@ +defmodule RevStemmingTest do + use CouchTestCase + + @moduletag :revs + @moduletag kind: :single_node + + @moduledoc """ + This is a port of the rev_stemming.js suite + """ + + @new_limit 5 + + @tag :with_db + test "revs limit update", context do + db_name = context[:db_name] + + resp = Couch.get("/#{db_name}/_revs_limit") + assert resp.body == 1000 + + create_rev_doc(db_name, "foo", @new_limit + 1) + resp = Couch.get("/#{db_name}/foo?revs=true") + assert length(resp.body["_revisions"]["ids"]) == @new_limit + 1 + + resp = + Couch.put("/#{db_name}/_revs_limit", + body: "#{@new_limit}", + headers: ["Content-type": "application/json"] + ) + + assert resp.status_code == 200 + + create_rev_doc(db_name, "foo", @new_limit + 1) + resp = Couch.get("/#{db_name}/foo?revs=true") + assert length(resp.body["_revisions"]["ids"]) == @new_limit + end + + @tag :with_db + test "revs limit produces replication conflict ", context do + db_name = context[:db_name] + + db_name_b = "#{db_name}_b" + create_db(db_name_b) + delete_db_on_exit([db_name_b]) + + resp = + Couch.put("/#{db_name}/_revs_limit", + body: "#{@new_limit}", + headers: ["Content-type": 
"application/json"] + ) + + assert resp.status_code == 200 + + create_rev_doc(db_name, "foo", @new_limit + 1) + resp = Couch.get("/#{db_name}/foo?revs=true") + assert length(resp.body["_revisions"]["ids"]) == @new_limit + + # If you replicate after you make more edits than the limit, you'll + # cause a spurious edit conflict. + replicate(db_name, db_name_b) + resp = Couch.get("/#{db_name_b}/foo?conflicts=true") + assert not Map.has_key?(resp.body, "_conflicts") + + create_rev_doc(db_name, "foo", @new_limit - 1) + + # one less edit than limit, no conflict + replicate(db_name, db_name_b) + resp = Couch.get("/#{db_name_b}/foo?conflicts=true") + assert not Map.has_key?(resp.body, "_conflicts") + prev_conflicted_rev = resp.body["_rev"] + + # now we hit the limit + create_rev_doc(db_name, "foo", @new_limit + 1) + + replicate(db_name, db_name_b) + resp = Couch.get("/#{db_name_b}/foo?conflicts=true") + assert Map.has_key?(resp.body, "_conflicts") + + conflicted_rev = + resp.body["_conflicts"] + |> Enum.at(0) + + # we have a conflict, but the previous replicated rev is always the losing + # conflict + assert conflicted_rev == prev_conflicted_rev + end + + @tag :with_db + test "revs limit is kept after compaction", context do + db_name = context[:db_name] + + create_rev_doc(db_name, "bar", @new_limit + 1) + resp = Couch.get("/#{db_name}/bar?revs=true") + assert length(resp.body["_revisions"]["ids"]) == @new_limit + 1 + + resp = + Couch.put("/#{db_name}/_revs_limit", + body: "#{@new_limit}", + headers: ["Content-type": "application/json"] + ) + + assert resp.status_code == 200 + + # We having already updated bar before setting the limit, so it's still got + # a long rev history. compact to stem the revs. 
+ resp = Couch.get("/#{db_name}/bar?revs=true") + assert length(resp.body["_revisions"]["ids"]) == @new_limit + + compact(db_name) + + # force reload because ETags don't honour compaction + resp = + Couch.get("/#{db_name}/bar?revs=true", + headers: ["if-none-match": "pommes"] + ) + + assert length(resp.body["_revisions"]["ids"]) == @new_limit + end + + # function to create a doc with multiple revisions + defp create_rev_doc(db_name, id, num_revs) do + resp = Couch.get("/#{db_name}/#{id}") + + doc = + if resp.status_code == 200 do + resp.body + else + %{_id: id, count: 0} + end + + {:ok, resp} = create_doc(db_name, doc) + create_rev_doc(db_name, id, num_revs, [Map.put(doc, :_rev, resp.body["rev"])]) + end + + defp create_rev_doc(db_name, id, num_revs, revs) do + if length(revs) < num_revs do + doc = %{_id: id, _rev: Enum.at(revs, -1)[:_rev], count: length(revs)} + {:ok, resp} = create_doc(db_name, doc) + + create_rev_doc( + db_name, + id, + num_revs, + revs ++ [Map.put(doc, :_rev, resp.body["rev"])] + ) + else + revs + end + end + + def delete_db_on_exit(db_names) when is_list(db_names) do + on_exit(fn -> + Enum.each(db_names, fn name -> + delete_db(name) + end) + end) + end + +end diff --git a/test/elixir/test/rewrite_js_test.exs b/test/elixir/test/rewrite_js_test.exs new file mode 100644 index 00000000000..a3adb3e7d4b --- /dev/null +++ b/test/elixir/test/rewrite_js_test.exs @@ -0,0 +1,411 @@ +defmodule RewriteJSTest do + use CouchTestCase + + @moduletag :js_engine + @moduletag kind: :single_node + + @moduledoc """ + Test CouchDB rewrites JS + This is a port of the rewrite_js.js suite + """ + + @ddoc %{ + _id: "_design/test", + language: "javascript", + _attachments: %{ + "foo.txt": %{ + content_type: "text/plain", + data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ=" + } + }, + rewrites: """ + function(req) { + prefix = req.path[4]; + if (prefix === 'foo') { + return 'foo.txt'; + } + if (prefix === 'foo2') { + return {path: 'foo.txt', method: 'GET'}; + } + if (prefix 
=== 'hello') { + if (req.method != 'PUT') { + return + } + id = req.path[5]; + return {path: '_update/hello/' + id}; + } + if (prefix === 'welcome') { + if (req.path.length == 6){ + name = req.path[5]; + return {path: '_show/welcome', query: {'name': name}}; + } + return '_show/welcome'; + } + if (prefix === 'welcome2') { + return {path: '_show/welcome', query: {'name': 'user'}}; + } + if (prefix === 'welcome3') { + name = req.path[5]; + if (req.method == 'PUT') { + path = '_update/welcome2/' + name; + } else if (req.method == 'GET') { + path = '_show/welcome2/' + name; + } else { + return; + } + return path; + } + if (prefix === 'welcome4') { + return {path: '_show/welcome3', query: {name: req.path[5]}}; + } + if (prefix === 'welcome5') { + rest = req.path.slice(5).join('/'); + return {path: '_show/' + rest, query: {name: rest}}; + } + if (prefix === 'basicView') { + rest = req.path.slice(5).join('/'); + return {path: '_view/basicView'}; + } + if (req.path.slice(4).join('/') === 'simpleForm/basicView') { + return {path: '_list/simpleForm/basicView'}; + } + if (req.path.slice(4).join('/') === 'simpleForm/basicViewFixed') { + return {path: '_list/simpleForm/basicView', + query: {startkey: '"3"', endkey: '"8"'}}; + } + if (req.path.slice(4).join('/') === 'simpleForm/complexView') { + return {path: '_list/simpleForm/complexView', + query: {key: JSON.stringify([1,2])}}; + } + if (req.path.slice(4).join('/') === 'simpleForm/complexView2') { + return {path: '_list/simpleForm/complexView', + query: {key: JSON.stringify(['test', {}])}}; + } + if (req.path.slice(4).join('/') === 'simpleForm/complexView3') { + return {path: '_list/simpleForm/complexView', + query: {key: JSON.stringify(['test', ['test', 'essai']])}}; + } + if (req.path.slice(4).join('/') === 'simpleForm/complexView4') { + return {path: '_list/simpleForm/complexView2', + query: {key: JSON.stringify({"c": 1})}}; + } + if (req.path.slice(4).join('/') === 'simpleForm/sendBody1') { + return {path: 
'_list/simpleForm/complexView2', + method: 'POST', + query: {limit: '1'}, + headers:{'Content-type':'application/json'}, + body: JSON.stringify( {keys: [{"c": 1}]} )}; + } + if (req.path.slice(4).join('/') === '/') { + return {path: '_view/basicView'}; + } + if (prefix === 'db') { + return {path: '../../' + req.path.slice(5).join('/')}; + } + } + """, + lists: %{ + simpleForm: """ + function(head, req) { + send('
    '); + var row, row_number = 0, prevKey, firstKey = null; + while (row = getRow()) { + row_number += 1; + if (!firstKey) firstKey = row.key; + prevKey = row.key; + send('\\n
  • Key: '+row.key + +' Value: '+row.value + +' LineNo: '+row_number+'
  • '); + } + return '

FirstKey: '+ firstKey + ' LastKey: '+ prevKey+'

'; + } + """ + }, + shows: %{ + welcome: """ + function(doc,req) { + return "Welcome " + req.query["name"]; + } + """, + welcome2: """ + function(doc, req) { + return "Welcome " + doc.name; + } + """, + welcome3: """ + function(doc,req) { + return "Welcome " + req.query["name"]; + } + """ + }, + updates: %{ + hello: """ + function(doc, req) { + if (!doc) { + if (req.id) { + return [{ + _id : req.id + }, "New World"] + } + return [null, "Empty World"]; + } + doc.world = "hello"; + doc.edited_by = req.userCtx; + return [doc, "hello doc"]; + } + """, + welcome2: """ + function(doc, req) { + if (!doc) { + if (req.id) { + return [{ + _id: req.id, + name: req.id + }, "New World"] + } + return [null, "Empty World"]; + } + return [doc, "hello doc"]; + } + """ + }, + views: %{ + basicView: %{ + map: """ + function(doc) { + if (doc.integer) { + emit(doc.integer, doc.string); + } + } + """ + }, + complexView: %{ + map: """ + function(doc) { + if (doc.type == "complex") { + emit([doc.a, doc.b], doc.string); + } + } + """ + }, + complexView2: %{ + map: """ + function(doc) { + if (doc.type == "complex") { + emit(doc.a, doc.string); + } + } + """ + }, + complexView3: %{ + map: """ + function(doc) { + if (doc.type == "complex") { + emit(doc.b, doc.string); + } + } + """ + } + } + } + + Enum.each( + ["test_rewrite_suite_db", "test_rewrite_suite_db%2Fwith_slashes"], + fn db_name -> + @tag with_random_db: db_name + test "Test basic js rewrites on #{db_name}", context do + db_name = context[:db_name] + + create_doc(db_name, @ddoc) + + docs1 = make_docs(0..9) + bulk_save(db_name, docs1) + + docs2 = [ + %{"a" => 1, "b" => 1, "string" => "doc 1", "type" => "complex"}, + %{"a" => 1, "b" => 2, "string" => "doc 2", "type" => "complex"}, + %{"a" => "test", "b" => %{}, "string" => "doc 3", "type" => "complex"}, + %{ + "a" => "test", + "b" => ["test", "essai"], + "string" => "doc 4", + "type" => "complex" + }, + %{"a" => %{"c" => 1}, "b" => "", "string" => "doc 5", "type" => "complex"} + ] + + 
bulk_save(db_name, docs2) + + # Test simple rewriting + resp = Couch.get("/#{db_name}/_design/test/_rewrite/foo") + assert resp.body == "This is a base64 encoded text" + assert resp.headers["Content-Type"] == "text/plain" + + resp = Couch.get("/#{db_name}/_design/test/_rewrite/foo2") + assert resp.body == "This is a base64 encoded text" + assert resp.headers["Content-Type"] == "text/plain" + + # Test POST, hello update world + resp = + Couch.post("/#{db_name}", body: %{"word" => "plankton", "name" => "Rusty"}).body + + assert resp["ok"] + doc_id = resp["id"] + assert doc_id + + resp = Couch.put("/#{db_name}/_design/test/_rewrite/hello/#{doc_id}") + assert resp.status_code in [201, 202] + assert resp.body == "hello doc" + assert String.match?(resp.headers["Content-Type"], ~r/charset=utf-8/) + + assert Couch.get("/#{db_name}/#{doc_id}").body["world"] == "hello" + + resp = Couch.get("/#{db_name}/_design/test/_rewrite/welcome?name=user") + assert resp.body == "Welcome user" + + resp = Couch.get("/#{db_name}/_design/test/_rewrite/welcome/user") + assert resp.body == "Welcome user" + + resp = Couch.get("/#{db_name}/_design/test/_rewrite/welcome2") + assert resp.body == "Welcome user" + + resp = Couch.put("/#{db_name}/_design/test/_rewrite/welcome3/test") + assert resp.status_code in [201, 202] + assert resp.body == "New World" + assert String.match?(resp.headers["Content-Type"], ~r/charset=utf-8/) + + resp = Couch.get("/#{db_name}/_design/test/_rewrite/welcome3/test") + assert resp.body == "Welcome test" + + resp = Couch.get("/#{db_name}/_design/test/_rewrite/welcome4/user") + assert resp.body == "Welcome user" + + resp = Couch.get("/#{db_name}/_design/test/_rewrite/welcome5/welcome3") + assert resp.body == "Welcome welcome3" + + resp = Couch.get("/#{db_name}/_design/test/_rewrite/basicView") + assert resp.status_code == 200 + assert resp.body["total_rows"] == 9 + + resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView") + assert resp.status_code 
== 200 + assert String.match?(resp.body, ~r/FirstKey: [1, 2]/) + + resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView2") + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/Value: doc 3/) + + resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView3") + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/Value: doc 4/) + + resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView4") + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/Value: doc 5/) + + # COUCHDB-1612 - send body rewriting get to post + resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/sendBody1") + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/Value: doc 5 LineNo: 1/) + + resp = Couch.get("/#{db_name}/_design/test/_rewrite/db/_design/test?meta=true") + assert resp.status_code == 200 + assert resp.body["_id"] == "_design/test" + assert Map.has_key?(resp.body, "_revs_info") + end + + @tag with_random_db: db_name + test "early response on #{db_name}", context do + db_name = context[:db_name] + + ddoc = %{ + _id: "_design/response", + rewrites: """ + function(req){ + status = parseInt(req.query.status); + return {code: status, + body: JSON.stringify({"status": status}), + headers: {'x-foo': 'bar', 'Content-Type': 'application/json'}}; + } + """ + } + + create_doc(db_name, ddoc) + + resp = Couch.get("/#{db_name}/_design/response/_rewrite?status=200") + assert resp.status_code == 200 + assert resp.headers["x-foo"] == "bar" + assert resp.body["status"] == 200 + + resp = Couch.get("/#{db_name}/_design/response/_rewrite?status=451") + assert resp.status_code == 451 + assert resp.headers["Content-Type"] == "application/json" + + resp = Couch.get("/#{db_name}/_design/response/_rewrite?status=500") + assert resp.status_code == 500 + end + + @tag with_random_db: db_name + test "path relative to server on #{db_name}", context do + db_name = context[:db_name] + 
+ ddoc = %{ + _id: "_design/relative", + rewrites: """ + function(req){ + return '../../../_uuids' + } + """ + } + + create_doc(db_name, ddoc) + resp = Couch.get("/#{db_name}/_design/relative/_rewrite/uuids") + assert resp.status_code == 200 + assert length(resp.body["uuids"]) == 1 + end + + @tag with_random_db: db_name + test "loop on #{db_name}", context do + db_name = context[:db_name] + + ddoc_loop = %{ + _id: "_design/loop", + rewrites: """ + function(req) { + return '_rewrite/loop'; + } + """ + } + + create_doc(db_name, ddoc_loop) + resp = Couch.get("/#{db_name}/_design/loop/_rewrite/loop") + assert resp.status_code == 400 + end + + @tag with_random_db: db_name + test "requests with body preserve the query string rewrite on #{db_name}", + context do + db_name = context[:db_name] + + ddoc_qs = %{ + _id: "_design/qs", + rewrites: + "function (r) { return {path: '../../_changes', query: {'filter': '_doc_ids'}};};" + } + + create_doc(db_name, ddoc_qs) + create_doc(db_name, %{_id: "qs1"}) + create_doc(db_name, %{_id: "qs2"}) + + resp = + Couch.post("/#{db_name}/_design/qs/_rewrite", + body: %{doc_ids: ["qs2"]} + ) + + assert resp.status_code == 200 + assert length(resp.body["results"]) == 1 + assert Enum.at(resp.body["results"], 0)["id"] == "qs2" + end + end + ) +end diff --git a/test/elixir/test/rewrite_test.exs b/test/elixir/test/rewrite_test.exs index 1960ddfde66..75f198568f7 100644 --- a/test/elixir/test/rewrite_test.exs +++ b/test/elixir/test/rewrite_test.exs @@ -2,6 +2,7 @@ defmodule RewriteTest do use CouchTestCase @moduletag :js_engine + @moduletag kind: :single_node @moduledoc """ Test CouchDB rewrites @@ -348,15 +349,178 @@ defmodule RewriteTest do assert resp.status_code == 200 assert resp.body["total_rows"] == 9 - # TODO: port _list function tests and everything below in rewrite.js - # This is currently broken because _list funcitons default to application/json - # response bodies and my attempts to change the content-type from within the - # _list 
function have not yet succeeded. - # - # Test GET with query params - # resp = Couch.get("/#{db_name}/_design/test/_rewrite/simpleForm/basicView", query: %{startkey: 3, endkey: 8}) - # Logger.error("GOT RESP: #{inspect resp.body}") - # assert resp.status_code == 200 + resp = + Rawresp.get( + "/#{db_name}/_design/test/_rewrite/simpleForm/basicView?startkey=3&endkey=8" + ) + + assert resp.status_code == 200 + assert not String.match?(resp.body, ~r/Key: 1/) + assert String.match?(resp.body, ~r/FirstKey: 3/) + assert String.match?(resp.body, ~r/LastKey: 8/) + + resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/basicViewFixed") + assert resp.status_code == 200 + assert not String.match?(resp.body, ~r/Key: 1/) + assert String.match?(resp.body, ~r/FirstKey: 3/) + assert String.match?(resp.body, ~r/LastKey: 8/) + + resp = + Rawresp.get( + "/#{db_name}/_design/test/_rewrite/simpleForm/basicViewFixed?startkey=4" + ) + + assert resp.status_code == 200 + assert not String.match?(resp.body, ~r/Key: 1/) + assert String.match?(resp.body, ~r/FirstKey: 3/) + assert String.match?(resp.body, ~r/LastKey: 8/) + + resp = + Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/basicViewPath/3/8") + + assert resp.status_code == 200 + assert not String.match?(resp.body, ~r/Key: 1/) + assert String.match?(resp.body, ~r/FirstKey: 3/) + assert String.match?(resp.body, ~r/LastKey: 8/) + + resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView") + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/FirstKey: [1, 2]/) + + resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView2") + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/Value: doc 3/) + + resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView3") + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/Value: doc 4/) + + resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/simpleForm/complexView4") + 
assert resp.status_code == 200 + assert String.match?(resp.body, ~r/Value: doc 5/) + + resp = + Rawresp.get( + "/#{db_name}/_design/test/_rewrite/simpleForm/complexView5/test/essai" + ) + + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/Value: doc 4/) + + resp = + Rawresp.get( + "/#{db_name}/_design/test/_rewrite/simpleForm/complexView6?a=test&b=essai" + ) + + assert resp.status_code == 200 + assert String.match?(resp.body, ~r/Value: doc 4/) + + resp = + Rawresp.get( + "/#{db_name}/_design/test/_rewrite/simpleForm/complexView7/test/essai?doc=true" + ) + + assert resp.status_code == 200 + result = resp.body |> IO.iodata_to_binary() |> :jiffy.decode([:return_maps]) + first_row = Enum.at(result["rows"], 0) + assert Map.has_key?(first_row, "doc") + + # COUCHDB-2031 - path normalization versus qs params + resp = Rawresp.get("/#{db_name}/_design/test/_rewrite/db/_design/test?meta=true") + assert resp.status_code == 200 + result = resp.body |> IO.iodata_to_binary() |> :jiffy.decode([:return_maps]) + assert result["_id"] == "_design/test" + assert Map.has_key?(result, "_revs_info") + + ddoc2 = %{ + _id: "_design/test2", + rewrites: [ + %{ + from: "uuids", + to: "../../../_uuids" + } + ] + } + + create_doc(db_name, ddoc2) + resp = Couch.get("/#{db_name}/_design/test2/_rewrite/uuids") + assert resp.status_code == 500 + assert resp.body["error"] == "insecure_rewrite_rule" + end + + @tag with_random_db: db_name + @tag config: [ + {"httpd", "secure_rewrites", "false"} + ] + test "path relative to server on #{db_name}", context do + db_name = context[:db_name] + + ddoc = %{ + _id: "_design/test2", + rewrites: [ + %{ + from: "uuids", + to: "../../../_uuids" + } + ] + } + + create_doc(db_name, ddoc) + + resp = Couch.get("/#{db_name}/_design/test2/_rewrite/uuids") + assert resp.status_code == 200 + assert length(resp.body["uuids"]) == 1 + end + + @tag with_random_db: db_name + @tag config: [ + {"httpd", "rewrite_limit", "2"} + ] + test "loop detection on 
#{db_name}", context do + db_name = context[:db_name] + + ddoc_loop = %{ + _id: "_design/loop", + rewrites: [%{from: "loop", to: "_rewrite/loop"}] + } + + create_doc(db_name, ddoc_loop) + + resp = Couch.get("/#{db_name}/_design/loop/_rewrite/loop") + assert resp.status_code == 400 + end + + @tag with_random_db: db_name + @tag config: [ + {"httpd", "rewrite_limit", "2"}, + {"httpd", "secure_rewrites", "false"} + ] + test "serial execution is not spuriously counted as loop on #{db_name}", context do + db_name = context[:db_name] + + ddoc = %{ + _id: "_design/test", + language: "javascript", + _attachments: %{ + "foo.txt": %{ + content_type: "text/plain", + data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ=" + } + }, + rewrites: [ + %{ + from: "foo", + to: "foo.txt" + } + ] + } + + create_doc(db_name, ddoc) + + for _i <- 0..4 do + resp = Couch.get("/#{db_name}/_design/test/_rewrite/foo") + assert resp.status_code == 200 + end end end ) diff --git a/test/elixir/test/security_validation_test.exs b/test/elixir/test/security_validation_test.exs index 0df3a780ba8..4b3b4fa9e1f 100644 --- a/test/elixir/test/security_validation_test.exs +++ b/test/elixir/test/security_validation_test.exs @@ -2,6 +2,7 @@ defmodule SecurityValidationTest do use CouchTestCase @moduletag :security + @moduletag kind: :single_node @moduledoc """ Test CouchDB Security Validations @@ -20,6 +21,10 @@ defmodule SecurityValidationTest do spike_cat: [ # spike:cat - which is wrong authorization: "Basic c3Bpa2U6Y2F0" + ], + spike: [ + # spike:dog + authorization: "Basic c3Bpa2U6ZG9n" ] } @@ -148,6 +153,15 @@ defmodule SecurityValidationTest do assert resp.body["userCtx"]["roles"] == [] end + @tag :with_db + test "try to set a wrong value for _security", context do + db_name = context[:db_name] + # try to do something lame + resp = Couch.put("/#{db_name}/_security", body: ["foo"]) + assert resp.status_code == 400 + assert resp.body["error"] == "bad_request" + end + @tag :with_db test "Author presence and user 
security", context do db_name = context[:db_name] @@ -180,6 +194,12 @@ defmodule SecurityValidationTest do assert resp.status_code == 403 assert resp.body["error"] == "forbidden" + # Admin cannot write the document (admin_override = false) + test_doc = Map.put(test_doc, "foo", 3) + resp = Couch.put("/#{db_name}/test_doc", body: test_doc) + assert resp.status_code == 401 + assert resp.body["error"] == "unauthorized" + # Enable admin override for changing author values assert Couch.put("/#{db_name}/_security", body: %{sec_obj | admin_override: true}).body[ "ok" @@ -203,136 +223,103 @@ defmodule SecurityValidationTest do resp = Couch.delete("/#{db_name}/test_doc?rev=#{test_doc["_rev"]}", opts) resp.status_code == 401 and resp.body["error"] == "unauthorized" end) + + # Admin can write the document (admin_override = true) + test_doc = Map.put(test_doc, "foo", 4) + resp = Couch.put("/#{db_name}/test_doc", body: test_doc) + assert resp.body["ok"] + + # Disable admin override + assert Couch.put("/#{db_name}/_security", body: %{sec_obj | admin_override: false}).body[ + "ok" + ] + + docs = [%{_id: "bahbah", author: "jerry", foo: "bar"}, %{_id: "fahfah", foo: "baz"}] + + resp = + Couch.post( + "/#{db_name}/_bulk_docs", + body: %{ + docs: docs + }, + headers: jerry + ) + + assert Enum.at(resp.body, 0)["rev"] + assert !Enum.at(resp.body, 0)["error"] + assert !Enum.at(resp.body, 1)["rev"] + assert Enum.at(resp.body, 1)["error"] == "forbidden" + + resp = Couch.get("/#{db_name}/bahbah") + assert resp.status_code == 200 + + resp = Couch.get("/#{db_name}/fahfah") + assert resp.status_code == 404 end -end -# TODO: port remainder of security_validation.js suite -# remaining bits reproduced below: -# -# // try to do something lame -# try { -# db.setDbProperty("_security", ["foo"]); -# T(false && "can't do this"); -# } catch(e) {} -# -# // go back to normal -# T(db.setDbProperty("_security", {admin_override : false}).ok); -# -# // Now delete document -# T(user2Db.deleteDoc(doc).ok); -# 
-# // now test bulk docs -# var docs = [{_id:"bahbah",author:"jerry",foo:"bar"},{_id:"fahfah",foo:"baz"}]; -# -# // Create the docs -# var results = db.bulkSave(docs); -# -# T(results[0].rev) -# T(results[0].error == undefined) -# T(results[1].rev === undefined) -# T(results[1].error == "forbidden") -# -# T(db.open("bahbah")); -# T(db.open("fahfah") == null); -# -# -# // now all or nothing with a failure - no more available on cluster -# /* var docs = [ -# {_id:"booboo",author:"Damien Katz",foo:"bar"},{_id:"foofoo",foo:"baz"} -# ]; -# -# // Create the docs -# var results = db.bulkSave(docs, {all_or_nothing:true}); -# -# T(results.errors.length == 1); -# T(results.errors[0].error == "forbidden"); -# T(db.open("booboo") == null); -# T(db.open("foofoo") == null); -# */ -# -# // Now test replication -# var AuthHeaders = {"Authorization": "Basic c3Bpa2U6ZG9n"}; // spike -# adminDbA = new CouchDB("" + db_name + "_a", {"X-Couch-Full-Commit":"false"}); -# adminDbB = new CouchDB("" + db_name + "_b", {"X-Couch-Full-Commit":"false"}); -# var dbA = new CouchDB("" + db_name + "_a", AuthHeaders); -# var dbB = new CouchDB("" + db_name + "_b", AuthHeaders); -# // looping does not really add value as the scenario is the same anyway -# // (there's nothing 2 be gained from it) -# var A = CouchDB.protocol + CouchDB.host + "/" + db_name + "_a"; -# var B = CouchDB.protocol + CouchDB.host + "/" + db_name + "_b"; -# -# // (the databases never exist b4 - and we made sure they're deleted below) -# //adminDbA.deleteDb(); -# adminDbA.createDb(); -# //adminDbB.deleteDb(); -# adminDbB.createDb(); -# -# // save and replicate a documents that will and will not pass our design -# // doc validation function. 
-# T(dbA.save({_id:"foo1",value:"a",author:"tom"}).ok); -# T(dbA.save({_id:"foo2",value:"a",author:"spike"}).ok); -# T(dbA.save({_id:"bad1",value:"a"}).ok); -# -# T(CouchDB.replicate(A, B, {headers:AuthHeaders}).ok); -# T(CouchDB.replicate(B, A, {headers:AuthHeaders}).ok); -# -# T(dbA.open("foo1")); -# T(dbB.open("foo1")); -# T(dbA.open("foo2")); -# T(dbB.open("foo2")); -# -# // save the design doc to dbA -# delete designDoc._rev; // clear rev from previous saves -# T(adminDbA.save(designDoc).ok); -# -# // no affect on already saved docs -# T(dbA.open("bad1")); -# -# // Update some docs on dbB. Since the design hasn't replicated, anything -# // is allowed. -# -# // this edit will fail validation on replication to dbA (no author) -# T(dbB.save({_id:"bad2",value:"a"}).ok); -# -# // this edit will fail security on replication to dbA (wrong author -# // replicating the change) -# var foo1 = dbB.open("foo1"); -# foo1.value = "b"; -# T(dbB.save(foo1).ok); -# -# // this is a legal edit -# var foo2 = dbB.open("foo2"); -# foo2.value = "b"; -# T(dbB.save(foo2).ok); -# -# var results = CouchDB.replicate({"url": B, "headers": AuthHeaders}, -# {"url": A, "headers": AuthHeaders}, {headers:AuthHeaders}); -# T(results.ok); -# TEquals(1, results.history[0].docs_written); -# TEquals(2, results.history[0].doc_write_failures); -# -# // bad2 should not be on dbA -# T(dbA.open("bad2") == null); -# -# // The edit to foo1 should not have replicated. -# T(dbA.open("foo1").value == "a"); -# -# // The edit to foo2 should have replicated. 
-# T(dbA.open("foo2").value == "b"); -# }); -# -# // cleanup -# db.deleteDb(); -# if(adminDbA){ -# adminDbA.deleteDb(); -# } -# if(adminDbB){ -# adminDbB.deleteDb(); -# } -# authDb.deleteDb(); -# // have to clean up authDb on the backside :( -# var req = CouchDB.newXhr(); -# req.open("DELETE", "http://127.0.0.1:15986/" + authDb_name, false); -# req.send(""); -# CouchDB.maybeThrowError(req); -# }; + test "Author presence and user security when replicated", _context do + db_name = random_db_name() + db_name_a = "#{db_name}_a" + db_name_b = "#{db_name}_b" + create_db(db_name_a) + create_db(db_name_b) + on_exit(fn -> delete_db(db_name_a) end) + on_exit(fn -> delete_db(db_name_b) end) + + spike = @auth_headers[:spike] + + # save and replicate a documents that will and will not pass our design + # doc validation function. + {:ok, _} = create_doc(db_name_a, %{_id: "foo1", value: "a", author: "tom"}) + {:ok, _} = create_doc(db_name_a, %{_id: "foo2", value: "a", author: "spike"}) + {:ok, _} = create_doc(db_name_a, %{_id: "bad1", value: "a"}) + replicate(db_name_a, db_name_b, headers: spike) + replicate(db_name_b, db_name_a, headers: spike) + + assert Couch.get("/#{db_name_a}/foo1").status_code == 200 + assert Couch.get("/#{db_name_b}/foo1").status_code == 200 + assert Couch.get("/#{db_name_a}/foo2").status_code == 200 + assert Couch.get("/#{db_name_b}/foo2").status_code == 200 + + {:ok, _} = create_doc(db_name_a, @ddoc) + + # no affect on already saved docs + assert Couch.get("/#{db_name_a}/bad1").status_code == 200 + + # Update some docs on dbB. Since the design hasn't replicated, anything + # is allowed. 
+ + # this edit will fail validation on replication to dbA (no author) + assert Couch.post( + "/#{db_name_b}", + body: %{id: "bad2", value: "a"}, + headers: spike + ).body["ok"] + + # this edit will fail security on replication to dbA (wrong author + # replicating the change) + foo1 = Couch.get("/#{db_name_b}/foo1").body + foo1 = Map.put(foo1, "value", "b") + assert Couch.put("/#{db_name_b}/foo1", body: foo1, headers: spike).body["ok"] + + # this is a legal edit + foo2 = Couch.get("/#{db_name_b}/foo2").body + foo2 = Map.put(foo2, "value", "b") + assert Couch.put("/#{db_name_b}/foo2", body: foo2, headers: spike).body["ok"] + + result = replicate(db_name_b, db_name_a, headers: spike) + assert Enum.at(result["history"], 0)["docs_written"] == 1 + assert Enum.at(result["history"], 0)["doc_write_failures"] == 2 + + # bad2 should not be on dbA + assert Couch.get("/#{db_name_a}/bad2").status_code == 404 + + # The edit to foo1 should not have replicated. + resp = Couch.get("/#{db_name_a}/foo1") + assert resp.body["value"] == "a" + + # The edit to foo2 should have replicated. 
+ resp = Couch.get("/#{db_name_a}/foo2") + assert resp.body["value"] == "b" + end +end diff --git a/test/elixir/test/show_documents_test.exs b/test/elixir/test/show_documents_test.exs new file mode 100644 index 00000000000..a574c72b18d --- /dev/null +++ b/test/elixir/test/show_documents_test.exs @@ -0,0 +1,448 @@ +defmodule ShowDocumentsTest do + use CouchTestCase + + @moduletag kind: :single_node + + @ddoc %{ + _id: "_design/template", + language: "javascript", + shows: %{ + hello: """ + function(doc, req) { + if (doc) { + return "Hello World"; + } else { + if(req.id) { + return "New World"; + } else { + return "Empty World"; + } + } + } + """, + "just-name": """ + function(doc, req) { + if (doc) { + return { + body : "Just " + doc.name + }; + } else { + return { + body : "No such doc", + code : 404 + }; + } + } + """, + json: """ + function(doc, req) { + return { + json : doc + } + } + """, + "req-info": """ + function(doc, req) { + return { + json : req + } + } + """, + "show-deleted": """ + function(doc, req) { + if(doc) { + return doc._id; + } else { + return "No doc " + req.id; + } + } + """, + "render-error": """ + function(doc, req) { + return noSuchVariable; + } + """, + empty: """ + function(doc, req) { + return ""; + } + """, + fail: """ + function(doc, req) { + return doc._id; + } + """, + "no-set-etag": """ + function(doc, req) { + return { + headers : { + "Etag" : "skipped" + }, + "body" : "something" + } + } + """, + "list-api": """ + function(doc, req) { + start({"X-Couch-Test-Header": "Yeah"}); + send("Hey"); + } + """, + "list-api-provides": """ + function(doc, req) { + provides("text", function(){ + send("foo, "); + send("bar, "); + send("baz!"); + }) + } + """, + "list-api-provides-and-return": """ + function(doc, req) { + provides("text", function(){ + send("4, "); + send("5, "); + send("6, "); + return "7!"; + }) + send("1, "); + send("2, "); + return "3, "; + } + """, + "list-api-mix": """ + function(doc, req) { + 
start({"X-Couch-Test-Header": "Yeah"}); + send("Hey "); + return "Dude"; + } + """, + "list-api-mix-with-header": """ + function(doc, req) { + start({"X-Couch-Test-Header": "Yeah"}); + send("Hey "); + return { + headers: { + "X-Couch-Test-Header-Awesome": "Oh Yeah!" + }, + body: "Dude" + }; + } + """, + "accept-switch": """ + function(doc, req) { + if (req.headers["Accept"].match(/image/)) { + return { + // a 16x16 px version of the CouchDB logo + "base64" : + ["iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAAsV", + "BMVEUAAAD////////////////////////5ur3rEBn////////////////wDBL/", + "AADuBAe9EB3IEBz/7+//X1/qBQn2AgP/f3/ilpzsDxfpChDtDhXeCA76AQH/v7", + "/84eLyWV/uc3bJPEf/Dw/uw8bRWmP1h4zxSlD6YGHuQ0f6g4XyQkXvCA36MDH6", + "wMH/z8/yAwX64ODeh47BHiv/Ly/20dLQLTj98PDXWmP/Pz//39/wGyJ7Iy9JAA", + "AADHRSTlMAbw8vf08/bz+Pv19jK/W3AAAAg0lEQVR4Xp3LRQ4DQRBD0QqTm4Y5", + "zMxw/4OleiJlHeUtv2X6RbNO1Uqj9g0RMCuQO0vBIg4vMFeOpCWIWmDOw82fZx", + "vaND1c8OG4vrdOqD8YwgpDYDxRgkSm5rwu0nQVBJuMg++pLXZyr5jnc1BaH4GT", + "LvEliY253nA3pVhQqdPt0f/erJkMGMB8xucAAAAASUVORK5CYII="].join(''), + headers : { + "Content-Type" : "image/png", + "Vary" : "Accept" // we set this for proxy caches + } + }; + } else { + return { + "body" : "accepting text requests", + headers : { + "Content-Type" : "text/html", + "Vary" : "Accept" + } + }; + } + } + """, + provides: """ + function(doc, req) { + registerType("foo", "application/foo","application/x-foo"); + + provides("html", function() { + return "Ha ha, you said \\"" + doc.word + "\\"."; + }); + + provides("foo", function() { + return "foofoo"; + }); + } + """, + withSlash: """ + function(doc, req) { + return { json: doc } + } + """, + secObj: """ + function(doc, req) { + return { json: req.secObj }; + } + """ + } + } + + setup_all do + db_name = random_db_name() + {:ok, _} = create_db(db_name) + on_exit(fn -> delete_db(db_name) end) + + {:ok, _} = create_doc(db_name, @ddoc) + + create_doc(db_name, %{_id: "test-doc-id", word: "plankton", name: "Rusty"}) + + {:ok, 
[db_name: db_name]} + end + + test "show error", context do + db_name = context[:db_name] + + resp = Couch.get("/#{db_name}/_design/template/_show/") + assert resp.status_code == 404 + assert resp.body["reason"] == "Invalid path." + end + + test "show with existing doc", context do + db_name = context[:db_name] + + resp = Rawresp.get("/#{db_name}/_design/template/_show/hello/test-doc-id") + assert resp.body == "Hello World" + assert String.match?(resp.headers["Content-Type"], ~r/charset=utf-8/) + + # Fix for COUCHDB-379 + assert String.match?(resp.headers["Server"], ~r/^CouchDB/) + end + + test "show without docid", context do + db_name = context[:db_name] + resp = Rawresp.get("/#{db_name}/_design/template/_show/hello") + assert resp.body == "Empty World" + + resp = Rawresp.get("/#{db_name}/_design/template/_show/empty") + assert resp.body == "" + end + + test "show fail with non-existing docid", context do + db_name = context[:db_name] + resp = Couch.get("/#{db_name}/_design/template/_show/fail/nonExistingDoc") + assert resp.status_code == 404 + assert resp.body["error"] == "not_found" + end + + test "show with doc", context do + db_name = context[:db_name] + resp = Rawresp.get("/#{db_name}/_design/template/_show/just-name/test-doc-id") + assert resp.body == "Just Rusty" + end + + test "show with missing doc", context do + db_name = context[:db_name] + resp = Rawresp.get("/#{db_name}/_design/template/_show/just-name/missingdoc") + assert resp.status_code == 404 + assert resp.body == "No such doc" + end + + test "missing design doc", context do + db_name = context[:db_name] + resp = Couch.get("/#{db_name}/_design/missingddoc/_show/just-name/test-doc-id") + assert resp.status_code == 404 + assert resp.body["error"] == "not_found" + end + + test "show query parameters", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_design/template/_show/req-info/test-doc-id?foo=bar", + headers: [Accept: "text/html;text/plain;*/*", "X-Foo": "bar"] + ) + 
+ assert resp.body["headers"]["X-Foo"] == "bar" + assert resp.body["query"] == %{"foo" => "bar"} + assert resp.body["method"] == "GET" + assert Enum.at(resp.body["path"], 5) == "test-doc-id" + assert resp.body["info"]["db_name"] == db_name + end + + test "accept header switching - different mime has different etag", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_design/template/_show/accept-switch/test-doc-id", + headers: [Accept: "text/html;text/plain;*/*"] + ) + + assert String.match?(resp.headers["Content-Type"], ~r/text\/html/) + assert resp.headers["Vary"] == "Accept" + + etag = resp.headers["etag"] + + resp = + Rawresp.get("/#{db_name}/_design/template/_show/accept-switch/test-doc-id", + headers: [Accept: "image/png;*/*"] + ) + + assert String.match?(resp.body, ~r/PNG/) + assert resp.headers["Content-Type"] == "image/png" + + etag2 = resp.headers["etag"] + + assert etag != etag2 + end + + test "show with doc - etags", context do + db_name = context[:db_name] + + doc = %{"_id" => "test-doc-id2", word: "plankton", name: "Rusty"} + doc = save(db_name, doc) + + resp = Couch.get("/#{db_name}/_design/template/_show/just-name/test-doc-id2") + + etag = resp.headers["etag"] + + resp = + Couch.get("/#{db_name}/_design/template/_show/just-name/test-doc-id2", + headers: ["if-none-match": etag] + ) + + assert resp.status_code == 304 + + doc = Map.put(doc, "name", "Crusty") + save(db_name, doc) + + resp = + Couch.get("/#{db_name}/_design/template/_show/just-name/test-doc-id2", + headers: ["if-none-match": etag] + ) + + assert resp.status_code == 200 + end + + test "JS can't set etag", context do + db_name = context[:db_name] + + resp = Couch.get("/#{db_name}/_design/template/_show/no-set-etag/test-doc-id") + assert resp.headers["etag"] != "skipped" + end + + test "the provides mime matcher", context do + db_name = context[:db_name] + + resp = + Rawresp.get("/#{db_name}/_design/template/_show/provides/test-doc-id", + headers: [Accept: 
"text/html,application/atom+xml; q=0.9"] + ) + + assert String.match?(resp.headers["Content-Type"], ~r/text\/html/) + assert String.match?(resp.headers["Content-Type"], ~r/charset=utf-8/) + assert resp.body == "Ha ha, you said \"plankton\"." + end + + test "registering types works", context do + db_name = context[:db_name] + + resp = + Rawresp.get("/#{db_name}/_design/template/_show/provides/test-doc-id", + headers: [Accept: "application/x-foo"] + ) + + assert resp.headers["Content-Type"] == "application/x-foo" + assert String.match?(resp.body, ~r/foofoo/) + end + + test "the provides mime matcher without a match", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_design/template/_show/provides/test-doc-id", + headers: [Accept: "text/monkeys"] + ) + + assert resp.body["error"] == "not_acceptable" + end + + test "id with slash", context do + db_name = context[:db_name] + + doc3 = %{"_id" => "a/b/c", "a" => 1} + save(db_name, doc3) + resp = Couch.get("/#{db_name}/_design/template/_show/withSlash/a/b/c") + assert resp.status_code == 200 + end + + test "show with non-existing docid", context do + db_name = context[:db_name] + + resp = Rawresp.get("/#{db_name}/_design/template/_show/hello/nonExistingDoc") + assert resp.body == "New World" + end + + test "list() compatible API", context do + db_name = context[:db_name] + + resp = Rawresp.get("/#{db_name}/_design/template/_show/list-api/foo") + assert resp.body == "Hey" + assert resp.headers["X-Couch-Test-Header"] == "Yeah" + end + + test "list() compatible API with provides function", context do + db_name = context[:db_name] + + resp = + Rawresp.get("/#{db_name}/_design/template/_show/list-api-provides/foo?format=text") + + assert resp.body == "foo, bar, baz!" 
+ end + + test "should keep next result order: chunks + return value + provided chunks + provided return value", + context do + db_name = context[:db_name] + + resp = + Rawresp.get( + "/#{db_name}/_design/template/_show/list-api-provides-and-return/foo?format=text" + ) + + assert resp.body == "1, 2, 3, 4, 5, 6, 7!" + + resp = Rawresp.get("/#{db_name}/_design/template/_show/list-api-mix/foo") + assert resp.body == "Hey Dude" + assert resp.headers["X-Couch-Test-Header"] == "Yeah" + + resp = Rawresp.get("/#{db_name}/_design/template/_show/list-api-mix-with-header/foo") + assert resp.body == "Hey Dude" + assert resp.headers["X-Couch-Test-Header"] == "Yeah" + assert resp.headers["X-Couch-Test-Header-Awesome"] == "Oh Yeah!" + end + + test "deleted docs", context do + db_name = context[:db_name] + + doc = save(db_name, %{"_id" => "testdoc", "foo" => 1}) + + resp = Rawresp.get("/#{db_name}/_design/template/_show/show-deleted/testdoc") + assert resp.body == "testdoc" + + Couch.delete("/#{db_name}/testdoc?rev=#{doc["_rev"]}") + resp = Rawresp.get("/#{db_name}/_design/template/_show/show-deleted/testdoc") + assert resp.body == "No doc testdoc" + end + + @tag :with_db + test "security object", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, @ddoc) + {:ok, _} = create_doc(db_name, %{_id: "testdoc", foo: 1}) + + Couch.put("/#{db_name}/_security", body: %{foo: true}) + + retry_until(fn -> + resp = Couch.get("/#{db_name}/_design/template/_show/secObj") + assert resp.body["foo"] + end) + end +end diff --git a/test/elixir/test/update_documents_test.exs b/test/elixir/test/update_documents_test.exs new file mode 100644 index 00000000000..bd5e0f1ccb5 --- /dev/null +++ b/test/elixir/test/update_documents_test.exs @@ -0,0 +1,327 @@ +defmodule UpdateDocumentsTest do + use CouchTestCase + + @moduletag :docs + @moduletag kind: :single_node + + @ddoc %{ + _id: "_design/update", + language: "javascript", + updates: %{ + hello: """ + function(doc, req) { + if (!doc) { 
+ if (req.id) { + return [ + // Creates a new document with the PUT docid, + { _id : req.id, + reqs : [req] }, + // and returns an HTML response to the client. + "

New World

"]; + }; + // + return [null, "

Empty World

"]; + }; + // we can update the document inline + doc.world = "hello"; + // we can record aspects of the request or use them in application logic. + doc.reqs && doc.reqs.push(req); + doc.edited_by = req.userCtx; + return [doc, "

hello doc

"]; + } + """, + "in-place": """ + function(doc, req) { + var field = req.query.field; + var value = req.query.value; + var message = "set "+field+" to "+value; + doc[field] = value; + return [doc, message]; + } + """, + "form-update": """ + function(doc, req) { + for (var field in req.form) { + doc[field] = req.form[field]; + } + var message = "updated doc from form"; + return [doc, message]; + } + """, + "bump-counter": """ + function(doc, req) { + if (!doc.counter) doc.counter = 0; + doc.counter += 1; + var message = "

bumped it!

"; + return [doc, message]; + } + """, + error: """ + function(doc, req) { + superFail.badCrash; + } + """, + "get-uuid": """ + function(doc, req) { + return [null, req.uuid]; + } + """, + "code-n-bump": """ + function(doc,req) { + if (!doc.counter) doc.counter = 0; + doc.counter += 1; + var message = "

bumped it!

"; + resp = {"code": 302, "body": message} + return [doc, resp]; + } + """, + "resp-code": """ + function(doc,req) { + resp = {"code": 302} + return [null, resp]; + } + """, + "resp-code-and-json": """ + function(doc,req) { + resp = {"code": 302, "json": {"ok": true}} + return [{"_id": req["uuid"]}, resp]; + } + """, + binary: """ + function(doc, req) { + var resp = { + "headers" : { + "Content-Type" : "application/octet-stream" + }, + "base64" : "aGVsbG8gd29ybGQh" // "hello world!" encoded + }; + return [doc, resp]; + } + """, + empty: """ + function(doc, req) { + return [{}, 'oops']; + } + """ + } + } + + @document %{word: "plankton", name: "Rusty"} + + @tag :with_db + test "update error invalid path", context do + db_name = context[:db_name] + create_doc(db_name, @ddoc) + + resp = Couch.post("/#{db_name}/_design/update/_update/") + assert resp.status_code == 404 + assert resp.body["reason"] == "Invalid path." + end + + @tag :with_db + test "update document", context do + db_name = context[:db_name] + create_doc(db_name, @ddoc) + {:ok, resp} = create_doc(db_name, @document) + docid = resp.body["id"] + + resp = Couch.put("/#{db_name}/_design/update/_update/hello/#{docid}") + assert resp.status_code == 201 + assert resp.body == "

hello doc

" + assert String.contains?(resp.headers["Content-Type"], "charset=utf-8") + assert resp.headers["X-Couch-Id"] == docid + + resp = Couch.get("/#{db_name}/#{docid}") + assert resp.status_code == 200 + assert resp.body["world"] == "hello" + + # Fix for COUCHDB-379 + assert String.starts_with?(resp.headers["Server"], "CouchDB") + + resp = Couch.put("/#{db_name}/_design/update/_update/hello") + assert resp.status_code == 200 + assert resp.body == "

Empty World

" + end + + @tag :with_db + test "GET is not allowed", context do + db_name = context[:db_name] + create_doc(db_name, @ddoc) + + resp = Couch.get("/#{db_name}/_design/update/_update/hello") + assert resp.body["error"] == "method_not_allowed" + end + + @tag :with_db + test "doc can be created", context do + db_name = context[:db_name] + create_doc(db_name, @ddoc) + + resp = Couch.get("/#{db_name}/nonExistingDoc") + assert resp.status_code == 404 + + resp = Couch.put("/#{db_name}/_design/update/_update/hello/nonExistingDoc") + assert resp.status_code == 201 + assert resp.body == "

New World

" + + resp = Couch.get("/#{db_name}/nonExistingDoc") + assert resp.status_code == 200 + end + + @tag :with_db + test "in place update", context do + db_name = context[:db_name] + create_doc(db_name, @ddoc) + + {:ok, resp} = create_doc(db_name, @document) + docid = resp.body["id"] + + resp = + Couch.put( + "/#{db_name}/_design/update/_update/in-place/#{docid}?field=title&value=test" + ) + + assert resp.status_code == 201 + assert resp.body == "set title to test" + resp = Couch.get("/#{db_name}/#{docid}") + assert resp.status_code == 200 + assert resp.body["title"] == "test" + end + + @tag :with_db + test "form update via application/x-www-form-urlencoded", context do + db_name = context[:db_name] + create_doc(db_name, @ddoc) + + {:ok, resp} = create_doc(db_name, @document) + docid = resp.body["id"] + + resp = + Couch.put( + "/#{db_name}/_design/update/_update/form-update/#{docid}", + headers: ["Content-Type": "application/x-www-form-urlencoded"], + body: "formfoo=bar&formbar=foo" + ) + + assert resp.status_code == 201 + assert resp.body == "updated doc from form" + + resp = Couch.get("/#{db_name}/#{docid}") + assert resp.status_code == 200 + assert resp.body["formfoo"] == "bar" + assert resp.body["formbar"] == "foo" + end + + @tag :with_db + test "bump counter", context do + db_name = context[:db_name] + create_doc(db_name, @ddoc) + + {:ok, resp} = create_doc(db_name, @document) + docid = resp.body["id"] + + resp = + Couch.put("/#{db_name}/_design/update/_update/bump-counter/#{docid}", + headers: ["X-Couch-Full-Commit": "true"] + ) + + assert resp.status_code == 201 + assert resp.body == "

bumped it!

" + + resp = Couch.get("/#{db_name}/#{docid}") + assert resp.status_code == 200 + assert resp.body["counter"] == 1 + + resp = + Couch.put("/#{db_name}/_design/update/_update/bump-counter/#{docid}", + headers: ["X-Couch-Full-Commit": "true"] + ) + + newrev = resp.headers["X-Couch-Update-NewRev"] + + resp = Couch.get("/#{db_name}/#{docid}") + assert resp.status_code == 200 + assert resp.body["counter"] == 2 + assert resp.body["_rev"] == newrev + end + + @tag :with_db + test "Server provides UUID when POSTing without an ID in the URL", context do + db_name = context[:db_name] + create_doc(db_name, @ddoc) + resp = Couch.put("/#{db_name}/_design/update/_update/get-uuid/") + assert resp.status_code == 200 + assert String.length(resp.body) == 32 + end + + @tag :with_db + test "COUCHDB-1229 - allow slashes in doc ids for update handlers", context do + db_name = context[:db_name] + create_doc(db_name, @ddoc) + + create_doc(db_name, %{_id: "with/slash", counter: 1}) + + resp = Couch.put("/#{db_name}/_design/update/_update/bump-counter/with/slash") + assert resp.status_code == 201 + assert resp.body == "

bumped it!

" + + resp = Couch.get("/#{db_name}/with%2Fslash") + assert resp.status_code == 200 + assert resp.body["counter"] == 2 + end + + @tag :with_db + test "COUCHDB-648 - the code in the JSON response should be honored", context do + db_name = context[:db_name] + create_doc(db_name, @ddoc) + + {:ok, resp} = create_doc(db_name, @document) + docid = resp.body["id"] + + Couch.put("/#{db_name}/_design/update/_update/bump-counter/#{docid}") + Couch.put("/#{db_name}/_design/update/_update/bump-counter/#{docid}") + + resp = Couch.put("/#{db_name}/_design/update/_update/code-n-bump/#{docid}") + assert resp.status_code == 302 + assert resp.body == "

bumped it!

" + + resp = Couch.get("/#{db_name}/#{docid}") + assert resp.status_code == 200 + assert resp.body["counter"] == 3 + + resp = Couch.put("/#{db_name}/_design/update/_update/resp-code/") + assert resp.status_code == 302 + + resp = Couch.put("/#{db_name}/_design/update/_update/resp-code-and-json/") + assert resp.status_code == 302 + assert resp.body["ok"] == true + end + + @tag :with_db + test "base64 response", context do + db_name = context[:db_name] + create_doc(db_name, @ddoc) + + {:ok, resp} = create_doc(db_name, @document) + docid = resp.body["id"] + + resp = + Couch.put("/#{db_name}/_design/update/_update/binary/#{docid}", + body: "rubbish" + ) + + assert resp.status_code == 201 + assert resp.body == "hello world!" + assert String.contains?(resp.headers["Content-Type"], "application/octet-stream") + end + + @tag :with_db + test "Insert doc with empty id", context do + db_name = context[:db_name] + create_doc(db_name, @ddoc) + + resp = Couch.put("/#{db_name}/_design/update/_update/empty/foo") + assert resp.status_code == 400 + assert resp.body["reason"] == "Document id must not be empty" + end +end diff --git a/test/elixir/test/users_db_security_test.exs b/test/elixir/test/users_db_security_test.exs new file mode 100644 index 00000000000..7b2c97df9fa --- /dev/null +++ b/test/elixir/test/users_db_security_test.exs @@ -0,0 +1,520 @@ +defmodule UsersDbSecurityTest do + use CouchTestCase + + @moduletag :authentication + @moduletag kind: :single_node + + @users_db "_users" + + @login_user %{ + jerry: "apple", + tom: "mp3", + spike: "foobar", + speedy: "test", + silvestre: "anchovy" + } + + setup_all do + # Create db if not exists + Couch.put("/#{@users_db}") + + retry_until(fn -> + resp = + Couch.get( + "/#{@users_db}/_changes", + query: [feed: "longpoll", timeout: 5000, filter: "_design"] + ) + + length(resp.body["results"]) > 0 + end) + + on_exit(&tear_down/0) + + :ok + end + + defp tear_down do + users = Map.keys(@login_user) + Enum.each(users, fn name -> + resp = 
Couch.get("/#{@users_db}/org.couchdb.user:#{name}") + if resp.status_code == 200 do + rev = resp.body["_rev"] + Couch.delete("/#{@users_db}/org.couchdb.user:#{name}?rev=#{rev}") + end + end) + end + + defp login_as(user, password \\ nil) do + pwd = + case password do + nil -> @login_user[String.to_atom(user)] + _ -> password + end + + sess = Couch.login(user, pwd) + assert sess.cookie, "Login correct is expected" + sess + end + + defp logout(session) do + assert Couch.Session.logout(session).body["ok"] + end + + defp open_as(db_name, doc_id, options) do + use_session = Keyword.get(options, :use_session) + user = Keyword.get(options, :user) + pwd = Keyword.get(options, :pwd) + expect_response = Keyword.get(options, :expect_response, 200) + expect_message = Keyword.get(options, :error_message) + + session = use_session || login_as(user, pwd) + + resp = + Couch.Session.get( + session, + "/#{db_name}/#{URI.encode(doc_id)}" + ) + + if use_session == nil do + logout(session) + end + + assert resp.status_code == expect_response + + if expect_message != nil do + assert resp.body["error"] == expect_message + end + + resp.body + end + + defp save_as(db_name, doc, options) do + use_session = Keyword.get(options, :use_session) + user = Keyword.get(options, :user) + expect_response = Keyword.get(options, :expect_response, [201, 202]) + expect_message = Keyword.get(options, :error_message) + + session = use_session || login_as(user) + + resp = + Couch.Session.put( + session, + "/#{db_name}/#{URI.encode(doc["_id"])}", + body: doc + ) + + if use_session == nil do + logout(session) + end + + if is_list(expect_response) do + assert resp.status_code in expect_response + else + assert resp.status_code == expect_response + end + + if expect_message != nil do + assert resp.body["error"] == expect_message + end + + resp + end + + defp view_as(db_name, view_name, options) do + use_session = Keyword.get(options, :use_session) + user = Keyword.get(options, :user) + pwd = 
Keyword.get(options, :pwd) + expect_response = Keyword.get(options, :expect_response, 200) + expect_message = Keyword.get(options, :error_message) + + session = use_session || login_as(user, pwd) + + [view_root, view_name] = String.split(view_name, "/") + + resp = + Couch.Session.get(session, "/#{db_name}/_design/#{view_root}/_view/#{view_name}") + + if use_session == nil do + logout(session) + end + + if is_list(expect_response) do + assert resp.status_code in expect_response + else + assert resp.status_code == expect_response + end + + if expect_message != nil do + assert resp.body["error"] == expect_message + end + + resp + end + + defp changes_as(db_name, options) do + use_session = Keyword.get(options, :use_session) + user = Keyword.get(options, :user) + expect_response = Keyword.get(options, :expect_response, [200, 202]) + expect_message = Keyword.get(options, :error_message) + + session = use_session || login_as(user) + + resp = + Couch.Session.get( + session, + "/#{db_name}/_changes" + ) + + if use_session == nil do + logout(session) + end + + if is_list(expect_response) do + assert resp.status_code in expect_response + else + assert resp.status_code == expect_response + end + + if expect_message != nil do + assert resp.body["error"] == expect_message + end + + resp + end + + defp request_raw_as(db_name, path, options) do + use_session = Keyword.get(options, :use_session) + user = Keyword.get(options, :user) + pwd = Keyword.get(options, :pwd) + expect_response = Keyword.get(options, :expect_response, 200) + expect_message = Keyword.get(options, :error_message) + + session = use_session || login_as(user, pwd) + + resp = + Couch.Session.get( + session, + "/#{db_name}/#{path}", + parse_response: false + ) + + if use_session == nil do + logout(session) + end + + if is_list(expect_response) do + assert resp.status_code in expect_response + else + assert resp.status_code == expect_response + end + + if expect_message != nil do + assert resp.body["error"] == 
expect_message + end + + resp + end + + defp request_as(db_name, path, options) do + use_session = Keyword.get(options, :use_session) + user = Keyword.get(options, :user) + pwd = Keyword.get(options, :pwd) + expect_response = Keyword.get(options, :expect_response, 200) + expect_message = Keyword.get(options, :error_message) + + session = use_session || login_as(user, pwd) + + resp = + Couch.Session.get( + session, + "/#{db_name}/#{path}" + ) + + if use_session == nil do + logout(session) + end + + if is_list(expect_response) do + assert resp.status_code in expect_response + else + assert resp.status_code == expect_response + end + + if expect_message != nil do + assert resp.body["error"] == expect_message + end + + resp + end + + defp set_security(db_name, security, expect_response \\ 200) do + resp = Couch.put("/#{db_name}/_security", body: security) + assert resp.status_code == expect_response + end + + @tag config: [ + { + "couchdb", + "users_db_security_editable", + "true" + }, + { + "couch_httpd_auth", + "iterations", + "1" + }, + { + "admins", + "jerry", + "apple" + } + ] + test "user db security" do + # _users db + # a doc with a field 'password' should be hashed to 'derived_key' + # with salt and salt stored in 'salt', 'password' is set to null. + # Exising 'derived_key' and 'salt' fields are overwritten with new values + # when a non-null 'password' field exists. 
+ # anonymous should be able to create a user document + user_doc = %{ + _id: "org.couchdb.user:tom", + type: "user", + name: "tom", + password: "mp3", + roles: [] + } + + resp = + Couch.post("/#{@users_db}", body: user_doc, headers: [authorization: "annonymous"]) + + assert resp.status_code in [201, 202] + assert resp.body["ok"] + + user_doc = + retry_until(fn -> + user_doc = open_as(@users_db, "org.couchdb.user:tom", user: "tom") + assert !user_doc["password"] + assert String.length(user_doc["derived_key"]) == 40 + assert String.length(user_doc["salt"]) == 32 + user_doc + end) + + # anonymous should not be able to read an existing user's user document + resp = + Couch.get("/#{@users_db}/org.couchdb.user:tom", + headers: [authorization: "annonymous"] + ) + + assert resp.status_code == 404 + + # anonymous should not be able to read /_users/_changes + resp = Couch.get("/#{@users_db}/_changes", headers: [authorization: "annonymous"]) + assert resp.status_code == 401 + assert resp.body["error"] == "unauthorized" + + # user should be able to read their own document + tom_doc = open_as(@users_db, "org.couchdb.user:tom", user: "tom") + assert tom_doc["_id"] == "org.couchdb.user:tom" + + # user should not be able to read /_users/_changes + changes_as(@users_db, + user: "tom", + expect_response: 401, + expect_message: "unauthorized" + ) + + tom_doc = Map.put(tom_doc, "password", "couch") + save_as(@users_db, tom_doc, user: "tom") + + tom_doc = open_as(@users_db, "org.couchdb.user:tom", user: "jerry") + assert !tom_doc["password"] + assert String.length(tom_doc["derived_key"]) == 40 + assert String.length(tom_doc["salt"]) == 32 + assert tom_doc["derived_key"] != user_doc["derived_key"] + assert tom_doc["salt"] != user_doc["salt"] + + # user should not be able to read another user's user document + spike_doc = %{ + _id: "org.couchdb.user:spike", + type: "user", + name: "spike", + password: "foobar", + roles: [] + } + + {:ok, _} = create_doc(@users_db, spike_doc) + + 
open_as(@users_db, "org.couchdb.user:spike", + user: "tom", + pwd: "couch", + expect_response: 404 + ) + + speedy_doc = %{ + _id: "org.couchdb.user:speedy", + type: "user", + name: "speedy", + password: "test", + roles: ["user_admin"] + } + + {:ok, _} = create_doc(@users_db, speedy_doc) + + security = %{ + admins: %{ + roles: [], + names: ["speedy"] + } + } + + set_security(@users_db, security) + + # user should not be able to read from any view + ddoc = %{ + _id: "_design/user_db_auth", + views: %{ + test: %{ + map: "function(doc) { emit(doc._id, null); }" + } + }, + lists: %{ + names: """ + function(head, req) { + var row; while (row = getRow()) { send(row.key + \"\\n\"); } + } + """ + }, + shows: %{ + name: "function(doc, req) { return doc.name; }" + } + } + + create_doc(@users_db, ddoc) + + resp = + Couch.get("/#{@users_db}/_design/user_db_auth/_view/test", + headers: [authorization: "annonymous"] + ) + + assert resp.body["error"] == "forbidden" + + # admin should be able to read from any view + resp = view_as(@users_db, "user_db_auth/test", user: "jerry") + assert resp.body["total_rows"] == 3 + + # db admin should be able to read from any view + resp = view_as(@users_db, "user_db_auth/test", user: "speedy") + assert resp.body["total_rows"] == 3 + + # non-admins can't read design docs + open_as(@users_db, "_design/user_db_auth", + user: "tom", + pwd: "couch", + expect_response: 403, + expect_message: "forbidden" + ) + + # admin shold be able to read _list + result = + request_raw_as(@users_db, "_design/user_db_auth/_list/names/test", user: "jerry") + + assert result.status_code == 200 + assert length(String.split(result.body, "\n")) == 4 + + # non-admins can't read _list + request_raw_as(@users_db, "_design/user_db_auth/_list/names/test", + user: "tom", + pwd: "couch", + expect_response: 403 + ) + + # admin should be able to read _show + result = + request_raw_as(@users_db, "_design/user_db_auth/_show/name/org.couchdb.user:tom", + user: "jerry" + ) + + assert 
result.status_code == 200 + assert result.body == "tom" + + # non-admin should be able to access own _show + result = + request_raw_as(@users_db, "_design/user_db_auth/_show/name/org.couchdb.user:tom", + user: "tom", + pwd: "couch" + ) + + assert result.status_code == 200 + assert result.body == "tom" + + # non-admin can't read other's _show + request_raw_as(@users_db, "_design/user_db_auth/_show/name/org.couchdb.user:jerry", + user: "tom", + pwd: "couch", + expect_response: 404 + ) + + # admin should be able to read and edit any user doc + spike_doc = open_as(@users_db, "org.couchdb.user:spike", user: "jerry") + spike_doc = Map.put(spike_doc, "password", "mobile") + save_as(@users_db, spike_doc, user: "jerry") + + # admin should be able to read and edit any user doc + spike_doc = open_as(@users_db, "org.couchdb.user:spike", user: "jerry") + spike_doc = Map.put(spike_doc, "password", "mobile1") + save_as(@users_db, spike_doc, user: "speedy") + + security = %{ + admins: %{ + roles: ["user_admin"], + names: [] + } + } + + set_security(@users_db, security) + + # db admin should be able to read and edit any user doc + spike_doc = open_as(@users_db, "org.couchdb.user:spike", user: "jerry") + spike_doc = Map.put(spike_doc, "password", "mobile2") + save_as(@users_db, spike_doc, user: "speedy") + + # ensure creation of old-style docs still works + silvestre_doc = prepare_user_doc(name: "silvestre", password: "anchovy") + + resp = + Couch.post("/#{@users_db}", + body: silvestre_doc, + headers: [authorization: "annonymous"] + ) + + assert resp.body["ok"] + + run_on_modified_server( + [ + %{ + :section => "couch_httpd_auth", + :key => "public_fields", + :value => "name" + }, + %{ + :section => "couch_httpd_auth", + :key => "users_db_public", + :value => "false" + } + ], + fn -> + request_as(@users_db, "_all_docs?include_docs=true", + user: "tom", + pwd: "couch", + expect_response: 401, + expect_message: "unauthorized" + ) + + # COUCHDB-1888 make sure admins always get all 
fields + resp = request_as(@users_db, "_all_docs?include_docs=true", user: "jerry") + rows = resp.body["rows"] + assert Enum.at(rows, 2)["doc"]["type"] == "user" + end + ) + end +end diff --git a/test/elixir/test/users_db_test.exs b/test/elixir/test/users_db_test.exs index 71ab2f7e797..db86b27396d 100644 --- a/test/elixir/test/users_db_test.exs +++ b/test/elixir/test/users_db_test.exs @@ -2,6 +2,7 @@ defmodule UsersDbTest do use CouchTestCase @moduletag :authentication + @moduletag kind: :single_node @users_db_name "_users" @@ -50,28 +51,6 @@ defmodule UsersDbTest do create_db(@users_db_name) end - defp replicate(source, target, rep_options \\ []) do - headers = Keyword.get(rep_options, :headers, []) - body = Keyword.get(rep_options, :body, %{}) - - body = - body - |> Map.put("source", source) - |> Map.put("target", target) - - retry_until( - fn -> - resp = Couch.post("/_replicate", headers: headers, body: body, timeout: 10_000) - assert HTTPotion.Response.success?(resp) - assert resp.status_code == 200 - assert resp.body["ok"] - resp - end, - 500, - 20_000 - ) - end - defp save_as(db_name, doc, options) do session = Keyword.get(options, :use_session) expect_response = Keyword.get(options, :expect_response, [201, 202]) @@ -147,7 +126,8 @@ defmodule UsersDbTest do assert resp.body["userCtx"]["name"] == "jchris@apache.org" assert resp.body["info"]["authenticated"] == "default" assert resp.body["info"]["authentication_db"] == @users_db_name - assert resp.body["info"]["authentication_handlers"] == ["cookie", "default"] + assert Enum.member?(resp.body["info"]["authentication_handlers"], "cookie") + assert Enum.member?(resp.body["info"]["authentication_handlers"], "default") resp = Couch.get( diff --git a/test/elixir/test/utf8_test.exs b/test/elixir/test/utf8_test.exs index ad78080ae09..6afaee6067f 100644 --- a/test/elixir/test/utf8_test.exs +++ b/test/elixir/test/utf8_test.exs @@ -2,6 +2,7 @@ defmodule UTF8Test do use CouchTestCase @moduletag :utf8 + @moduletag kind: 
:single_node @moduledoc """ Test CouchDB UTF8 support @@ -29,7 +30,7 @@ defmodule UTF8Test do texts |> Enum.with_index() - |> Enum.each(fn {string, index} -> + |> Enum.each(fn {_, index} -> resp = Couch.get("/#{db_name}/#{index}") %{"_id" => id, "text" => text} = resp.body assert resp.status_code == 200 diff --git a/test/elixir/test/uuids_test.exs b/test/elixir/test/uuids_test.exs index bb9369b8065..355bed824b4 100644 --- a/test/elixir/test/uuids_test.exs +++ b/test/elixir/test/uuids_test.exs @@ -6,6 +6,9 @@ defmodule UUIDsTest do This is a port of the uuids.js suite """ + @moduletag :docs + @moduletag kind: :single_node + test "cache busting headers are set" do resp = Couch.get("/_uuids") assert resp.status_code == 200 diff --git a/test/elixir/test/view_collation_raw_test.exs b/test/elixir/test/view_collation_raw_test.exs new file mode 100644 index 00000000000..b9c40f6d88b --- /dev/null +++ b/test/elixir/test/view_collation_raw_test.exs @@ -0,0 +1,162 @@ +defmodule ViewCollationRawTest do + use CouchTestCase + + @moduledoc """ + Test CouchDB View Raw Collation Behavior + This is a port of the view_collation_raw.js suite + """ + + @moduletag :view_collation_raw + @moduletag kind: :single_node + + @values [ + # Then numbers + 1, + 2, + 3, + 4, + false, + :null, + true, + + # Then objects, compared each key value in the list until different. + # Larger objects sort after their subset objects + {[a: 1]}, + {[a: 2]}, + {[b: 1]}, + {[b: 2]}, + # Member order does matter for collation + {[b: 2, a: 1]}, + {[b: 2, c: 2]}, + + # Then arrays, compared element by element until different. 
+ # Longer arrays sort after their prefixes + ["a"], + ["b"], + ["b", "c"], + ["b", "c", "a"], + ["b", "d"], + ["b", "d", "e"], + + # Then text, case sensitive + "A", + "B", + "a", + "aa", + "b", + "ba", + "bb" + ] + + setup_all do + db_name = random_db_name() + {:ok, _} = create_db(db_name) + on_exit(fn -> delete_db(db_name) end) + + {docs, _} = + Enum.flat_map_reduce(@values, 1, fn value, idx -> + doc = %{:_id => Integer.to_string(idx), :foo => value} + {[doc], idx + 1} + end) + + resp = Couch.post("/#{db_name}/_bulk_docs", body: %{:docs => docs}) + Enum.each(resp.body, &assert(&1["ok"])) + + map_fun = "function(doc) { emit(doc.foo, null); }" + + map_doc = %{ + :language => "javascript", + :views => %{:test => %{:map => map_fun, :options => %{:collation => "raw"}}} + } + + resp = Couch.put("/#{db_name}/_design/test", body: map_doc) + assert resp.body["ok"] + + {:ok, [db_name: db_name]} + end + + test "ascending collation order", context do + retry_until(fn -> + resp = Couch.get(url(context)) + pairs = Enum.zip(resp.body["rows"], @values) + + Enum.each(pairs, fn {row, value} -> + assert row["key"] == convert(value) + end) + end) + end + + test "raw semantics in key ranges", context do + retry_until(fn -> + resp = + Couch.get(url(context), + query: %{"startkey" => :jiffy.encode("Z"), "endkey" => :jiffy.encode("a")} + ) + + assert length(resp.body["rows"]) == 1 + assert Enum.at(resp.body["rows"], 0)["key"] == "a" + end) + end + + test "descending collation order", context do + retry_until(fn -> + resp = Couch.get(url(context), query: %{"descending" => "true"}) + pairs = Enum.zip(resp.body["rows"], Enum.reverse(@values)) + + Enum.each(pairs, fn {row, value} -> + assert row["key"] == convert(value) + end) + end) + end + + test "key query option", context do + Enum.each(@values, fn value -> + retry_until(fn -> + resp = Couch.get(url(context), query: %{:key => :jiffy.encode(value)}) + assert length(resp.body["rows"]) == 1 + assert Enum.at(resp.body["rows"], 0)["key"] == 
convert(value) + end) + end) + end + + test "inclusive_end=true", context do + query = %{:endkey => :jiffy.encode("b"), :inclusive_end => true} + resp = Couch.get(url(context), query: query) + assert Enum.at(resp.body["rows"], -1)["key"] == "b" + + query = Map.put(query, :descending, true) + resp = Couch.get(url(context), query: query) + assert Enum.at(resp.body["rows"], -1)["key"] == "b" + end + + test "inclusive_end=false", context do + query = %{:endkey => :jiffy.encode("b"), :inclusive_end => false} + resp = Couch.get(url(context), query: query) + assert Enum.at(resp.body["rows"], -1)["key"] == "aa" + + query = Map.put(query, :descending, true) + resp = Couch.get(url(context), query: query) + assert Enum.at(resp.body["rows"], -1)["key"] == "ba" + + query = %{ + :endkey => :jiffy.encode("b"), + :endkey_docid => 10, + :inclusive_end => false + } + + resp = Couch.get(url(context), query: query) + assert Enum.at(resp.body["rows"], -1)["key"] == "aa" + + query = Map.put(query, :endkey_docid, 11) + resp = Couch.get(url(context), query: query) + assert Enum.at(resp.body["rows"], -1)["key"] == "aa" + end + + def url(context) do + "/#{context[:db_name]}/_design/test/_view/test" + end + + def convert(value) do + :jiffy.decode(:jiffy.encode(value), [:return_maps]) + end +end diff --git a/test/elixir/test/view_collation_test.exs b/test/elixir/test/view_collation_test.exs index 7563ba41644..984d41234f8 100644 --- a/test/elixir/test/view_collation_test.exs +++ b/test/elixir/test/view_collation_test.exs @@ -6,6 +6,9 @@ defmodule ViewCollationTest do This is a port of the view_collation.js suite """ + @moduletag :view_collation + @moduletag kind: :single_node + @values [ # Special values sort before all other types :null, diff --git a/test/elixir/test/view_compaction_test.exs b/test/elixir/test/view_compaction_test.exs new file mode 100644 index 00000000000..ed7461aa194 --- /dev/null +++ b/test/elixir/test/view_compaction_test.exs @@ -0,0 +1,109 @@ +defmodule 
ViewCompactionTest do + use CouchTestCase + + @moduledoc """ + Test CouchDB View Compaction Behavior + This is a port of the view_compaction.js suite + """ + + @moduletag :view_compaction + @moduletag kind: :single_node + + @num_docs 1000 + + @ddoc %{ + _id: "_design/foo", + language: "javascript", + views: %{ + view1: %{ + map: "function(doc) { emit(doc._id, doc.value) }" + }, + view2: %{ + map: + "function(doc) { if (typeof(doc.integer) === 'number') {emit(doc._id, doc.integer);} }", + reduce: "function(keys, values, rereduce) { return sum(values); }" + } + } + } + + defp bulk_save_for_update(db_name, docs) do + resp = bulk_save(db_name, docs) + revs = resp.body + + Enum.map(docs, fn m -> + rev = Enum.at(revs, String.to_integer(m["_id"]))["rev"] + + m + |> Map.put("_rev", rev) + |> Map.update!("integer", &(&1 + 1)) + end) + end + + @tag :with_db + test "view compaction", context do + db_name = context[:db_name] + create_doc(db_name, @ddoc) + + docs = make_docs(0..(@num_docs - 1)) + docs = bulk_save_for_update(db_name, docs) + + resp = view(db_name, "foo/view1") + assert length(resp.body["rows"]) == @num_docs + + resp = view(db_name, "foo/view2") + assert length(resp.body["rows"]) == 1 + + resp = Couch.get("/#{db_name}/_design/foo/_info") + assert resp.body["view_index"]["update_seq"] == @num_docs + 1 + + docs = bulk_save_for_update(db_name, docs) + + resp = view(db_name, "foo/view1") + assert length(resp.body["rows"]) == @num_docs + + resp = view(db_name, "foo/view2") + assert length(resp.body["rows"]) == 1 + + resp = Couch.get("/#{db_name}/_design/foo/_info") + assert resp.body["view_index"]["update_seq"] == 2 * @num_docs + 1 + + bulk_save(db_name, docs) + resp = view(db_name, "foo/view1") + assert length(resp.body["rows"]) == @num_docs + + resp = view(db_name, "foo/view2") + assert length(resp.body["rows"]) == 1 + + resp = Couch.get("/#{db_name}/_design/foo/_info") + assert resp.body["view_index"]["update_seq"] == 3 * @num_docs + 1 + + disk_size_before_compact 
= resp.body["view_index"]["sizes"]["file"] + data_size_before_compact = resp.body["view_index"]["sizes"]["active"] + + assert is_integer(disk_size_before_compact) + assert data_size_before_compact < disk_size_before_compact + + resp = Couch.post("/#{db_name}/_compact/foo") + assert resp.body["ok"] == true + + retry_until(fn -> + resp = Couch.get("/#{db_name}/_design/foo/_info") + resp.body["view_index"]["compact_running"] == false + end) + + resp = view(db_name, "foo/view1") + assert length(resp.body["rows"]) == @num_docs + + resp = view(db_name, "foo/view2") + assert length(resp.body["rows"]) == 1 + + resp = Couch.get("/#{db_name}/_design/foo/_info") + assert resp.body["view_index"]["update_seq"] == 3 * @num_docs + 1 + + disk_size_after_compact = resp.body["view_index"]["sizes"]["file"] + data_size_after_compact = resp.body["view_index"]["sizes"]["active"] + assert disk_size_after_compact < disk_size_before_compact + assert is_integer(data_size_after_compact) + assert data_size_after_compact < disk_size_after_compact + end +end diff --git a/test/elixir/test/view_conflicts_test.exs b/test/elixir/test/view_conflicts_test.exs new file mode 100644 index 00000000000..89d9cb4eb23 --- /dev/null +++ b/test/elixir/test/view_conflicts_test.exs @@ -0,0 +1,75 @@ +defmodule ViewConflictsTest do + use CouchTestCase + + @moduletag :view_conflicts + @moduletag kind: :single_node + + setup_all do + db_name_a = random_db_name() + db_name_b = random_db_name() + + {:ok, _} = create_db(db_name_a) + {:ok, _} = create_db(db_name_b) + + on_exit(fn -> delete_db(db_name_a) end) + on_exit(fn -> delete_db(db_name_b) end) + {:ok, [db_name_a: db_name_a, db_name_b: db_name_b]} + end + + test "view conflict", context do + db_name_a = context[:db_name_a] + db_name_b = context[:db_name_b] + + create_doc(db_name_a, %{_id: "foo", bar: 42}) + replicate(db_name_a, db_name_b) + + resp = Couch.get("/#{db_name_b}/foo") + + docb = + resp.body + |> Map.put("bar", 43) + + docb = save(db_name_b, docb) + + 
resp = Couch.get("/#{db_name_a}/foo") + + doca = + resp.body + |> Map.put("bar", 41) + + doca = save(db_name_a, doca) + + replicate(db_name_a, db_name_b) + + resp = Couch.get("/#{db_name_b}/foo", query: [conflicts: true]) + doc = resp.body + assert length(resp.body["_conflicts"]) == 1 + + conflict_rev = Enum.at(resp.body["_conflicts"], 0) + + case doc["bar"] do + 41 -> assert conflict_rev == docb["_rev"] + 43 -> assert conflict_rev == doca["_rev"] + _ -> assert false + end + + map_fun = """ + function(doc) { + if (doc._conflicts) { + emit(doc._id, doc._conflicts); + } + } + """ + + results = query(db_name_b, map_fun) + + rev = + results + |> Map.get("rows") + |> Enum.at(0) + |> Map.get("value") + |> Enum.at(0) + + assert conflict_rev == rev + end +end diff --git a/test/elixir/test/view_errors_test.exs b/test/elixir/test/view_errors_test.exs new file mode 100644 index 00000000000..1e8f880a654 --- /dev/null +++ b/test/elixir/test/view_errors_test.exs @@ -0,0 +1,301 @@ +defmodule ViewErrorsTest do + use CouchTestCase + + @moduletag :view_errors + @moduletag kind: :single_node + + @document %{integer: 1, string: "1", array: [1, 2, 3]} + + @tag :with_db + test "emit undefined key results as null", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, @document) + + map_fun = """ + function(doc) { + emit(doc.undef, null); + } + """ + + # emitting a key value that is undefined should result in that row + # being included in the view results as null + results = query(db_name, map_fun) + assert results["total_rows"] == 1 + assert Enum.at(results["rows"], 0)["key"] == :null + end + + @tag :with_db + test "exception in map function", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, @document) + + map_fun = """ + function(doc) { + doc.undef(); // throws an error + } + """ + + # if a view function throws an exception, its results are not included in + # the view index, but the view does not itself raise an error + results = 
query(db_name, map_fun) + assert results["total_rows"] == 0 + end + + @tag :with_db + test "emit undefined value results as null", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, @document) + + map_fun = """ + function(doc) { + emit([doc._id, doc.undef], null); + } + """ + + # if a view function includes an undefined value in the emitted key or + # value, it is treated as null + results = query(db_name, map_fun) + assert results["total_rows"] == 1 + + key = + results["rows"] + |> Enum.at(0) + |> Map.get("key") + |> Enum.at(1) + + assert key == :null + end + + @tag :with_db + test "query view with invalid params", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, @document) + + body = %{ + language: "javascript", + map: "function(doc){emit(doc.integer)}" + } + + # querying a view with invalid params should give a reasonable error message + resp = + Couch.post("/#{db_name}/_all_docs?startkey=foo", + headers: ["Content-Type": "application/json"], + body: body + ) + + assert resp.body["error"] == "bad_request" + + resp = + Couch.post("/#{db_name}/_all_docs", + headers: ["Content-Type": "application/x-www-form-urlencoded"], + body: body + ) + + assert resp.status_code == 415 + end + + @tag :with_db + test "query parse error", context do + db_name = context[:db_name] + + map_fun = """ + function(doc) { + emit(doc.integer, doc.integer); + } + """ + + ddoc_name = create_view(db_name, map_fun) + + resp = Couch.get("/#{db_name}/#{ddoc_name}/_view/view", query: [group: true]) + assert resp.status_code == 400 + assert resp.body["error"] == "query_parse_error" + + map_fun = "function() {emit(null, null)}" + ddoc_name = create_view(db_name, map_fun) + + resp = + Couch.get("/#{db_name}/#{ddoc_name}/_view/view", query: [startkey: 2, endkey: 1]) + + assert resp.status_code == 400 + assert resp.body["error"] == "query_parse_error" + assert String.contains?(resp.body["reason"], "No rows can match") + + design_doc = %{ + _id: 
"_design/test", + language: "javascript", + views: %{ + no_reduce: %{map: "function(doc) {emit(doc._id, null);}"}, + with_reduce: %{ + map: "function (doc) {emit(doc.integer, doc.integer)};", + reduce: "function (keys, values) { return sum(values); };" + } + } + } + + {:ok, _} = create_doc(db_name, design_doc) + + resp = Couch.get("/#{db_name}/_design/test/_view/no_reduce", query: [group: true]) + assert resp.status_code == 400 + assert resp.body["error"] == "query_parse_error" + + resp = Couch.get("/#{db_name}/_design/test/_view/no_reduce", query: [group_level: 1]) + assert resp.status_code == 400 + assert resp.body["error"] == "query_parse_error" + + resp = Couch.get("/#{db_name}/_design/test/_view/no_reduce", query: [reduce: true]) + assert resp.status_code == 400 + assert resp.body["error"] == "query_parse_error" + + resp = Couch.get("/#{db_name}/_design/test/_view/no_reduce", query: [reduce: false]) + assert resp.status_code == 200 + + resp = + Couch.get("/#{db_name}/_design/test/_view/with_reduce", + query: [group: true, reduce: false] + ) + + assert resp.status_code == 400 + assert resp.body["error"] == "query_parse_error" + + resp = + Couch.get("/#{db_name}/_design/test/_view/with_reduce", + query: [group_level: 1, reduce: false] + ) + + assert resp.status_code == 400 + assert resp.body["error"] == "query_parse_error" + end + + @tag :with_db + test "infinite loop", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, @document) + + design_doc3 = %{ + _id: "_design/infinite", + language: "javascript", + views: %{ + infinite_loop: %{ + map: "function(doc) {while(true){emit(doc,doc);}};" + } + } + } + + {:ok, _} = create_doc(db_name, design_doc3) + + resp = Couch.get("/#{db_name}/_design/infinite/_view/infinite_loop") + assert resp.status_code == 500 + # This test has two different races. The first is whether + # the while loop exhausts the JavaScript RAM limits before + # timing. 
The second is a race between which of two timeouts + # fires first. The first timeout is the couch_os_process + # waiting for data back from couchjs. The second is the + # gen_server call to couch_os_process. + assert resp.body["error"] == "os_process_error" or resp.body["error"] == "timeout" + end + + @tag :with_db + test "error responses for invalid multi-get bodies", context do + db_name = context[:db_name] + + design_doc = %{ + _id: "_design/test", + language: "javascript", + views: %{ + no_reduce: %{map: "function(doc) {emit(doc._id, null);}"}, + with_reduce: %{ + map: "function (doc) {emit(doc.integer, doc.integer)};", + reduce: "function (keys, values) { return sum(values); };" + } + } + } + + {:ok, _} = create_doc(db_name, design_doc) + + resp = + Couch.post("/#{db_name}/_design/test/_view/no_reduce", + body: "[]" + ) + + assert resp.status_code == 400 + assert resp.body["error"] == "bad_request" + assert resp.body["reason"] == "Request body must be a JSON object" + + resp = + Couch.post("/#{db_name}/_design/test/_view/no_reduce", + body: %{keys: 1} + ) + + assert resp.status_code == 400 + assert resp.body["error"] == "bad_request" + assert resp.body["reason"] == "`keys` member must be an array." 
+ end + + @tag :with_db + test "reduce overflow error", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, @document) + + design_doc2 = %{ + _id: "_design/testbig", + language: "javascript", + views: %{ + reduce_too_big: %{ + map: "function (doc) {emit(doc.integer, doc.integer)};", + reduce: + "function (keys, values) { var chars = []; for (var i=0; i < 1000; i++) {chars.push('wazzap');};return chars; };" + } + } + } + + {:ok, _} = create_doc(db_name, design_doc2) + + resp = Couch.get("/#{db_name}/_design/testbig/_view/reduce_too_big") + assert resp.status_code == 200 + # if the reduce grows too fast, throw an overflow error + assert Enum.at(resp.body["rows"], 0)["error"] == "reduce_overflow_error" + end + + @tag :with_db + test "temporary view should give error message", context do + db_name = context[:db_name] + + resp = + Couch.post("/#{db_name}/_temp_view", + headers: ["Content-Type": "application/json"], + body: %{ + language: "javascript", + map: "function(doc){emit(doc.integer)}" + } + ) + + assert resp.status_code == 410 + assert resp.body["error"] == "gone" + assert resp.body["reason"] == "Temporary views are not supported in CouchDB" + end + + defp create_view(db_name, map_fun) do + ddoc_name = "_design/temp_#{now(:ms)}" + + ddoc = %{ + _id: ddoc_name, + language: "javascript", + views: %{ + view: %{map: map_fun} + } + } + + {:ok, _} = create_doc(db_name, ddoc) + ddoc_name + end + + defp now(:ms) do + case elem(:os.type(), 0) do + :win32 -> + div(:erlang.system_time(), 1_000) + + _ -> + div(:erlang.system_time(), 1_000_000) + end + end +end diff --git a/test/elixir/test/view_include_docs_test.exs b/test/elixir/test/view_include_docs_test.exs new file mode 100644 index 00000000000..1c2ab57315b --- /dev/null +++ b/test/elixir/test/view_include_docs_test.exs @@ -0,0 +1,264 @@ +defmodule ViewIncludeDocsTest do + use CouchTestCase + + @moduletag :view_include_docs + @moduletag kind: :single_node + + @ddoc %{ + _id: "_design/test", + 
language: "javascript", + views: %{ + all_docs: %{ + map: "function(doc) { emit(doc.integer, doc.string) }" + }, + with_prev: %{ + map: + "function(doc){if(doc.prev) emit(doc._id,{'_rev':doc.prev}); else emit(doc._id,{'_rev':doc._rev});}" + }, + with_id: %{ + map: + "function(doc) {if(doc.link_id) { var value = {'_id':doc.link_id}; if (doc.link_rev) {value._rev = doc.link_rev}; emit(doc._id, value);}};" + }, + summate: %{ + map: + "function (doc) { if (typeof doc.integer === 'number') {emit(doc.integer, doc.integer)};}", + reduce: "function (keys, values) { return sum(values); };" + } + } + } + + setup_all do + db_name = random_db_name() + {:ok, _} = create_db(db_name) + on_exit(fn -> delete_db(db_name) end) + + bulk_save(db_name, make_docs(0..99)) + + create_doc(db_name, @ddoc) + + {:ok, [db_name: db_name]} + end + + test "include docs in view", context do + db_name = context[:db_name] + resp = view(db_name, "test/all_docs", %{include_docs: true, limit: 2}) + assert length(resp.body["rows"]) == 2 + row0 = Enum.at(resp.body["rows"], 0) + assert row0["id"] == "0" + assert row0["doc"]["_id"] == "0" + row1 = Enum.at(resp.body["rows"], 1) + assert row1["id"] == "1" + assert row1["doc"]["_id"] == "1" + + resp = view(db_name, "test/all_docs", %{include_docs: true}, [29, 74]) + assert length(resp.body["rows"]) == 2 + row0 = Enum.at(resp.body["rows"], 0) + assert row0["doc"]["_id"] == "29" + row1 = Enum.at(resp.body["rows"], 1) + assert row1["doc"]["integer"] == 74 + end + + test "include docs in all_docs", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_all_docs", + query: [limit: 2, skip: 1, include_docs: true] + ) + + assert length(resp.body["rows"]) == 2 + row0 = Enum.at(resp.body["rows"], 0) + row1 = Enum.at(resp.body["rows"], 1) + assert row0["doc"]["integer"] == 1 + assert row1["doc"]["integer"] == 10 + + resp = + Couch.post("/#{db_name}/_all_docs", + query: [include_docs: true], + headers: ["Content-Type": "application/json"], + body: 
%{"keys" => ["not_a_doc"]} + ) + + assert length(resp.body["rows"]) == 1 + row0 = Enum.at(resp.body["rows"], 0) + assert not Map.has_key?(row0, "doc") + + resp = + Couch.post("/#{db_name}/_all_docs", + query: [include_docs: true], + headers: ["Content-Type": "application/json"], + body: %{"keys" => ["1", "foo"]} + ) + + assert length(resp.body["rows"]) == 2 + row0 = Enum.at(resp.body["rows"], 0) + row1 = Enum.at(resp.body["rows"], 1) + assert row0["doc"]["integer"] == 1 + assert not Map.has_key?(row1, "doc") + + resp = + Couch.get("/#{db_name}/_all_docs", + query: [limit: 0, include_docs: true] + ) + + assert Enum.empty?(resp.body["rows"]) + end + + test "no reduce support", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_design/test/_view/summate", query: [include_docs: true]) + + assert resp.status_code == 400 + assert resp.body["error"] == "query_parse_error" + end + + test "Reduce support when reduce=false", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_design/test/_view/summate", + query: [reduce: false, include_docs: true] + ) + + assert length(resp.body["rows"]) == 100 + end + + test "Not an error with include_docs=false&reduce=true", context do + db_name = context[:db_name] + + resp = + Couch.get("/#{db_name}/_design/test/_view/summate", + query: [reduce: true, include_docs: false] + ) + + assert length(resp.body["rows"]) == 1 + row0 = Enum.at(resp.body["rows"], 0) + assert row0["value"] == 4950 + end + + @tag :with_db + test "link to another doc from a value", context do + db_name = context[:db_name] + + bulk_save(db_name, make_docs(0..99)) + create_doc(db_name, @ddoc) + + doc_link = %{ + _id: "link-to-10", + link_id: "10" + } + + {:ok, _} = create_doc(db_name, doc_link) + resp = view(db_name, "test/with_id", %{key: ~s("link-to-10")}) + assert length(resp.body["rows"]) == 1 + row0 = Enum.at(resp.body["rows"], 0) + assert row0["key"] == "link-to-10" + assert row0["value"]["_id"] == "10" + + resp 
= view(db_name, "test/with_id", %{key: ~s("link-to-10"), include_docs: true}) + assert length(resp.body["rows"]) == 1 + row0 = Enum.at(resp.body["rows"], 0) + assert row0["value"]["_id"] == "10" + assert row0["doc"]["_id"] == "10" + end + + @tag :with_db + test "emitted _rev controls things", context do + db_name = context[:db_name] + + bulk_save(db_name, make_docs(0..99)) + create_doc(db_name, @ddoc) + + resp = + Couch.post("/#{db_name}/_all_docs", + query: [include_docs: true], + headers: ["Content-Type": "application/json"], + body: %{"keys" => ["0"]} + ) + + doc_before = Enum.at(resp.body["rows"], 0)["doc"] + + resp = Couch.get("/#{db_name}/0") + assert resp.status_code == 200 + prev = resp.body["_rev"] + + doc_after = + resp.body + |> Map.put("integer", 100) + |> Map.put("prev", prev) + + saved_doc = save(db_name, doc_after) + + resp = Couch.get("/#{db_name}/0") + assert resp.status_code == 200 + doc_after = resp.body + assert doc_after["_rev"] == saved_doc["_rev"] + assert doc_after["_rev"] != doc_after["prev"] + assert doc_after["integer"] == 100 + + resp = view(db_name, "test/with_prev", %{include_docs: true}, ["0"]) + row0 = Enum.at(resp.body["rows"], 0)["doc"] + assert row0["_id"] == "0" + assert row0["_rev"] == doc_before["_rev"] + assert not Map.has_key?(row0, "prev") + assert assert row0["integer"] == 0 + end + + test "COUCHDB-549 - include_docs=true with conflicts=true" do + db_name_a = random_db_name() + db_name_b = random_db_name() + create_db(db_name_a) + create_db(db_name_b) + on_exit(fn -> delete_db(db_name_a) end) + on_exit(fn -> delete_db(db_name_b) end) + + ddoc = %{ + _id: "_design/mydesign", + language: "javascript", + views: %{ + myview: %{ + map: """ + function(doc) { + emit(doc.value, 1); + } + """ + } + } + } + + {:ok, _} = create_doc(db_name_a, ddoc) + + doc1a = %{_id: "foo", value: 1, str: "1"} + {:ok, _} = create_doc(db_name_a, doc1a) + + doc1b = %{_id: "foo", value: 1, str: "666"} + {:ok, _} = create_doc(db_name_b, doc1b) + + doc2 = 
%{_id: "bar", value: 2, str: "2"} + {:ok, _} = create_doc(db_name_a, doc2) + + replicate(db_name_a, db_name_b) + + resp = Couch.get("/#{db_name_b}/foo", query: [conflicts: true]) + assert resp.status_code == 200 + doc1b = resp.body + assert Map.has_key?(doc1b, "_conflicts") + assert length(doc1b["_conflicts"]) == 1 + conflict_rev = Enum.at(doc1b["_conflicts"], 0) + + resp = Couch.get("/#{db_name_b}/bar", query: [conflicts: true]) + assert resp.status_code == 200 + doc2 = resp.body + assert not Map.has_key?(doc2, "_conflicts") + + resp = view(db_name_b, "mydesign/myview", %{include_docs: true, conflicts: true}) + assert length(resp.body["rows"]) == 2 + row0 = Enum.at(resp.body["rows"], 0)["doc"] + assert length(row0["_conflicts"]) == 1 + assert Enum.at(row0["_conflicts"], 0) == conflict_rev + row1 = Enum.at(resp.body["rows"], 1)["doc"] + assert not Map.has_key?(row1, "_conflicts") + end +end diff --git a/test/elixir/test/view_multi_key_all_docs_test.exs b/test/elixir/test/view_multi_key_all_docs_test.exs new file mode 100644 index 00000000000..c983905805f --- /dev/null +++ b/test/elixir/test/view_multi_key_all_docs_test.exs @@ -0,0 +1,194 @@ +defmodule ViewMultiKeyAllDocsTest do + use CouchTestCase + + @moduletag :view_multi_key_all_docs + @moduletag kind: :single_node + + @keys ["10", "15", "30", "37", "50"] + + setup_all do + db_name = random_db_name() + {:ok, _} = create_db(db_name) + on_exit(fn -> delete_db(db_name) end) + + bulk_save(db_name, make_docs(0..99)) + + {:ok, [db_name: db_name]} + end + + test "keys in POST body", context do + db_name = context[:db_name] + + resp = all_docs(db_name, nil, @keys) + assert resp.status_code == 200 + rows = resp.body["rows"] + assert length(rows) == length(@keys) + + rows_id = Enum.map(rows, & &1["id"]) + assert rows_id == @keys + end + + test "keys in GET parameters", context do + db_name = context[:db_name] + resp = all_docs(db_name, keys: :jiffy.encode(@keys)) + assert resp.status_code == 200 + rows = resp.body["rows"] 
+ assert length(rows) == length(@keys) + rows_id = Enum.map(rows, & &1["id"]) + assert rows_id == @keys + end + + test "keys in POST body (limit)", context do + db_name = context[:db_name] + + resp = all_docs(db_name, [limit: 1], @keys) + assert resp.status_code == 200 + rows = resp.body["rows"] + assert length(rows) == 1 + assert Enum.at(rows, 0)["id"] == Enum.at(@keys, 0) + end + + test "keys in GET parameters (limit)", context do + db_name = context[:db_name] + resp = all_docs(db_name, limit: 1, keys: :jiffy.encode(@keys)) + assert resp.status_code == 200 + rows = resp.body["rows"] + assert length(rows) == 1 + assert Enum.at(rows, 0)["id"] == Enum.at(@keys, 0) + end + + test "keys in POST body (skip)", context do + db_name = context[:db_name] + + resp = all_docs(db_name, [skip: 2], @keys) + assert resp.status_code == 200 + rows = resp.body["rows"] + assert length(rows) == 3 + + rows_id = Enum.map(rows, & &1["id"]) + assert rows_id == Enum.drop(@keys, 2) + end + + test "keys in GET parameters (skip)", context do + db_name = context[:db_name] + resp = all_docs(db_name, skip: 2, keys: :jiffy.encode(@keys)) + assert resp.status_code == 200 + rows = resp.body["rows"] + assert length(rows) == 3 + rows_id = Enum.map(rows, & &1["id"]) + assert rows_id == Enum.drop(@keys, 2) + end + + test "keys in POST body (descending)", context do + db_name = context[:db_name] + + resp = all_docs(db_name, [descending: true], @keys) + assert resp.status_code == 200 + rows = resp.body["rows"] + assert length(rows) == length(@keys) + + rows_id = Enum.map(rows, & &1["id"]) + assert rows_id == Enum.reverse(@keys) + end + + test "keys in GET parameters (descending)", context do + db_name = context[:db_name] + resp = all_docs(db_name, descending: true, keys: :jiffy.encode(@keys)) + assert resp.status_code == 200 + rows = resp.body["rows"] + assert length(rows) == length(@keys) + rows_id = Enum.map(rows, & &1["id"]) + assert rows_id == Enum.reverse(@keys) + end + + test "keys in POST body 
(descending, skip, limit)", context do + db_name = context[:db_name] + + resp = all_docs(db_name, [descending: "true", skip: 3, limit: 1], @keys) + assert resp.status_code == 200 + rows = resp.body["rows"] + assert length(rows) == 1 + + key = + @keys + |> Enum.reverse() + |> Enum.drop(3) + |> Enum.at(0) + + assert Enum.at(rows, 0)["id"] == key + end + + test "keys in GET parameters (descending, skip, limit)", context do + db_name = context[:db_name] + + resp = + all_docs(db_name, descending: "true", skip: 3, limit: 1, keys: :jiffy.encode(@keys)) + + assert resp.status_code == 200 + rows = resp.body["rows"] + assert length(rows) == 1 + + key = + @keys + |> Enum.reverse() + |> Enum.drop(3) + |> Enum.at(0) + + assert Enum.at(rows, 0)["id"] == key + end + + test "POST - get invalid rows when the key doesn't exist", context do + db_name = context[:db_name] + + resp = all_docs(db_name, nil, ["1211", "i_dont_exist", "0"]) + assert resp.status_code == 200 + rows = resp.body["rows"] + assert length(rows) == 3 + assert Enum.at(rows, 0)["error"] == "not_found" + assert not Map.has_key?(Enum.at(rows, 0), "id") + assert Enum.at(rows, 1)["error"] == "not_found" + assert not Map.has_key?(Enum.at(rows, 1), "id") + assert Enum.at(rows, 2)["id"] == Enum.at(rows, 2)["key"] + assert Enum.at(rows, 2)["key"] == "0" + end + + test "GET - get invalid rows when the key doesn't exist", context do + db_name = context[:db_name] + + resp = all_docs(db_name, keys: :jiffy.encode(["1211", "i_dont_exist", "0"])) + assert resp.status_code == 200 + rows = resp.body["rows"] + assert length(rows) == 3 + assert Enum.at(rows, 0)["error"] == "not_found" + assert not Map.has_key?(Enum.at(rows, 0), "id") + assert Enum.at(rows, 1)["error"] == "not_found" + assert not Map.has_key?(Enum.at(rows, 1), "id") + assert Enum.at(rows, 2)["id"] == Enum.at(rows, 2)["key"] + assert Enum.at(rows, 2)["key"] == "0" + end + + test "empty keys", context do + db_name = context[:db_name] + + resp = all_docs(db_name, keys: 
:jiffy.encode([])) + assert resp.status_code == 200 + rows = resp.body["rows"] + assert Enum.empty?(rows) + end + + defp all_docs(db_name, options, keys \\ nil) do + resp = + case keys do + nil -> + Couch.get("/#{db_name}/_all_docs", query: options) + + _ -> + Couch.post("/#{db_name}/_all_docs", + query: options, + body: %{"keys" => keys} + ) + end + + resp + end +end diff --git a/test/elixir/test/view_multi_key_design_test.exs b/test/elixir/test/view_multi_key_design_test.exs new file mode 100644 index 00000000000..03cdc574d45 --- /dev/null +++ b/test/elixir/test/view_multi_key_design_test.exs @@ -0,0 +1,319 @@ +defmodule ViewMultiKeyDesignTest do + use CouchTestCase + + @moduletag :view_multi_key_design + @moduletag kind: :single_node + + @keys [10, 15, 30, 37, 50] + + @ddoc %{ + _id: "_design/test", + language: "javascript", + views: %{ + all_docs: %{ + map: "function(doc) { emit(doc.integer, doc.string) }" + }, + multi_emit: %{ + map: "function(doc) {for(var i = 0 ; i < 3 ; i++) { emit(i, doc.integer) ; } }" + }, + summate: %{ + map: "function (doc) {emit(doc.integer, doc.integer)};", + reduce: "function (keys, values) { return sum(values); };" + } + } + } + + setup_all do + db_name = random_db_name() + {:ok, _} = create_db(db_name) + on_exit(fn -> delete_db(db_name) end) + + bulk_save(db_name, make_docs(0..99)) + {:ok, _} = create_doc(db_name, @ddoc) + + {:ok, [db_name: db_name]} + end + + test "that missing keys work too", context do + db_name = context[:db_name] + keys = [101, 30, 15, 37, 50] + resp = view(db_name, "test/summate", [group: true], keys) + rows = resp.body["rows"] + assert length(rows) == length(keys) - 1 + + assert Enum.all?(rows, &Enum.member?(keys, &1["key"])) + assert Enum.all?(rows, &(&1["key"] == &1["value"])) + end + + test "keys in POST body", context do + db_name = context[:db_name] + resp = view(db_name, "test/all_docs", nil, @keys) + rows = resp.body["rows"] + assert length(rows) == length(@keys) + assert Enum.all?(rows, 
&Enum.member?(@keys, &1["key"])) + assert Enum.all?(rows, &(&1["key"] == String.to_integer(&1["value"]))) + end + + test "keys in GET parameters", context do + db_name = context[:db_name] + resp = view(db_name, "test/all_docs", keys: :jiffy.encode(@keys)) + rows = resp.body["rows"] + assert length(rows) == length(@keys) + assert Enum.all?(rows, &Enum.member?(@keys, &1["key"])) + assert Enum.all?(rows, &(&1["key"] == String.to_integer(&1["value"]))) + end + + test "empty keys", context do + db_name = context[:db_name] + + resp = view(db_name, "test/all_docs", keys: :jiffy.encode([])) + assert resp.status_code == 200 + rows = resp.body["rows"] + assert Enum.empty?(rows) + end + + test "keys in POST body (group)", context do + db_name = context[:db_name] + resp = view(db_name, "test/summate", [group: true], @keys) + rows = resp.body["rows"] + assert length(rows) == length(@keys) + assert Enum.all?(rows, &Enum.member?(@keys, &1["key"])) + assert Enum.all?(rows, &(&1["key"] == &1["value"])) + end + + test "keys in GET body (group)", context do + db_name = context[:db_name] + resp = view(db_name, "test/summate", group: true, keys: :jiffy.encode(@keys)) + rows = resp.body["rows"] + assert length(rows) == length(@keys) + assert Enum.all?(rows, &Enum.member?(@keys, &1["key"])) + assert Enum.all?(rows, &(&1["key"] == &1["value"])) + end + + test "POST - invalid parameter combinations get rejected ", context do + db_name = context[:db_name] + + badargs = [[startkey: 0], [endkey: 0], [key: 0], [group_level: 2]] + + Enum.each(badargs, fn args -> + resp = + Couch.post("/#{db_name}/_design/test/_view/all_docs", + query: args, + body: %{"keys" => @keys} + ) + + assert resp.status_code == 400 + assert resp.body["error"] == "query_parse_error" + end) + + resp = + Couch.post("/#{db_name}/_design/test/_view/summate", + query: nil, + body: %{"keys" => @keys} + ) + + assert resp.status_code == 400 + assert resp.body["error"] == "query_parse_error" + end + + test "GET - invalid parameter 
combinations get rejected ", context do + db_name = context[:db_name] + + badargs = [ + [startkey: 0, keys: :jiffy.encode(@keys)], + [endkey: 0, keys: :jiffy.encode(@keys)], + [key: 0, keys: :jiffy.encode(@keys)], + [group_level: 2, keys: :jiffy.encode(@keys)] + ] + + Enum.each(badargs, fn args -> + resp = + Couch.get("/#{db_name}/_design/test/_view/all_docs", + query: args + ) + + assert resp.status_code == 400 + assert resp.body["error"] == "query_parse_error" + end) + + resp = + Couch.get("/#{db_name}/_design/test/_view/summate", + query: [keys: :jiffy.encode(@keys)], + body: %{"keys" => @keys} + ) + + assert resp.status_code == 400 + assert resp.body["error"] == "query_parse_error" + end + + test "that a map & reduce containing func support keys when reduce=false", context do + db_name = context[:db_name] + resp = view(db_name, "test/summate", [reduce: false], @keys) + assert length(resp.body["rows"]) == 5 + + resp = view(db_name, "test/summate", reduce: false, keys: :jiffy.encode(@keys)) + assert length(resp.body["rows"]) == 5 + end + + test "that limiting by startkey_docid and endkey_docid get applied", context do + db_name = context[:db_name] + + exp_key = [0, 0, 0, 2, 2, 2] + exp_val = [21, 22, 23, 21, 22, 23] + + resp = + view(db_name, "test/multi_emit", [startkey_docid: 21, endkey_docid: 23], [0, 2]) + + rows = resp.body["rows"] + rows_key = Enum.map(rows, & &1["key"]) + assert rows_key == exp_key + + rows_value = Enum.map(rows, & &1["value"]) + assert rows_value == exp_val + + resp = + view(db_name, "test/multi_emit", + startkey_docid: 21, + endkey_docid: 23, + keys: :jiffy.encode([0, 2]) + ) + + rows = resp.body["rows"] + rows_key = Enum.map(rows, & &1["key"]) + assert rows_key == exp_key + + rows_value = Enum.map(rows, & &1["value"]) + assert rows_value == exp_val + end + + test "limit works", context do + db_name = context[:db_name] + + resp = view(db_name, "test/all_docs", [limit: 1], @keys) + rows = resp.body["rows"] + assert length(rows) == 1 + 
assert Enum.at(rows, 0)["key"] == 10 + + resp = view(db_name, "test/all_docs", limit: 1, keys: :jiffy.encode(@keys)) + rows = resp.body["rows"] + assert length(rows) == 1 + assert Enum.at(rows, 0)["key"] == 10 + end + + test "offset works", context do + db_name = context[:db_name] + + resp = view(db_name, "test/multi_emit", [skip: 1], [0]) + rows = resp.body["rows"] + assert length(rows) == 99 + + resp = view(db_name, "test/multi_emit", skip: 1, keys: :jiffy.encode([0])) + rows = resp.body["rows"] + assert length(rows) == 99 + end + + test "dir works", context do + db_name = context[:db_name] + + resp = view(db_name, "test/multi_emit", [descending: true], [1]) + rows = resp.body["rows"] + assert length(rows) == 100 + + resp = view(db_name, "test/multi_emit", descending: true, keys: :jiffy.encode([1])) + rows = resp.body["rows"] + assert length(rows) == 100 + end + + test "argument combinations", context do + db_name = context[:db_name] + + resp = view(db_name, "test/multi_emit", [descending: true, skip: 3, limit: 2], [2]) + rows = resp.body["rows"] + assert length(rows) == 2 + + resp = + view(db_name, "test/multi_emit", + descending: true, + skip: 3, + limit: 2, + keys: :jiffy.encode([2]) + ) + + rows = resp.body["rows"] + assert length(rows) == 2 + + resp = + view(db_name, "test/multi_emit", [skip: 0, limit: 1, startkey_docid: "13"], [0]) + + rows = resp.body["rows"] + assert length(rows) == 1 + assert Enum.at(rows, 0)["value"] == 13 + + resp = + view(db_name, "test/multi_emit", [skip: 2, limit: 3, startkey_docid: "13"], [0]) + + rows = resp.body["rows"] + assert length(rows) == 3 + + resp = + view(db_name, "test/multi_emit", + skip: 2, + limit: 3, + startkey_docid: "13", + keys: :jiffy.encode([0]) + ) + + rows = resp.body["rows"] + assert length(rows) == 3 + + resp = + view( + db_name, + "test/multi_emit", + [skip: 1, limit: 5, startkey_docid: "25", endkey_docid: "27"], + [1] + ) + + rows = resp.body["rows"] + assert length(rows) == 2 + assert Enum.at(rows, 
0)["value"] == 26 or assert(Enum.at(rows, 0)["value"] == 27) + + resp = + view(db_name, "test/multi_emit", + skip: 1, + limit: 5, + startkey_docid: "25", + endkey_docid: "27", + keys: :jiffy.encode([1]) + ) + + rows = resp.body["rows"] + assert length(rows) == 2 + assert Enum.at(rows, 0)["value"] == 26 or assert(Enum.at(rows, 0)["value"] == 27) + + resp = + view( + db_name, + "test/multi_emit", + [skip: 1, limit: 5, startkey_docid: "28", endkey_docid: "26", descending: true], + [1] + ) + + rows = resp.body["rows"] + assert length(rows) == 2 + assert Enum.at(rows, 0)["value"] == 26 or assert(Enum.at(rows, 0)["value"] == 27) + + resp = + view(db_name, "test/multi_emit", + skip: 1, + limit: 5, + startkey_docid: "28", + endkey_docid: "26", + descending: true, + keys: :jiffy.encode([1]) + ) + + rows = resp.body["rows"] + assert length(rows) == 2 + end +end diff --git a/test/elixir/test/view_offsets_test.exs b/test/elixir/test/view_offsets_test.exs new file mode 100644 index 00000000000..9361a54aeb5 --- /dev/null +++ b/test/elixir/test/view_offsets_test.exs @@ -0,0 +1,101 @@ +defmodule ViewOffsetTest do + use CouchTestCase + + @moduletag :view_offsets + @moduletag kind: :single_node + + @moduledoc """ + Tests about view offsets. + This is a port of the view_offsets.js javascript test suite. 
+ """ + + @docs [ + %{"_id" => "a1", "letter" => "a", "number" => 1, "foo" => "bar"}, + %{"_id" => "a2", "letter" => "a", "number" => 2, "foo" => "bar"}, + %{"_id" => "a3", "letter" => "a", "number" => 3, "foo" => "bar"}, + %{"_id" => "b1", "letter" => "b", "number" => 1, "foo" => "bar"}, + %{"_id" => "b2", "letter" => "b", "number" => 2, "foo" => "bar"}, + %{"_id" => "b3", "letter" => "b", "number" => 3, "foo" => "bar"}, + %{"_id" => "b4", "letter" => "b", "number" => 4, "foo" => "bar"}, + %{"_id" => "b5", "letter" => "b", "number" => 5, "foo" => "bar"}, + %{"_id" => "c1", "letter" => "c", "number" => 1, "foo" => "bar"}, + %{"_id" => "c2", "letter" => "c", "number" => 2, "foo" => "bar"} + ] + + @design_doc %{ + "_id" => "_design/test", + "views" => %{ + "offset" => %{ + "map" => "function(doc) { emit([doc.letter, doc.number], doc); }" + } + } + } + + @tag :with_db + test "basic view offsets", context do + db_name = context[:db_name] + save(db_name, @design_doc) + bulk_save(db_name, @docs) + + [ + [["c", 2], 0], + [["c", 1], 1], + [["b", 5], 2], + [["b", 4], 3], + [["b", 3], 4], + [["b", 2], 5], + [["b", 1], 6], + [["a", 3], 7], + [["a", 2], 8], + [["a", 1], 9] + ] + |> Enum.each(fn [start_key, offset] -> + result = + view(db_name, "test/offset", %{ + "startkey" => :jiffy.encode(start_key), + "descending" => true + }) + + assert result.body["offset"] === offset + end) + end + + test "repeated view offsets" do + 0..14 |> Enum.each(fn _ -> repeated_view_offset_test_fun end) + end + + def repeated_view_offset_test_fun do + db_name = random_db_name() + create_db(db_name) + + save(db_name, @design_doc) + bulk_save(db_name, @docs) + + first_response = + view(db_name, "test/offset", %{ + "startkey" => :jiffy.encode(["b", 4]), + "startkey_docid" => "b4", + "endkey" => :jiffy.encode(["b"]), + "descending" => true, + "limit" => 2, + "skip" => 1 + }) + + second_response = + view(db_name, "test/offset", %{ + "startkey" => :jiffy.encode(["c", 3]) + }) + + third_response = + 
view(db_name, "test/offset", %{ + "startkey" => :jiffy.encode(["b", 6]), + "endkey" => :jiffy.encode(["b", 7]) + }) + + assert first_response.body["offset"] === 4 + assert second_response.body["offset"] === length(@docs) + assert third_response.body["offset"] === 8 + + delete_db(db_name) + end +end diff --git a/test/elixir/test/view_pagination_test.exs b/test/elixir/test/view_pagination_test.exs new file mode 100644 index 00000000000..5aa1cd7895f --- /dev/null +++ b/test/elixir/test/view_pagination_test.exs @@ -0,0 +1,190 @@ +defmodule ViewPaginationTest do + use CouchTestCase + + @moduletag :view_pagination + @moduletag kind: :single_node + + @moduledoc """ + Integration tests for pagination. + This is a port of the view_pagination.js test suite. + """ + + @tag :with_db + test "basic view pagination", context do + db_name = context[:db_name] + + docs = make_docs(0..99) + bulk_save(db_name, docs) + + query_function = "function(doc) { emit(doc.integer, null); }" + + 0..99 + |> Enum.filter(fn number -> rem(number, 10) === 0 end) + |> Enum.each(fn i -> + query_options = %{"startkey" => i, "startkey_docid" => i, limit: 10} + result = query(db_name, query_function, nil, query_options) + assert result["total_rows"] === length(docs) + assert length(result["rows"]) === 10 + assert result["offset"] === i + Enum.each(0..9, &assert(Enum.at(result["rows"], &1)["key"] === &1 + i)) + end) + end + + @tag :with_db + test "aliases start_key and start_key_doc_id should work", context do + db_name = context[:db_name] + + docs = make_docs(0..99) + bulk_save(db_name, docs) + + query_function = "function(doc) { emit(doc.integer, null); }" + + 0..99 + |> Enum.filter(fn number -> rem(number, 10) === 0 end) + |> Enum.each(fn i -> + query_options = %{"start_key" => i, "start_key_docid" => i, limit: 10} + result = query(db_name, query_function, nil, query_options) + assert result["total_rows"] === length(docs) + assert length(result["rows"]) === 10 + assert result["offset"] === i + 
Enum.each(0..9, &assert(Enum.at(result["rows"], &1)["key"] === &1 + i)) + end) + end + + @tag :with_db + test "descending view pagination", context do + db_name = context[:db_name] + + docs = make_docs(0..99) + bulk_save(db_name, docs) + + query_function = "function(doc) { emit(doc.integer, null); }" + + 100..0 + |> Enum.filter(fn number -> rem(number, 10) === 0 end) + |> Enum.map(&(&1 - 1)) + |> Enum.filter(&(&1 > 0)) + |> Enum.each(fn i -> + query_options = %{ + "startkey" => i, + "startkey_docid" => i, + limit: 10, + descending: true + } + + result = query(db_name, query_function, nil, query_options) + assert result["total_rows"] === length(docs) + assert length(result["rows"]) === 10 + assert result["offset"] === length(docs) - i - 1 + Enum.each(0..9, &assert(Enum.at(result["rows"], &1)["key"] === i - &1)) + end) + end + + @tag :with_db + test "descending=false parameter should just be ignored", context do + db_name = context[:db_name] + + docs = make_docs(0..99) + bulk_save(db_name, docs) + + query_function = "function(doc) { emit(doc.integer, null); }" + + 0..99 + |> Enum.filter(fn number -> rem(number, 10) === 0 end) + |> Enum.each(fn i -> + query_options = %{ + "start_key" => i, + "start_key_docid" => i, + limit: 10, + descending: false + } + + result = query(db_name, query_function, nil, query_options) + assert result["total_rows"] === length(docs) + assert length(result["rows"]) === 10 + assert result["offset"] === i + Enum.each(0..9, &assert(Enum.at(result["rows"], &1)["key"] === &1 + i)) + end) + end + + @tag :with_db + test "endkey document id", context do + db_name = context[:db_name] + + docs = make_docs(0..99) + bulk_save(db_name, docs) + + query_function = "function(doc) { emit(null, null); }" + + query_options = %{ + "startkey" => :null, + "startkey_docid" => 1, + "endkey" => :null, + "endkey_docid" => 40 + } + + result = query(db_name, query_function, nil, query_options) + test_end_key_doc_id(result, docs) + end + + @tag :with_db + test "endkey 
document id, but with end_key_doc_id alias", context do + db_name = context[:db_name] + + docs = make_docs(0..99) + bulk_save(db_name, docs) + + query_function = "function(doc) { emit(null, null); }" + + query_options = %{ + "start_key" => :null, + "start_key_doc_id" => 1, + "end_key" => :null, + "end_key_doc_id" => 40 + } + + result = query(db_name, query_function, nil, query_options) + test_end_key_doc_id(result, docs) + end + + defp test_end_key_doc_id(query_result, docs) do + assert length(query_result["rows"]) === 35 + assert query_result["total_rows"] === length(docs) + assert query_result["offset"] === 1 + assert Enum.at(query_result["rows"], 0)["id"] === "1" + assert Enum.at(query_result["rows"], 1)["id"] === "10" + assert Enum.at(query_result["rows"], 2)["id"] === "11" + assert Enum.at(query_result["rows"], 3)["id"] === "12" + assert Enum.at(query_result["rows"], 4)["id"] === "13" + assert Enum.at(query_result["rows"], 5)["id"] === "14" + assert Enum.at(query_result["rows"], 6)["id"] === "15" + assert Enum.at(query_result["rows"], 7)["id"] === "16" + assert Enum.at(query_result["rows"], 8)["id"] === "17" + assert Enum.at(query_result["rows"], 9)["id"] === "18" + assert Enum.at(query_result["rows"], 10)["id"] === "19" + assert Enum.at(query_result["rows"], 11)["id"] === "2" + assert Enum.at(query_result["rows"], 12)["id"] === "20" + assert Enum.at(query_result["rows"], 13)["id"] === "21" + assert Enum.at(query_result["rows"], 14)["id"] === "22" + assert Enum.at(query_result["rows"], 15)["id"] === "23" + assert Enum.at(query_result["rows"], 16)["id"] === "24" + assert Enum.at(query_result["rows"], 17)["id"] === "25" + assert Enum.at(query_result["rows"], 18)["id"] === "26" + assert Enum.at(query_result["rows"], 19)["id"] === "27" + assert Enum.at(query_result["rows"], 20)["id"] === "28" + assert Enum.at(query_result["rows"], 21)["id"] === "29" + assert Enum.at(query_result["rows"], 22)["id"] === "3" + assert Enum.at(query_result["rows"], 23)["id"] === "30" 
+ assert Enum.at(query_result["rows"], 24)["id"] === "31" + assert Enum.at(query_result["rows"], 25)["id"] === "32" + assert Enum.at(query_result["rows"], 26)["id"] === "33" + assert Enum.at(query_result["rows"], 27)["id"] === "34" + assert Enum.at(query_result["rows"], 28)["id"] === "35" + assert Enum.at(query_result["rows"], 29)["id"] === "36" + assert Enum.at(query_result["rows"], 30)["id"] === "37" + assert Enum.at(query_result["rows"], 31)["id"] === "38" + assert Enum.at(query_result["rows"], 32)["id"] === "39" + assert Enum.at(query_result["rows"], 33)["id"] === "4" + assert Enum.at(query_result["rows"], 34)["id"] === "40" + end +end diff --git a/test/elixir/test/view_sandboxing_test.exs b/test/elixir/test/view_sandboxing_test.exs new file mode 100644 index 00000000000..02087ac60eb --- /dev/null +++ b/test/elixir/test/view_sandboxing_test.exs @@ -0,0 +1,194 @@ +defmodule ViewSandboxingTest do + use CouchTestCase + + @moduletag :view_sandboxing + @moduletag kind: :single_node + + @document %{integer: 1, string: "1", array: [1, 2, 3]} + + @tag :with_db + test "attempting to change the document has no effect", context do + db_name = context[:db_name] + + {:ok, _} = create_doc(db_name, @document) + + map_fun = """ + function(doc) { + doc.integer = 2; + emit(null, doc); + } + """ + + resp = query(db_name, map_fun, nil, %{include_docs: true}) + rows = resp["rows"] + # either we have an error or our doc is unchanged + assert resp["total_rows"] == 0 or Enum.at(rows, 0)["doc"]["integer"] == 1 + + map_fun = """ + function(doc) { + doc.array[0] = 0; + emit(null, doc); + } + """ + + resp = query(db_name, map_fun, nil, %{include_docs: true}) + row = Enum.at(resp["rows"], 0) + # either we have an error or our doc is unchanged + assert resp["total_rows"] == 0 or Enum.at(row["doc"]["array"], 0) == 1 + end + + @tag :with_db + test "view cannot invoke interpreter internals", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, @document) + + map_fun = """ 
+ function(doc) { + gc(); + emit(null, doc); + } + """ + + # make sure that a view cannot invoke interpreter internals such as the + # garbage collector + resp = query(db_name, map_fun) + assert resp["total_rows"] == 0 + end + + @tag :with_db + test "view cannot access the map_funs and map_results array", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, @document) + + map_fun = """ + function(doc) { + map_funs.push(1); + emit(null, doc); + } + """ + + resp = query(db_name, map_fun) + assert resp["total_rows"] == 0 + + map_fun = """ + function(doc) { + map_results.push(1); + emit(null, doc); + } + """ + + resp = query(db_name, map_fun) + assert resp["total_rows"] == 0 + end + + @tag :with_db + test "COUCHDB-925 - altering 'doc' variable in map function affects other map functions", + context do + db_name = context[:db_name] + + ddoc = %{ + _id: "_design/foobar", + language: "javascript", + views: %{ + view1: %{ + map: """ + function(doc) { + if (doc.values) { + doc.values = [666]; + } + if (doc.tags) { + doc.tags.push("qwerty"); + } + if (doc.tokens) { + doc.tokens["c"] = 3; + } + } + """ + }, + view2: %{ + map: """ + function(doc) { + if (doc.values) { + emit(doc._id, doc.values); + } + if (doc.tags) { + emit(doc._id, doc.tags); + } + if (doc.tokens) { + emit(doc._id, doc.tokens); + } + } + """ + } + } + } + + doc1 = %{ + _id: "doc1", + values: [1, 2, 3] + } + + doc2 = %{ + _id: "doc2", + tags: ["foo", "bar"], + tokens: %{a: 1, b: 2} + } + + {:ok, _} = create_doc(db_name, ddoc) + {:ok, _} = create_doc(db_name, doc1) + {:ok, _} = create_doc(db_name, doc2) + + resp1 = view(db_name, "foobar/view1") + resp2 = view(db_name, "foobar/view2") + + assert Enum.empty?(resp1.body["rows"]) + assert length(resp2.body["rows"]) == 3 + + assert doc1[:_id] == Enum.at(resp2.body["rows"], 0)["key"] + assert doc2[:_id] == Enum.at(resp2.body["rows"], 1)["key"] + assert doc2[:_id] == Enum.at(resp2.body["rows"], 2)["key"] + + assert 
length(Enum.at(resp2.body["rows"], 0)["value"]) == 3 + + row0_values = Enum.at(resp2.body["rows"], 0)["value"] + + assert Enum.at(row0_values, 0) == 1 + assert Enum.at(row0_values, 1) == 2 + assert Enum.at(row0_values, 2) == 3 + + row1_values = Enum.at(resp2.body["rows"], 1)["value"] + row2_values = Enum.at(resp2.body["rows"], 2)["value"] + + # we can't be 100% sure about the order for the same key + assert (is_map(row1_values) and row1_values["a"] == 1) or + (is_list(row1_values) and Enum.at(row1_values, 0) == "foo") + + assert (is_map(row1_values) and row1_values["b"] == 2) or + (is_list(row1_values) and Enum.at(row1_values, 1) == "bar") + + assert (is_map(row2_values) and row2_values["a"] == 1) or + (is_list(row2_values) and Enum.at(row2_values, 0) == "foo") + + assert (is_map(row2_values) and row2_values["b"] == 2) or + (is_list(row2_values) and Enum.at(row2_values, 1) == "bar") + + assert is_list(row1_values) or !Map.has_key?(row1_values, "c") + assert is_list(row2_values) or !Map.has_key?(row2_values, "c") + end + + @tag :with_db + test "runtime code evaluation can be prevented", context do + db_name = context[:db_name] + {:ok, _} = create_doc(db_name, @document) + + map_fun = """ + function(doc) { + var glob = emit.constructor('return this')(); + emit(doc._id, null); + } + """ + + resp = query(db_name, map_fun) + assert resp["total_rows"] == 0 + end +end diff --git a/test/elixir/test/view_test.exs b/test/elixir/test/view_test.exs index 5fb8c009c24..f768cef1689 100644 --- a/test/elixir/test/view_test.exs +++ b/test/elixir/test/view_test.exs @@ -2,6 +2,7 @@ defmodule ViewTest do use CouchTestCase @moduletag :view + @moduletag kind: :single_node @moduledoc """ Test CouchDB /{db}/_design/{ddoc}/_view/{view} diff --git a/test/elixir/test/view_update_seq_test.exs b/test/elixir/test/view_update_seq_test.exs new file mode 100644 index 00000000000..d1eca403854 --- /dev/null +++ b/test/elixir/test/view_update_seq_test.exs @@ -0,0 +1,143 @@ +defmodule ViewUpdateSeqTest 
do + use CouchTestCase + + @moduletag :view_update_seq + @moduletag kind: :single_node + + @moduledoc """ + This is a port of the view_update_seq.js test suite. + """ + + @design_doc %{ + _id: "_design/test", + language: "javascript", + autoupdate: false, + views: %{ + all_docs: %{ + map: "function(doc) { emit(doc.integer, doc.string) }" + }, + summate: %{ + map: + "function (doc) { if (typeof doc.integer === 'number') { emit(doc.integer, doc.integer)}; }", + reduce: "function (keys, values) { return sum(values); };" + } + } + } + + defp seq_int(seq) do + {int, _} = + seq + |> String.split("-") + |> Enum.at(0) + |> Integer.parse() + + int + end + + @tag :with_db + test "db info update seq", context do + db_name = context[:db_name] + + info = info(db_name) + assert seq_int(info["update_seq"]) == 0 + + create_doc(db_name, @design_doc) + + info = info(db_name) + assert seq_int(info["update_seq"]) == 1 + end + + @tag :with_db + test "_all_docs update seq", context do + db_name = context[:db_name] + + resp = Couch.get("/#{db_name}/_all_docs", query: %{:update_seq => true}) + assert seq_int(resp.body["update_seq"]) == 0 + + create_doc(db_name, @design_doc) + + resp = Couch.get("/#{db_name}/_all_docs", query: %{:update_seq => true}) + assert length(resp.body["rows"]) == 1 + assert seq_int(resp.body["update_seq"]) == 1 + + docs = make_docs(0..99) + bulk_save(db_name, docs) + + resp = Couch.get("/#{db_name}/_all_docs", query: %{:limit => 1}) + assert length(resp.body["rows"]) == 1 + assert Map.has_key?(resp.body, "update_seq") == false + + resp = Couch.get("/#{db_name}/_all_docs", query: %{:limit => 1, :update_seq => true}) + assert length(resp.body["rows"]) == 1 + assert seq_int(resp.body["update_seq"]) == 101 + end + + @tag :with_db + test "view update seq", context do + db_name = context[:db_name] + + create_doc(db_name, @design_doc) + docs = make_docs(0..99) + bulk_save(db_name, docs) + + resp = view(db_name, "test/all_docs", %{:limit => 1, :update_seq => true}) + 
assert length(resp.body["rows"]) == 1 + assert seq_int(resp.body["update_seq"]) == 101 + + resp = view(db_name, "test/all_docs", %{:limit => 1, :update_seq => false}) + assert length(resp.body["rows"]) == 1 + assert Map.has_key?(resp.body, "update_seq") == false + + resp = view(db_name, "test/summate", %{:update_seq => true}) + assert length(resp.body["rows"]) == 1 + assert seq_int(resp.body["update_seq"]) == 101 + + save(db_name, %{"_id" => "A", "integer" => 1}) + + resp = + view(db_name, "test/all_docs", %{:limit => 1, :stale => "ok", :update_seq => true}) + + assert length(resp.body["rows"]) == 1 + assert seq_int(resp.body["update_seq"]) == 101 + + save(db_name, %{"_id" => "AA", "integer" => 2}) + + resp = + view(db_name, "test/all_docs", %{ + :limit => 1, + :stale => "update_after", + :update_seq => true + }) + + assert length(resp.body["rows"]) == 1 + assert seq_int(resp.body["update_seq"]) == 101 + + retry_until(fn -> + resp = + view(db_name, "test/all_docs", %{:limit => 1, :stale => "ok", :update_seq => true}) + + assert length(resp.body["rows"]) == 1 + seq_int(resp.body["update_seq"]) == 103 + end) + + resp = + view(db_name, "test/all_docs", %{:limit => 1, :stale => "ok", :update_seq => true}) + + assert length(resp.body["rows"]) == 1 + assert seq_int(resp.body["update_seq"]) == 103 + + resp = view(db_name, "test/all_docs", %{:limit => 1, :update_seq => true}) + + assert length(resp.body["rows"]) == 1 + assert seq_int(resp.body["update_seq"]) == 103 + + resp = view(db_name, "test/all_docs", %{:update_seq => true}, ["0", "1"]) + assert seq_int(resp.body["update_seq"]) == 103 + + resp = view(db_name, "test/all_docs", %{:update_seq => true}, ["0", "1"]) + assert seq_int(resp.body["update_seq"]) == 103 + + resp = view(db_name, "test/summate", %{:group => true, :update_seq => true}, [0, 1]) + assert seq_int(resp.body["update_seq"]) == 103 + end +end diff --git a/test/javascript/cli_runner.js b/test/javascript/cli_runner.js index 73467626b95..a35348f20e1 100644 
--- a/test/javascript/cli_runner.js +++ b/test/javascript/cli_runner.js @@ -11,6 +11,19 @@ // the License. // +/* + * Quit current test execution if it is tagged as skipped or ported to elixir + */ +function quitIfSkippedOrPorted() { + if(couchTests.skip) { + quit(2); + } + + if(couchTests.elixir) { + quit(3); + } +} + /* * Futon test suite was designed to be able to run all tests populated into * couchTests. Here we should only be loading one test, so we'll pop the first @@ -22,14 +35,6 @@ function runTest() { var count = 0; var start = new Date().getTime(); - if(couchTests.skip) { - quit(2); - } - - if(couchTests.elixir) { - quit(3); - } - for(var name in couchTests) { count++; } @@ -51,6 +56,8 @@ function runTest() { } } +quitIfSkippedOrPorted(); + waitForSuccess(CouchDB.isRunning, 'isRunning'); runTest(); diff --git a/test/javascript/tests/changes.js b/test/javascript/tests/changes.js index d312edc41d2..338c1571cd9 100644 --- a/test/javascript/tests/changes.js +++ b/test/javascript/tests/changes.js @@ -9,7 +9,8 @@ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. 
- +couchTests.elixir = true; + function jsonp(obj) { T(jsonp_flag == 0); T(obj.results.length == 1 && obj.last_seq == 1, "jsonp"); @@ -17,6 +18,8 @@ function jsonp(obj) { } couchTests.changes = function(debug) { + return console.log('done in test/elixir/test/changes_test.exs and changes_async_test.exs'); + var db; if (debug) debugger; @@ -359,7 +362,7 @@ couchTests.changes = function(debug) { resp = JSON.parse(req.responseText); T(resp.results.length == 1, "changes_filter/dynamic&field=bop"); T(resp.results[0].changes[0].rev == docres1.rev, "filtered/dynamic&field=bop rev"); - + // these will NEVER run as we're always in navigator == undefined if (!is_safari && xhr) { // full test requires parallel connections // filter with longpoll @@ -708,7 +711,7 @@ couchTests.changes = function(debug) { db = new CouchDB(db_name, {"X-Couch-Full-Commit":"true"}, {"w": 3}); T(db.createDb()); - // create 4 documents... this assumes the update sequnce will start from 0 and then do sth in the cluster + // create 4 documents... 
this assumes the update sequnce will start from 0 and then do sth in the cluster db.save({"bop" : "foom"}); db.save({"bop" : "foom"}); db.save({"bop" : "foom"}); @@ -717,7 +720,7 @@ couchTests.changes = function(debug) { req = CouchDB.request("GET", "/" + db_name + "/_changes"); // simulate an EventSource request with a Last-Event-ID header - // increase timeout to 100 to have enough time 2 assemble (seems like too little timeouts kill + // increase timeout to 100 to have enough time 2 assemble (seems like too little timeouts kill req = CouchDB.request("GET", "/" + db_name + "/_changes?feed=eventsource&timeout=100&since=0", {"headers": {"Accept": "text/event-stream", "Last-Event-ID": JSON.parse(req.responseText).results[1].seq}}); diff --git a/test/javascript/tests/design_docs.js b/test/javascript/tests/design_docs.js index 55e592a18c0..dd2d0e307ae 100644 --- a/test/javascript/tests/design_docs.js +++ b/test/javascript/tests/design_docs.js @@ -10,7 +10,9 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true; couchTests.design_docs = function(debug) { + return console.log('done in test/elixir/test/design_docs.exs'); var db_name = get_random_db_name(); var db_name_a = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/design_docs_query.js b/test/javascript/tests/design_docs_query.js index 07e6577ab68..7b4b612c086 100644 --- a/test/javascript/tests/design_docs_query.js +++ b/test/javascript/tests/design_docs_query.js @@ -9,8 +9,10 @@ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. 
- +couchTests.elixir = true; couchTests.design_docs_query = function(debug) { + return console.log('done in test/elixir/test/design_docs_query_test.exs'); + var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); db.createDb(); diff --git a/test/javascript/tests/design_options.js b/test/javascript/tests/design_options.js index cc2571f6b63..aaab39e5b2b 100644 --- a/test/javascript/tests/design_options.js +++ b/test/javascript/tests/design_options.js @@ -9,8 +9,9 @@ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. - +couchTests.elixir = true; couchTests.design_options = function(debug) { + return console.log('done in test/elixir/test/design_options.exs'); var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); db.createDb(); @@ -36,7 +37,7 @@ couchTests.design_options = function(debug) { T(db.save(designDoc).ok); // should work for temp views - // no more there on cluster - pointless test + // no more there on cluster - pointless test //var rows = db.query(map, null, {options:{include_design: true}}).rows; //T(rows.length == 1); //T(rows[0].value == "_design/fu"); diff --git a/test/javascript/tests/design_paths.js b/test/javascript/tests/design_paths.js index 6e816991ad5..e1d64ea7792 100644 --- a/test/javascript/tests/design_paths.js +++ b/test/javascript/tests/design_paths.js @@ -9,8 +9,9 @@ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. 
- +couchTests.elixir = true; couchTests.design_paths = function(debug) { + return console.log('done in test/elixir/test/design_paths.exs'); if (debug) debugger; var db_name = get_random_db_name() var dbNames = [db_name, db_name + "/with_slashes"]; diff --git a/test/javascript/tests/erlang_views.js b/test/javascript/tests/erlang_views.js index 9b15e104355..140925f58ce 100644 --- a/test/javascript/tests/erlang_views.js +++ b/test/javascript/tests/erlang_views.js @@ -9,7 +9,7 @@ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. - +couchTests.elixir = true; couchTests.erlang_views = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/form_submit.js b/test/javascript/tests/form_submit.js index 356182e8d46..f8dd2baf014 100644 --- a/test/javascript/tests/form_submit.js +++ b/test/javascript/tests/form_submit.js @@ -11,7 +11,10 @@ // the License. // Do some basic tests. +couchTests.elixir = true; couchTests.form_submit = function(debug) { + return console.log('done in test/elixir/test/form_summit_test.exs'); + var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); db.createDb(); diff --git a/test/javascript/tests/http.js b/test/javascript/tests/http.js index c7817789747..bc35921e16e 100644 --- a/test/javascript/tests/http.js +++ b/test/javascript/tests/http.js @@ -9,8 +9,9 @@ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. 
- +couchTests.elixir = true; couchTests.http = function(debug) { + return console.log('done in test/elixir/test/http_test.exs'); var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/jsonp.js b/test/javascript/tests/jsonp.js index 1013c9eba40..f34fdc9c5e9 100644 --- a/test/javascript/tests/jsonp.js +++ b/test/javascript/tests/jsonp.js @@ -9,6 +9,7 @@ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true; // Verify callbacks ran var jsonp_flag = 0; @@ -28,6 +29,7 @@ function jsonp_chunk(doc) { // Do some jsonp tests. couchTests.jsonp = function(debug) { + return console.log('done in test/elixir/test/jsonp_test.exs'); var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); db.createDb(); diff --git a/test/javascript/tests/list_views.js b/test/javascript/tests/list_views.js index e255e15461b..2d74586fe9f 100644 --- a/test/javascript/tests/list_views.js +++ b/test/javascript/tests/list_views.js @@ -9,7 +9,7 @@ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. - +couchTests.elixir = true; couchTests.list_views = function(debug) { var db_name = get_random_db_name(); diff --git a/test/javascript/tests/method_override.js b/test/javascript/tests/method_override.js index fa3e5e88f32..94d798f967d 100644 --- a/test/javascript/tests/method_override.js +++ b/test/javascript/tests/method_override.js @@ -11,7 +11,9 @@ // the License. 
// Allow broken HTTP clients to fake a full method vocabulary with an X-HTTP-METHOD-OVERRIDE header +couchTests.elixir = true; couchTests.method_override = function(debug) { + return console.log('done in test/elixir/test/method_override_test.exs'); var result = JSON.parse(CouchDB.request("GET", "/").responseText); T(result.couchdb == "Welcome"); diff --git a/test/javascript/tests/proxyauth.js b/test/javascript/tests/proxyauth.js index cc75faaf3dc..a91f28c32af 100644 --- a/test/javascript/tests/proxyauth.js +++ b/test/javascript/tests/proxyauth.js @@ -9,12 +9,11 @@ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. - - - + +couchTests.elixir = true; couchTests.proxyauth = function(debug) { // this test proxy authentification handler - + return console.log('done in test/elixir/test/proxyauth_test.exs'); var users_db_name = get_random_db_name(); var usersDb = new CouchDB(users_db_name, {"X-Couch-Full-Commit":"false"}); usersDb.createDb(); @@ -22,9 +21,9 @@ couchTests.proxyauth = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); db.createDb(); - + if (debug) debugger; - + // Simple secret key generator function generateSecret(length) { var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; @@ -34,16 +33,16 @@ couchTests.proxyauth = function(debug) { } return secret; } - + var secret = generateSecret(64); - + function TestFun() { - + var benoitcUserDoc = CouchDB.prepareUserDoc({ name: "benoitc@apache.org" }, "test"); T(usersDb.save(benoitcUserDoc).ok); - + T(CouchDB.session().userCtx.name == null); // test that you can use basic auth aginst the users db @@ -54,20 +53,20 @@ couchTests.proxyauth = function(debug) { }); T(s.userCtx.name == "benoitc@apache.org"); T(s.info.authenticated == "default"); - + CouchDB.logout(); -/* XXX: None of the rest of this is 
supported yet in 2.0 +/* XXX: None of the rest of this is supported yet in 2.0 var headers = { "X-Auth-CouchDB-UserName": "benoitc@apache.org", "X-Auth-CouchDB-Roles": "test", "X-Auth-CouchDB-Token": hex_hmac_sha1(secret, "benoitc@apache.org") }; - + var designDoc = { _id:"_design/test", language: "javascript", - + shows: { "welcome": stringFun(function(doc,req) { return "Welcome " + req.userCtx["name"]; @@ -79,53 +78,53 @@ couchTests.proxyauth = function(debug) { }; db.save(designDoc); - + var req = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/welcome", {headers: headers}); T(req.responseText == "Welcome benoitc@apache.org", req.responseText); - + req = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/role", {headers: headers}); T(req.responseText == "test"); - + var xhr = CouchDB.request("PUT", "/_node/node1@127.0.0.1/_config/couch_httpd_auth/proxy_use_secret",{ body : JSON.stringify("true"), headers: {"X-Couch-Persist": "false"} }); T(xhr.status == 200); - + req = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/welcome", {headers: headers}); T(req.responseText == "Welcome benoitc@apache.org"); - + req = CouchDB.request("GET", "/" + db_name + "/_design/test/_show/role", {headers: headers}); T(req.responseText == "test"); */ } - + run_on_modified_server( [{section: "httpd", key: "authentication_handlers", value:"{chttpd_auth, proxy_authentication_handler}, {chttpd_auth, default_authentication_handler}"}, {section: "chttpd_auth", - key: "authentication_db", + key: "authentication_db", value: users_db_name}, {section: "chttpd_auth", - key: "secret", + key: "secret", value: secret}, {section: "chttpd_auth", - key: "x_auth_username", + key: "x_auth_username", value: "X-Auth-CouchDB-UserName"}, {section: "chttpd_auth", - key: "x_auth_roles", + key: "x_auth_roles", value: "X-Auth-CouchDB-Roles"}, {section: "chttpd_auth", - key: "x_auth_token", + key: "x_auth_token", value: "X-Auth-CouchDB-Token"}, {section: "chttpd_auth", - key: 
"proxy_use_secret", + key: "proxy_use_secret", value: "false"}], TestFun ); diff --git a/test/javascript/tests/purge.js b/test/javascript/tests/purge.js index 0c11d9ad8fd..15fd6371091 100644 --- a/test/javascript/tests/purge.js +++ b/test/javascript/tests/purge.js @@ -9,7 +9,7 @@ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. - +couchTests.elixir = true; couchTests.purge = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/reader_acl.js b/test/javascript/tests/reader_acl.js index 8dc28aae9f1..d5a92354960 100644 --- a/test/javascript/tests/reader_acl.js +++ b/test/javascript/tests/reader_acl.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true; couchTests.reader_acl = function(debug) { // this tests read access control diff --git a/test/javascript/tests/recreate_doc.js b/test/javascript/tests/recreate_doc.js index 154a6e45b5a..1aa44ede823 100644 --- a/test/javascript/tests/recreate_doc.js +++ b/test/javascript/tests/recreate_doc.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true; couchTests.recreate_doc = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}, {"w": 3}); diff --git a/test/javascript/tests/reduce_builtin.js b/test/javascript/tests/reduce_builtin.js index 4686841e3c2..77d8d1b34eb 100644 --- a/test/javascript/tests/reduce_builtin.js +++ b/test/javascript/tests/reduce_builtin.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. 
+couchTests.elixir = true; couchTests.reduce_builtin = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/reduce_false.js b/test/javascript/tests/reduce_false.js index 81b4c8a4fe6..69d8b0cf428 100644 --- a/test/javascript/tests/reduce_false.js +++ b/test/javascript/tests/reduce_false.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true; couchTests.reduce_false = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/reduce_false_temp.js b/test/javascript/tests/reduce_false_temp.js index 51b23bd6ba0..a13b4ab184c 100644 --- a/test/javascript/tests/reduce_false_temp.js +++ b/test/javascript/tests/reduce_false_temp.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.skip = true; couchTests.reduce_false_temp = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/replicator_db_bad_rep_id.js b/test/javascript/tests/replicator_db_bad_rep_id.js index 30a12450539..0912c1bc00a 100644 --- a/test/javascript/tests/replicator_db_bad_rep_id.js +++ b/test/javascript/tests/replicator_db_bad_rep_id.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. 
+couchTests.elixir = true; couchTests.replicator_db_bad_rep_id = function(debug) { //return console.log('TODO'); if (debug) debugger; diff --git a/test/javascript/tests/replicator_db_by_doc_id.js b/test/javascript/tests/replicator_db_by_doc_id.js index d9de0f1195f..bc15b03d2f5 100644 --- a/test/javascript/tests/replicator_db_by_doc_id.js +++ b/test/javascript/tests/replicator_db_by_doc_id.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true; couchTests.replicator_db_by_doc_id = function(debug) { //return console.log('TODO'); diff --git a/test/javascript/tests/rev_stemming.js b/test/javascript/tests/rev_stemming.js index 238868f6015..725c0f1c9d9 100644 --- a/test/javascript/tests/rev_stemming.js +++ b/test/javascript/tests/rev_stemming.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true; couchTests.rev_stemming = function(debug) { var db_name_orig = get_random_db_name(); diff --git a/test/javascript/tests/rewrite.js b/test/javascript/tests/rewrite.js index a984936d104..88479b87763 100644 --- a/test/javascript/tests/rewrite.js +++ b/test/javascript/tests/rewrite.js @@ -10,7 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. - +couchTests.elixir = true; couchTests.rewrite = function(debug) { if (debug) debugger; diff --git a/test/javascript/tests/rewrite_js.js b/test/javascript/tests/rewrite_js.js index 22de6c940bf..7179fc9f5b2 100644 --- a/test/javascript/tests/rewrite_js.js +++ b/test/javascript/tests/rewrite_js.js @@ -11,7 +11,7 @@ // the License. 
- +couchTests.elixir = true; couchTests.rewrite = function(debug) { if (debug) debugger; var dbNames = [get_random_db_name(), get_random_db_name() + "test_suite_db/with_slashes"]; @@ -116,7 +116,6 @@ couchTests.rewrite = function(debug) { }), lists: { simpleForm: stringFun(function(head, req) { - log("simpleForm"); send('
    '); var row, row_number = 0, prevKey, firstKey = null; while (row = getRow()) { @@ -345,6 +344,22 @@ couchTests.rewrite = function(debug) { var xhr = CouchDB.request("GET", url); TEquals(400, xhr.status); + // test requests with body preserve the query string rewrite + var ddoc_qs = { + "_id": "_design/qs", + "rewrites": "function (r) { return {path: '../../_changes', query: {'filter': '_doc_ids'}};};" + } + db.save(ddoc_qs); + db.save({"_id": "qs1", "foo": "bar"}); + db.save({"_id": "qs2", "foo": "bar"}); + + var url = "/"+dbName+"/_design/qs/_rewrite"; + + var xhr = CouchDB.request("POST", url, {body: JSON.stringify({"doc_ids": ["qs2"]})}); + var result = JSON.parse(xhr.responseText); + T(xhr.status == 200); + T(result.results.length == 1, "Only one doc is expected"); + TEquals(result.results[0].id, "qs2"); // cleanup db.deleteDb(); } diff --git a/test/javascript/tests/security_validation.js b/test/javascript/tests/security_validation.js index 6f0bd0f4221..365f716e60f 100644 --- a/test/javascript/tests/security_validation.js +++ b/test/javascript/tests/security_validation.js @@ -9,7 +9,7 @@ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. - +couchTests.elixir = true; couchTests.security_validation = function(debug) { var db_name = get_random_db_name(); diff --git a/test/javascript/tests/show_documents.js b/test/javascript/tests/show_documents.js index 172a7953224..e604f3058c3 100644 --- a/test/javascript/tests/show_documents.js +++ b/test/javascript/tests/show_documents.js @@ -9,7 +9,7 @@ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. 
- +couchTests.elixir = true couchTests.show_documents = function(debug) { var db_name = get_random_db_name(); diff --git a/test/javascript/tests/update_documents.js b/test/javascript/tests/update_documents.js index 6cd4a91d698..913c99a57cf 100644 --- a/test/javascript/tests/update_documents.js +++ b/test/javascript/tests/update_documents.js @@ -10,7 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. - +couchTests.elixir = true couchTests.update_documents = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/users_db_security.js b/test/javascript/tests/users_db_security.js index faffd8c27bb..3e293c5ebae 100644 --- a/test/javascript/tests/users_db_security.js +++ b/test/javascript/tests/users_db_security.js @@ -9,7 +9,7 @@ // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. - +couchTests.elixir = true; couchTests.users_db_security = function(debug) { var db_name = '_users'; var usersDb = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/view_collation_raw.js b/test/javascript/tests/view_collation_raw.js index 9b02ff49d3a..ee990bc4c57 100644 --- a/test/javascript/tests/view_collation_raw.js +++ b/test/javascript/tests/view_collation_raw.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. 
+couchTests.elixir = true; couchTests.view_collation_raw = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/view_compaction.js b/test/javascript/tests/view_compaction.js index d1a1e8790eb..f2af390586e 100644 --- a/test/javascript/tests/view_compaction.js +++ b/test/javascript/tests/view_compaction.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true; couchTests.view_compaction = function(debug) { if (debug) debugger; diff --git a/test/javascript/tests/view_conflicts.js b/test/javascript/tests/view_conflicts.js index b1c938c6144..b1efa234fbe 100644 --- a/test/javascript/tests/view_conflicts.js +++ b/test/javascript/tests/view_conflicts.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true couchTests.view_conflicts = function(debug) { var db_name_a = get_random_db_name(); diff --git a/test/javascript/tests/view_errors.js b/test/javascript/tests/view_errors.js index 6b9c75466af..4774220302d 100644 --- a/test/javascript/tests/view_errors.js +++ b/test/javascript/tests/view_errors.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true; couchTests.view_errors = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/view_include_docs.js b/test/javascript/tests/view_include_docs.js index cefc2cf9015..b96227d7545 100644 --- a/test/javascript/tests/view_include_docs.js +++ b/test/javascript/tests/view_include_docs.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. 
+couchTests.elixir = true; couchTests.view_include_docs = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/view_multi_key_all_docs.js b/test/javascript/tests/view_multi_key_all_docs.js index 6704a0ffa9b..8969c88c907 100644 --- a/test/javascript/tests/view_multi_key_all_docs.js +++ b/test/javascript/tests/view_multi_key_all_docs.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true; couchTests.view_multi_key_all_docs = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/view_multi_key_design.js b/test/javascript/tests/view_multi_key_design.js index a50d1fb9f15..20e52a2d0b5 100644 --- a/test/javascript/tests/view_multi_key_design.js +++ b/test/javascript/tests/view_multi_key_design.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true; couchTests.view_multi_key_design = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/view_multi_key_temp.js b/test/javascript/tests/view_multi_key_temp.js index 25bec4b31ce..2bed6e7bf74 100644 --- a/test/javascript/tests/view_multi_key_temp.js +++ b/test/javascript/tests/view_multi_key_temp.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. 
+couchTests.skip = true; couchTests.view_multi_key_temp = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/view_offsets.js b/test/javascript/tests/view_offsets.js index 8b39cc24737..179c963607b 100644 --- a/test/javascript/tests/view_offsets.js +++ b/test/javascript/tests/view_offsets.js @@ -10,6 +10,8 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true; + couchTests.view_offsets = function(debug) { if (debug) debugger; diff --git a/test/javascript/tests/view_pagination.js b/test/javascript/tests/view_pagination.js index df5390eb35a..6da5f8d48d3 100644 --- a/test/javascript/tests/view_pagination.js +++ b/test/javascript/tests/view_pagination.js @@ -10,6 +10,8 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true; + couchTests.view_pagination = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/view_sandboxing.js b/test/javascript/tests/view_sandboxing.js index 1cdd815dea8..0e5f308a9c2 100644 --- a/test/javascript/tests/view_sandboxing.js +++ b/test/javascript/tests/view_sandboxing.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. +couchTests.elixir = true couchTests.view_sandboxing = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"}); diff --git a/test/javascript/tests/view_update_seq.js b/test/javascript/tests/view_update_seq.js index c14453f0531..8b3a3fb848c 100644 --- a/test/javascript/tests/view_update_seq.js +++ b/test/javascript/tests/view_update_seq.js @@ -10,6 +10,7 @@ // License for the specific language governing permissions and limitations under // the License. 
+couchTests.elixir = true; couchTests.view_update_seq = function(debug) { var db_name = get_random_db_name(); var db = new CouchDB(db_name, {"X-Couch-Full-Commit":"false"});